diff --git a/.vscode-template/settings.json b/.vscode-template/settings.json
index cc88f3c9e7e..719d1639a04 100644
--- a/.vscode-template/settings.json
+++ b/.vscode-template/settings.json
@@ -8,7 +8,8 @@
         ".env*": "ini",
         "Dockerfile*": "dockerfile",
         "**/requirements/*.txt": "pip-requirements",
-        "**/requirements/*.in": "pip-requirements"
+        "**/requirements/*.in": "pip-requirements",
+        "*Makefile": "makefile"
     },
     "files.eol": "\n",
     "files.insertFinalNewline": true,
@@ -28,7 +29,8 @@
     },
     "python.testing.pyTestEnabled": true,
     "autoDocstring.docstringFormat": "sphinx",
-    "shellcheck.executablePath": "${workspaceFolder}/scripts/shellcheck",
+    "shellcheck.executablePath": "${workspaceFolder}/scripts/shellcheck.bash",
     "shellcheck.run": "onSave",
-    "shellcheck.enableQuickFix": true
+    "shellcheck.enableQuickFix": true,
+    "python.formatting.provider": "black"
 }
diff --git a/Makefile b/Makefile
index ba32da65096..c116bd38d81 100644
--- a/Makefile
+++ b/Makefile
@@ -340,9 +340,9 @@ openapi-specs: ## bundles and validates openapi specifications and schemas of AL
 
 .PHONY: code-analysis
 code-analysis: .codeclimate.yml ## runs code-climate analysis
 	# Validates $<
-	./scripts/code-climate.sh validate-config
+	./scripts/code-climate.bash validate-config
 	# Running analysis
-	./scripts/code-climate.sh analyze
+	./scripts/code-climate.bash analyze
 
 
 .PHONY: info info-images info-swarm info-tools
diff --git a/packages/postgres-database/Makefile b/packages/postgres-database/Makefile
index 66cf24e7180..1466fec87e1 100644
--- a/packages/postgres-database/Makefile
+++ b/packages/postgres-database/Makefile
@@ -1,26 +1,17 @@
 #
-# TODO: under development
+# Targets for DEVELOPMENT of postgres-database
 #
-.DEFAULT_GOAL := help
+include ../../scripts/common.Makefile
 
 REPO_BASE_DIR = $(abspath $(CURDIR)/../../)
 
-.PHONY: devenv
-devenv: ## build development environment (using main services/docker-compose-build.yml)
-	@$(MAKE) --directory ${REPO_BASE_DIR} $@
-
-
 .PHONY: requirements
 requirements: ## compiles pip requirements (.in -> .txt)
 	@$(MAKE) --directory requirements all
 
-.check-venv-active:
-	# checking whether virtual environment was activated
-	@python3 -c "import sys; assert sys.base_prefix!=sys.prefix"
-
 .PHONY: install-dev install-prod install-ci
-install-dev install-prod install-ci: requirements .check-venv-active ## install app in development/production or CI mode
+install-dev install-prod install-ci: requirements _check_venv_active ## install app in development/production or CI mode
 	# installing in $(subst install-,,$@) mode
 	pip-sync requirements/$(subst install-,,$@).txt
 
@@ -31,20 +22,6 @@ tests: ## runs unit tests
 	@pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests
 
-.PHONY: version-patch version-minor
-version-patch version-minor version-major: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes)
-	# upgrades as $(subst version-,,$@) version, commits and tags
-	@bump2version --verbose --list $(subst version-,,$@)
-
-
-.PHONY: info
-info: ## displays
-	# installed
-	@pip list
-	# version
-	@cat setup.py | grep version=
-
-
 .PHONY: setup-commit
 setup-commit: install-dev up-pg ## sets up a database to create a new commit into migration history
 	# discovering
@@ -57,38 +34,12 @@ setup-commit: install-dev up-pg ## sets up a database to create a new commit int
 	@echo "To add new commit, sc-pg review -m \"Some message\" "
 
 
-_docker_compose_config := tests/docker-compose.yml
-
 .PHONY: up-pg down-pg
-up-pg: ## starts pg server
-	docker-compose -f $(_docker_compose_config) up -d
-
-down-pg: ## stops pg server
-	docker-compose -f $(_docker_compose_config) down
-
-
-.PHONY: autoformat
-autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/]
-	# auto formatting with black
-	@python3 -m black --verbose \
-		--exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration)/" \
-		$(CURDIR)
-
-
-.PHONY: clean
-clean: down ## cleans all unversioned files in project and temp files create by this makefile
-	# Cleaning unversioned
-	@git clean -ndxf
-	@echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ]
-	@echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ]
-	@git clean -dxf
+DOCKER_COMPOSE_CONFIG := tests/docker-compose.yml
+up-pg: $(DOCKER_COMPOSE_CONFIG) ## starts pg server
+	docker-compose -f $< up -d
 
-.PHONY: help
-# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
-help: ## this colorful help
-	@echo "Recipes for '$(notdir $(CURDIR))':"
-	@echo ""
-	@awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
-	@echo ""
+down-pg: $(DOCKER_COMPOSE_CONFIG) ## stops pg server
+	docker-compose -f $< down
diff --git a/packages/postgres-database/docker/Makefile b/packages/postgres-database/docker/Makefile
index fc29722cdb2..6f6439a2ca4 100644
--- a/packages/postgres-database/docker/Makefile
+++ b/packages/postgres-database/docker/Makefile
@@ -53,9 +53,12 @@ upgrade: ## migrate the postgres database
 down: ## stop migration service
 	@docker service rm $(APP_NAME)
 
+
+DOCKER_IMAGES = $(shell docker images */$(APP_NAME):* -q)
+
 .PHONY: clean
 clean: ## clean all created images
-	-@docker image rm -f $(shell docker images */$(APP_NAME):* -q)
+	$(if $(DOCKER_IMAGES),@docker image rm -f $(DOCKER_IMAGES),$(info No image to delete))
 
 .PHONY: help
 # thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py
index 5ed0f0aa3ba..b1fd8bbdf00 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_tasks.py
@@ -10,9 +10,9 @@
 
 
 class NodeClass(enum.Enum):
-    COMPUTATIONAL="COMPUTATIONAL"
-    INTERACTIVE="INTERACTIVE"
-    FRONTEND="FRONTEND"
+    COMPUTATIONAL = "COMPUTATIONAL"
+    INTERACTIVE = "INTERACTIVE"
+    FRONTEND = "FRONTEND"
 
 
 comp_tasks = sa.Table(
@@ -37,6 +37,5 @@ class NodeClass(enum.Enum):
     sa.Column("submit", sa.DateTime),
     sa.Column("start", sa.DateTime),
     sa.Column("end", sa.DateTime),
-
-    sa.UniqueConstraint('project_id', 'node_id', name='project_node_uniqueness'),
+    sa.UniqueConstraint("project_id", "node_id", name="project_node_uniqueness"),
 )
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py
index eacd05d0ea5..38f7dbf2b1c 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py
@@ -15,12 +15,8 @@
         nullable=False,
     ),
     sa.Column(
-        "project_id",
-        sa.BigInteger,
-        sa.ForeignKey(projects.c.id),
-        nullable=False,
+        "project_id", sa.BigInteger, sa.ForeignKey(projects.c.id), nullable=False,
     ),
-
     # TODO: do not ondelete=cascase for project_id or it will delete SHARED PROJECT
     # add instead sa.UniqueConstraint('user_id', 'project_id', name='user_project_uniqueness'),
     #
diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py
index d3e68121e61..f45d746514a 100644
--- a/packages/postgres-database/tests/conftest.py
+++ b/packages/postgres-database/tests/conftest.py
@@ -10,40 +10,42 @@
 import aiopg.sa
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def postgres_service(docker_services, docker_ip, docker_compose_file) -> str:
     # container environment
     with open(docker_compose_file) as fh:
         config = yaml.safe_load(fh)
-    environ = config['services']['postgres']['environment']
+    environ = config["services"]["postgres"]["environment"]
 
     dsn = "postgresql://{user}:{password}@{host}:{port}/{database}".format(
-        user=environ['POSTGRES_USER'],
-        password=environ['POSTGRES_PASSWORD'],
+        user=environ["POSTGRES_USER"],
+        password=environ["POSTGRES_PASSWORD"],
         host=docker_ip,
-        port=docker_services.port_for('postgres', 5432),
-        database=environ['POSTGRES_DB'],
+        port=docker_services.port_for("postgres", 5432),
+        database=environ["POSTGRES_DB"],
     )
 
     # Wait until service is responsive.
     docker_services.wait_until_responsive(
-        check=lambda: is_postgres_responsive(dsn),
-        timeout=30.0,
-        pause=0.1,
+        check=lambda: is_postgres_responsive(dsn), timeout=30.0, pause=0.1,
     )
 
     return dsn
 
+
 from typing import Union, Coroutine, Callable
 
+
 @pytest.fixture
 def make_engine(postgres_service):
     dsn = postgres_service
+
     def maker(is_async=True) -> Union[Coroutine, Callable]:
         return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn)
+
     return maker
 
-def is_postgres_responsive(dsn)-> bool:
+
+def is_postgres_responsive(dsn) -> bool:
     """Check if something responds to ``url`` """
     try:
         engine = sa.create_engine(dsn)
diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py
index a10d483ee2d..af282d9d976 100644
--- a/packages/postgres-database/tests/test_delete_projects_and_users.py
+++ b/packages/postgres-database/tests/test_delete_projects_and_users.py
@@ -12,31 +12,36 @@
 from aiopg.sa.result import ResultProxy, RowProxy
 
 from simcore_postgres_database.models.base import metadata
-from simcore_postgres_database.webserver_models import (UserStatus, projects,
-                                                        user_to_projects,
-                                                        users)
+from simcore_postgres_database.webserver_models import (
+    UserStatus,
+    projects,
+    user_to_projects,
+    users,
+)
 
 fake = faker.Faker()
 
+
 def random_user(**overrides):
     data = dict(
-        name = fake.name(),
-        email = fake.email(),
-        password_hash = fake.numerify(text='#'*5),
-        status = UserStatus.ACTIVE,
-        created_ip=fake.ipv4()
+        name=fake.name(),
+        email=fake.email(),
+        password_hash=fake.numerify(text="#" * 5),
+        status=UserStatus.ACTIVE,
+        created_ip=fake.ipv4(),
     )
     data.update(overrides)
     return data
 
+
 def random_project(**overrides):
     data = dict(
-        uuid = uuid4(),
-        name = fake.word(),
-        description= fake.sentence(),
-        prj_owner = fake.email(),
-        workbench = {},
-        published = False
+        uuid=uuid4(),
+        name=fake.word(),
+        description=fake.sentence(),
+        prj_owner=fake.email(),
+        workbench={},
+        published=False,
     )
     data.update(overrides)
     return data
@@ -59,9 +64,15 @@ async def start():
             await conn.execute(projects.insert().values(**random_project()))
             await conn.execute(projects.insert().values(**random_project()))
 
-            await conn.execute(user_to_projects.insert().values(user_id=1, project_id=1))
-            await conn.execute(user_to_projects.insert().values(user_id=1, project_id=2))
-            await conn.execute(user_to_projects.insert().values(user_id=2, project_id=3))
+            await conn.execute(
+                user_to_projects.insert().values(user_id=1, project_id=1)
+            )
+            await conn.execute(
+                user_to_projects.insert().values(user_id=1, project_id=2)
+            )
+            await conn.execute(
+                user_to_projects.insert().values(user_id=2, project_id=3)
+            )
 
     return engine
 
@@ -92,8 +103,8 @@ async def test_view(engine):
         assert len(rows) == 3
 
         # effect of cascade is that relation deletes as well
-        res = await conn.execute(user_to_projects.select())
+        res = await conn.execute(user_to_projects.select())
         rows = await res.fetchall()
 
         assert len(rows) == 1
-        assert not any( row[user_to_projects.c.user_id]==1 for row in rows )
+        assert not any(row[user_to_projects.c.user_id] == 1 for row in rows)
diff --git a/packages/service-library/Makefile b/packages/service-library/Makefile
index 7ae88f2e620..37d89b948a9 100644
--- a/packages/service-library/Makefile
+++ b/packages/service-library/Makefile
@@ -1,54 +1,18 @@
 #
-# TODO: under development
+# Targets for DEVELOPMENT of Service Library
 #
-.DEFAULT_GOAL := help
+include ../../scripts/common.Makefile
 
 REPO_BASE_DIR = $(abspath $(CURDIR)/../../)
 
-.PHONY: devenv
-devenv: ## build development environment (using main services/docker-compose-build.yml)
-	@$(MAKE) --directory ${REPO_BASE_DIR} $@
-
 .PHONY: install-dev install-prod install-ci
-install-dev install-prod install-ci: ## install app in development/production or CI mode
+install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode
 	# installing in $(subst install-,,$@) mode
-	@pip3 install -r requirements/$(subst install-,,$@).txt
+	python -m pip install -r requirements/$(subst install-,,$@).txt
 
 .PHONY: tests
 tests: ## runs unit tests
 	# running unit tests
 	@pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests
-
-
-.PHONY: version-patch version-minor
-version-patch version-minor version-major: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes)
-	# upgrades as $(subst version-,,$@) version, commits and tags
-	@bump2version --verbose --list $(subst version-,,$@)
-
-
-.PHONY: info
-info: ## displays
-	# installed
-	@pip list
-	# version
-	@cat setup.py | grep version=
-
-
-.PHONY: clean
-clean: ## cleans all unversioned files in project and temp files create by this makefile
-	# Cleaning unversioned
-	@git clean -ndxf
-	@echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ]
-	@echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ]
-	@git clean -dxf
-
-
-.PHONY: help
-# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
-help: ## this colorful help
-	@echo "Recipes for '$(notdir $(CURDIR))':"
-	@echo ""
-	@awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
-	@echo ""
diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py
index ef256bb5be9..96ddbbb4e98 100644
--- a/packages/service-library/setup.py
+++ b/packages/service-library/setup.py
@@ -7,38 +7,40 @@
 
 here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
 
-def read_reqs( reqs_path: Path):
-    return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE)
 
+def read_reqs(reqs_path: Path):
+    return re.findall(r"(^[^#-][\w]+[-~>=<.\w]+)", reqs_path.read_text(), re.MULTILINE)
 
-install_requirements = read_reqs( here / "requirements" / "_base.in" ) # WEAK requirements
+
+install_requirements = read_reqs(
+    here / "requirements" / "_base.in"
+)  # WEAK requirements
 
-test_requirements = read_reqs( here / "requirements" / "_test.txt" ) # STRONG requirements
+test_requirements = read_reqs(
+    here / "requirements" / "_test.txt"
+)  # STRONG requirements
 
-readme = Path( here / "README.rst" ).read_text()
+readme = Path(here / "README.rst").read_text()
 
 setup(
-    name='simcore-service-library',
-    version='0.1.0',
+    name="simcore-service-library",
+    version="0.1.0",
     author="Pedro Crespo (pcrespov)",
     description="Core service library for simcore (or servicelib)",
     classifiers=[
-        'Development Status :: 2 - Pre-Alpha',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: MIT License',
-        'Natural Language :: English',
-        'Programming Language :: Python :: 3.6',
+        "Development Status :: 2 - Pre-Alpha",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: MIT License",
+        "Natural Language :: English",
+        "Programming Language :: Python :: 3.6",
     ],
     long_description=readme,
     license="MIT license",
     install_requires=install_requirements,
-    packages=find_packages(where='src'),
-    package_dir={'': 'src'},
+    packages=find_packages(where="src"),
+    package_dir={"": "src"},
     include_package_data=True,
-    test_suite='tests',
+    test_suite="tests",
     tests_require=test_requirements,
-    extras_require= {
-        'test': test_requirements
-    },
-    zip_safe=False
+    extras_require={"test": test_requirements},
+    zip_safe=False,
 )
diff --git a/packages/service-library/src/servicelib/__init__.py b/packages/service-library/src/servicelib/__init__.py
index c3c50323628..9e1daa52652 100644
--- a/packages/service-library/src/servicelib/__init__.py
+++ b/packages/service-library/src/servicelib/__init__.py
@@ -2,4 +2,4 @@
 
 """
 
-__version__ = '0.1.0'
+__version__ = "0.1.0"
diff --git a/packages/service-library/src/servicelib/aiopg_utils.py b/packages/service-library/src/servicelib/aiopg_utils.py
index 6c3f81b654e..424b4f6c5a9 100644
--- a/packages/service-library/src/servicelib/aiopg_utils.py
+++ b/packages/service-library/src/servicelib/aiopg_utils.py
@@ -22,24 +22,32 @@
 from aiopg.sa import Engine, create_engine
 from psycopg2 import DatabaseError
 from psycopg2 import Error as DBAPIError
-from tenacity import (RetryCallState, after_log, before_sleep_log, retry,
-                      retry_if_exception_type, stop_after_attempt, wait_fixed)
+from tenacity import (
+    RetryCallState,
+    after_log,
+    before_sleep_log,
+    retry,
+    retry_if_exception_type,
+    stop_after_attempt,
+    wait_fixed,
+)
 
 log = logging.getLogger(__name__)
 
 DSN = "postgresql://{user}:{password}@{host}:{port}/{database}"
 
+
 @attr.s(auto_attribs=True)
 class DataSourceName:
     # Attributes for postgres db
     user: str
-    password: str=attr.ib(repr=False)
+    password: str = attr.ib(repr=False)
     database: str
-    host: str='localhost'
-    port: int=5432
+    host: str = "localhost"
+    port: int = 5432
 
     # Attributes about the caller
-    application_name: Optional[str]=None
+    application_name: Optional[str] = None
 
     def asdict(self) -> Dict:
         return attr.asdict(self)
@@ -51,8 +59,7 @@ def to_uri(self, with_query=False) -> str:
         return uri
 
 
-
-def create_pg_engine(dsn: DataSourceName, minsize:int=1, maxsize:int=4):
+def create_pg_engine(dsn: DataSourceName, minsize: int = 1, maxsize: int = 4):
     """ Adapts the arguments of aiopg.sa.create_engine
 
     Returns a coroutine that is awaitable, i.e.
@@ -62,10 +69,11 @@ def create_pg_engine(dsn: DataSourceName, minsize:int=1, maxsize:int=4):
         assert engine.closed
 
     """
-    awaitable_engine_coro = create_engine(dsn.to_uri(),
+    awaitable_engine_coro = create_engine(
+        dsn.to_uri(),
         application_name=dsn.application_name,
         minsize=minsize,
-        maxsize=maxsize
+        maxsize=maxsize,
     )
     return awaitable_engine_coro
 
@@ -115,11 +123,10 @@ def is_postgres_responsive(dsn: DataSourceName) -> bool:
     return ok
 
 
-
 def raise_http_unavailable_error(retry_state: RetryCallState):
     # TODO: mark incident on db to determine the quality of service. E.g. next time we do not stop. TIP: obj, query = retry_state.args; obj.app.register_incidents
 
-    exc :DatabaseError = retry_state.outcome.exception()
+    exc: DatabaseError = retry_state.outcome.exception()
     # StandardError
     # |__ Warning
     # |__ Error
@@ -136,7 +143,6 @@ def raise_http_unavailable_error(retry_state: RetryCallState):
     # SEE https://aiopg.readthedocs.io/en/stable/core.html?highlight=Exception#exceptions
     # SEE http://initd.org/psycopg/docs/module.html#dbapi-exceptions
 
-
     # TODO: add header with Retry-After https://tools.ietf.org/html/rfc7231#section-7.1.3
     resp = web.HTTPServiceUnavailable()
 
@@ -150,26 +156,29 @@
 class PostgresRetryPolicyUponInitialization:
     """ Retry policy upon service initialization
     """
+
     WAIT_SECS = 2
     ATTEMPTS_COUNT = 20
 
-    def __init__(self, logger: Optional[logging.Logger]=None):
+    def __init__(self, logger: Optional[logging.Logger] = None):
         logger = logger or log
 
         self.kwargs = dict(
             wait=wait_fixed(self.WAIT_SECS),
             stop=stop_after_attempt(self.ATTEMPTS_COUNT),
             before_sleep=before_sleep_log(logger, logging.INFO),
-            reraise=True
+            reraise=True,
         )
 
+
 class PostgresRetryPolicyUponOperation:
     """ Retry policy upon service operation
     """
+
     WAIT_SECS = 2
     ATTEMPTS_COUNT = 3
 
-    def __init__(self, logger: Optional[logging.Logger]=None):
+    def __init__(self, logger: Optional[logging.Logger] = None):
         logger = logger or log
 
         self.kwargs = dict(
@@ -177,9 +186,10 @@ def __init__(self, logger: Optional[logging.Logger]=None):
             wait=wait_fixed(self.WAIT_SECS),
             stop=stop_after_attempt(self.ATTEMPTS_COUNT),
             after=after_log(logger, logging.WARNING),
-            retry_error_callback=raise_http_unavailable_error
+            retry_error_callback=raise_http_unavailable_error,
         )
 
+
 # alias
 postgres_service_retry_policy_kwargs = PostgresRetryPolicyUponOperation().kwargs
 
@@ -199,7 +209,7 @@ async def wrapper(*args, **kargs):
             result = await _deco_func(*args, **kargs)
         finally:
             stats = _deco_func.retry.statistics
-            _total_retry_count += int(stats.get('attempt_number', 0))
+            _total_retry_count += int(stats.get("attempt_number", 0))
         return result
 
     def total_retry_count():
@@ -210,9 +220,8 @@ def total_retry_count():
     return wrapper
 
 
-
 __all__ = [
-    'DBAPIError',
-    'PostgresRetryPolicyUponInitialization',
-    'PostgresRetryPolicyUponOperation'
+    "DBAPIError",
+    "PostgresRetryPolicyUponInitialization",
+    "PostgresRetryPolicyUponOperation",
 ]
diff --git a/packages/service-library/src/servicelib/application.py b/packages/service-library/src/servicelib/application.py
index acd5c4fb957..57c639e0fee 100644
--- a/packages/service-library/src/servicelib/application.py
+++ b/packages/service-library/src/servicelib/application.py
@@ -5,6 +5,7 @@
 from .application_keys import APP_CONFIG_KEY
 from .client_session import persistent_client_session
 
+
 async def startup_info(app: web.Application):
     print(f"INFO: STARTING UP {app}...", flush=True)
 
@@ -13,7 +14,7 @@ async def shutdown_info(app: web.Application):
     print(f"INFO: SHUTING DOWN {app} ...", flush=True)
 
 
-def create_safe_application(config: Optional[Dict]=None) -> web.Application:
+def create_safe_application(config: Optional[Dict] = None) -> web.Application:
     app = web.Application()
 
     # Enxures config entry
diff --git a/packages/service-library/src/servicelib/application_keys.py b/packages/service-library/src/servicelib/application_keys.py
index d19f14f6f37..d1cf96f6ec4 100644
--- a/packages/service-library/src/servicelib/application_keys.py
+++ b/packages/service-library/src/servicelib/application_keys.py
@@ -18,13 +18,13 @@
 #
 # web.Application keys, i.e. app[APP_*_KEY]
 #
-APP_CONFIG_KEY = f'{__name__ }.config'
-APP_OPENAPI_SPECS_KEY = f'{__name__ }.openapi_specs'
-APP_JSONSCHEMA_SPECS_KEY = f'{__name__ }.jsonschema_specs'
+APP_CONFIG_KEY = f"{__name__ }.config"
+APP_OPENAPI_SPECS_KEY = f"{__name__ }.openapi_specs"
+APP_JSONSCHEMA_SPECS_KEY = f"{__name__ }.jsonschema_specs"
 
-APP_DB_ENGINE_KEY = f'{__name__ }.db_engine'
+APP_DB_ENGINE_KEY = f"{__name__ }.db_engine"
 
-APP_CLIENT_SESSION_KEY = f'{__name__ }.session'
+APP_CLIENT_SESSION_KEY = f"{__name__ }.session"
 
 #
 # web.Response keys, i.e. app[RSP_*_KEY]
diff --git a/packages/service-library/src/servicelib/application_setup.py b/packages/service-library/src/servicelib/application_setup.py
index b69c8915f5b..c0a8a1637d5 100644
--- a/packages/service-library/src/servicelib/application_setup.py
+++ b/packages/service-library/src/servicelib/application_setup.py
@@ -12,21 +12,29 @@
 
 APP_SETUP_KEY = f"{__name__ }.setup"
 
+
 class ModuleCategory(Enum):
     SYSTEM = 0
     ADDON = 1
 
+
 class ApplicationSetupError(Exception):
     pass
 
+
 class DependencyError(ApplicationSetupError):
     pass
 
-def app_module_setup(module_name: str, category: ModuleCategory,*,
-                     depends: Optional[List[str]]=None,
-                     config_section: str=None, config_enabled: str=None,
-                     logger: Optional[logging.Logger]=None
-                     ) -> Callable:
+
+def app_module_setup(
+    module_name: str,
+    category: ModuleCategory,
+    *,
+    depends: Optional[List[str]] = None,
+    config_section: str = None,
+    config_enabled: str = None,
+    logger: Optional[logging.Logger] = None,
+) -> Callable:
     """ Decorator that marks a function as 'a setup function' for a given module in an application
 
         - Marks a function as 'setup' of a given module in an application
@@ -75,20 +83,22 @@ def decorate(setup_func):
         if "setup" not in setup_func.__name__:
             logger.warning("Rename '%s' to contain 'setup'", setup_func.__name__)
 
-        # metadata info
+        # metadata info
         def setup_metadata() -> Dict:
             return {
-                'module_name': module_name,
-                'dependencies': depends,
-                'config_section': section,
-                'config_enabled': config_enabled
+                "module_name": module_name,
+                "dependencies": depends,
+                "config_section": section,
+                "config_enabled": config_enabled,
             }
 
         # wrapper
         @functools.wraps(setup_func)
         def setup_wrapper(app: web.Application, *args, **kargs) -> bool:
             # pre-setup
-            logger.debug("Setting up '%s' [%s; %s] ... ", module_name, category.name, depends)
+            logger.debug(
+                "Setting up '%s' [%s; %s] ... ", module_name, category.name, depends
+            )
 
             if APP_SETUP_KEY not in app:
                 app[APP_SETUP_KEY] = []
@@ -100,7 +110,9 @@ def setup_wrapper(app: web.Application, *args, **kargs) -> bool:
 
                 def _get(cfg_, parts):
                     for part in parts:
-                        if section and part == "enabled": # if section exists, no need to explicitly enable it
+                        if (
+                            section and part == "enabled"
+                        ):  # if section exists, no need to explicitly enable it
                             cfg_ = cfg_.get(part, True)
                         else:
                             cfg_ = cfg_[part]
@@ -109,14 +121,21 @@ def _get(cfg_, parts):
                 try:
                     is_enabled = _get(cfg, config_enabled.split("."))
                 except KeyError as ee:
-                    raise ApplicationSetupError(f"Cannot find '{config_enabled}' in app config at [ {ee} ]")
+                    raise ApplicationSetupError(
+                        f"Cannot find '{config_enabled}' in app config at [ {ee} ]"
+                    )
 
                 if not is_enabled:
-                    logger.info("Skipping '%s' setup. Explicitly disabled in config", module_name)
+                    logger.info(
+                        "Skipping '%s' setup. Explicitly disabled in config",
+                        module_name,
+                    )
                     return False
 
             if depends:
-                uninitialized = [dep for dep in depends if dep not in app[APP_SETUP_KEY]]
+                uninitialized = [
+                    dep for dep in depends if dep not in app[APP_SETUP_KEY]
+                ]
                 if uninitialized:
                     msg = f"The following '{module_name}'' dependencies are still uninitialized: {uninitialized}"
                     log.error(msg)
@@ -141,14 +160,20 @@ def _get(cfg_, parts):
             return ok
 
         setup_wrapper.metadata = setup_metadata
-        setup_wrapper.MARK = 'setup'
+        setup_wrapper.MARK = "setup"
 
         return setup_wrapper
+
     return decorate
 
 
 def is_setup_function(fun):
-    return inspect.isfunction(fun) and \
-        hasattr(fun, 'MARK') and fun.MARK == 'setup' and \
-        any(param.annotation == web.Application
-            for name, param in inspect.signature(fun).parameters.items())
+    return (
+        inspect.isfunction(fun)
+        and hasattr(fun, "MARK")
+        and fun.MARK == "setup"
+        and any(
+            param.annotation == web.Application
+            for name, param in inspect.signature(fun).parameters.items()
+        )
+    )
diff --git a/packages/service-library/src/servicelib/client_session.py b/packages/service-library/src/servicelib/client_session.py
index a6e6be95fda..8b282dd797c 100644
--- a/packages/service-library/src/servicelib/client_session.py
+++ b/packages/service-library/src/servicelib/client_session.py
@@ -11,6 +11,7 @@
 
 log = logging.getLogger(__name__)
 
+
 def get_client_session(app: web.Application) -> ClientSession:
     """ Lazy initialization of ClientSession
 
@@ -38,19 +39,18 @@ async def persistent_client_session(app: web.Application):
 
     # closes held session
     if session is not app.get(APP_CLIENT_SESSION_KEY):
-        log.error("Unexpected client session upon cleanup! expected %s, got %s",
+        log.error(
+            "Unexpected client session upon cleanup! expected %s, got %s",
             session,
-            app.get(APP_CLIENT_SESSION_KEY))
+            app.get(APP_CLIENT_SESSION_KEY),
+        )
 
     await session.close()
     log.info("Session is actually closed? %s", session.closed)
 
+
 # FIXME: if get_client_session upon startup fails and session is NOT closed. Implement some kind of gracefull shutdonw https://docs.aiohttp.org/en/latest/client_advanced.html#graceful-shutdown
 # TODO: add some tests
 
-__all__ = [
-    'APP_CLIENT_SESSION_KEY',
-    'get_client_session',
-    'persistent_client_session'
-]
+__all__ = ["APP_CLIENT_SESSION_KEY", "get_client_session", "persistent_client_session"]
diff --git a/packages/service-library/src/servicelib/config_schema_utils.py b/packages/service-library/src/servicelib/config_schema_utils.py
index 378c674a7ff..33c516031e4 100644
--- a/packages/service-library/src/servicelib/config_schema_utils.py
+++ b/packages/service-library/src/servicelib/config_schema_utils.py
@@ -1,12 +1,11 @@
 import trafaret as T
 
 
-def addon_section(name: str, optional: bool=False) -> T.Key:
+def addon_section(name: str, optional: bool = False) -> T.Key:
     if optional:
         return T.Key(name, default=dict(enabled=True), optional=optional)
     return T.Key(name)
 
+
 def minimal_addon_schema() -> T.Dict:
-    return T.Dict({
-        T.Key("enabled", default=True, optional=True): T.Bool()
-    })
+    return T.Dict({T.Key("enabled", default=True, optional=True): T.Bool()})
diff --git a/packages/service-library/src/servicelib/decorators.py b/packages/service-library/src/servicelib/decorators.py
index ae8e156a284..e51d1fb4a3b 100644
--- a/packages/service-library/src/servicelib/decorators.py
+++ b/packages/service-library/src/servicelib/decorators.py
@@ -11,11 +11,10 @@
 log = logging.getLogger(__name__)
 
 
-
 def safe_return(if_fails_return=False, catch=None, logger=None):
     # defaults
     if catch is None:
-        catch = (RuntimeError, )
+        catch = (RuntimeError,)
     if logger is None:
         logger = log
 
@@ -27,8 +26,10 @@ def safe_func(*args, **kargs):
                 return res
             except catch as err:
                 logger.info("%s failed:  %s", func.__name__, str(err))
-            except Exception: #pylint: disable=broad-except
+            except Exception:  # pylint: disable=broad-except
                 logger.info("%s failed unexpectedly", func.__name__, exc_info=True)
-            return deepcopy(if_fails_return) # avoid issues with default mutables
+            return deepcopy(if_fails_return)  # avoid issues with default mutables
+
         return safe_func
+
     return decorate
diff --git a/packages/service-library/src/servicelib/jsonschema_specs.py b/packages/service-library/src/servicelib/jsonschema_specs.py
index b7979a74d5b..c3d44917c9c 100644
--- a/packages/service-library/src/servicelib/jsonschema_specs.py
+++ b/packages/service-library/src/servicelib/jsonschema_specs.py
@@ -14,13 +14,17 @@ def _load_from_path(filepath: Path) -> Dict:
         spec_dict = json.load(f)
         return spec_dict
 
+
 async def _load_from_url(session: ClientSession, url: URL) -> Dict:
     async with session.get(url) as resp:
         text = await resp.text()
         spec_dict = json.loads(text)
         return spec_dict
 
-async def create_jsonschema_specs(location: Path, session: ClientSession=None) -> Dict:
+
+async def create_jsonschema_specs(
+    location: Path, session: ClientSession = None
+) -> Dict:
     """ Loads specs from a given location (url or path),
         validates them and returns a working instance
 
@@ -38,7 +42,7 @@ async def create_jsonschema_specs(location: Path, session: ClientSession=None) -
     if URL(str(location)).host:
         spec_dict = await _load_from_url(session, URL(location))
     else:
-        path = Path(location).expanduser().resolve() #pylint: disable=no-member
+        path = Path(location).expanduser().resolve()  # pylint: disable=no-member
         spec_dict = _load_from_path(path)
 
     try:
diff --git a/packages/service-library/src/servicelib/jsonschema_validation.py b/packages/service-library/src/servicelib/jsonschema_validation.py
index c40285f1f79..dc1f6201a1d 100644
--- a/packages/service-library/src/servicelib/jsonschema_validation.py
+++ b/packages/service-library/src/servicelib/jsonschema_validation.py
@@ -5,6 +5,7 @@
 
 log = logging.getLogger(__name__)
 
+
 def validate_instance(instance: Dict, schema: Dict, *, log_errors=True):
     try:
         validate(instance, schema)
diff --git a/packages/service-library/src/servicelib/monitoring.py b/packages/service-library/src/servicelib/monitoring.py
index 5fe020354cb..375829bc3cd 100644
--- a/packages/service-library/src/servicelib/monitoring.py
+++ b/packages/service-library/src/servicelib/monitoring.py
@@ -25,9 +25,10 @@ def middleware_factory(app_name):
     async def middleware_handler(request: web.Request, handler):
         # See https://prometheus.io/docs/concepts/metric_types
         try:
-            request['start_time'] = time.time()
-            request.app['REQUEST_IN_PROGRESS'].labels(
-                app_name, request.path, request.method).inc()
+            request["start_time"] = time.time()
+            request.app["REQUEST_IN_PROGRESS"].labels(
+                app_name, request.path, request.method
+            ).inc()
 
             resp = await handler(request)
 
@@ -35,35 +36,42 @@ async def middleware_handler(request: web.Request, handler):
             # Captures raised reponses (success/failures accounted with resp.status)
             resp = exc
             raise
-        except Exception as exc: #pylint: disable=broad-except
+        except Exception as exc:  # pylint: disable=broad-except
             # Prevents issue #1025.
             resp = web.HTTPInternalServerError(reason=str(exc))
-            resp_time = time.time() - request['start_time']
+            resp_time = time.time() - request["start_time"]
 
             # NOTE: all access to API (i.e. and not other paths as /socket, /x, etc) shall return web.HTTPErrors since processed by error_middleware_factory
-            log.exception('Unexpected server error "%s" from access: %s "%s %s" done in %3.2f secs. Responding with status %s',
+            log.exception(
+                'Unexpected server error "%s" from access: %s "%s %s" done in %3.2f secs. Responding with status %s',
                 type(exc),
-                request.remote, request.method, request.path,
+                request.remote,
+                request.method,
+                request.path,
                 resp_time,
-                resp.status
+                resp.status,
             )
         finally:
             # metrics on the same request
-            resp_time = time.time() - request['start_time']
-            request.app['REQUEST_LATENCY'].labels(
-                app_name, request.path).observe(resp_time)
+            resp_time = time.time() - request["start_time"]
+            request.app["REQUEST_LATENCY"].labels(app_name, request.path).observe(
+                resp_time
+            )
 
-            request.app['REQUEST_IN_PROGRESS'].labels(
-                app_name, request.path, request.method).dec()
+            request.app["REQUEST_IN_PROGRESS"].labels(
+                app_name, request.path, request.method
+            ).dec()
 
-            request.app['REQUEST_COUNT'].labels(
-                app_name, request.method, request.path, resp.status).inc()
+            request.app["REQUEST_COUNT"].labels(
+                app_name, request.method, request.path, resp.status
+            ).inc()
 
         return resp
 
     middleware_handler.__middleware_name__ = __name__
     return middleware_handler
 
+
 async def metrics(_request):
     # TODO: NOT async!
     # prometheus_client access to a singleton registry!
@@ -71,34 +79,39 @@ async def metrics(_request):
     resp.content_type = CONTENT_TYPE_LATEST
     return resp
 
+
 async def check_outermost_middleware(app: web.Application):
     m = app.middlewares[0]
-    ok = m and hasattr(m, "__middleware_name__") and m.__middleware_name__==__name__
+    ok = m and hasattr(m, "__middleware_name__") and m.__middleware_name__ == __name__
     if not ok:
         # TODO: name all middleware and list middleware in log
-        log.critical("Monitoring middleware expected in the outermost layer."
-                     "TIP: Check setup order")
+        log.critical(
+            "Monitoring middleware expected in the outermost layer."
+            "TIP: Check setup order"
+        )
+
 
 def setup_monitoring(app: web.Application, app_name: str):
     # NOTE: prometheus_client registers metrics in **globals**. Therefore
     # tests might fail when fixtures get re-created
 
     # Total number of requests processed
-    app['REQUEST_COUNT'] = Counter(
-        'http_requests_total', 'Total Request Count',
-        ['app_name', 'method', 'endpoint', 'http_status']
+    app["REQUEST_COUNT"] = Counter(
+        "http_requests_total",
+        "Total Request Count",
+        ["app_name", "method", "endpoint", "http_status"],
     )
 
     # Latency of a request in seconds
-    app['REQUEST_LATENCY'] = Histogram(
-        'http_request_latency_seconds', 'Request latency',
-        ['app_name', 'endpoint']
+    app["REQUEST_LATENCY"] = Histogram(
+        "http_request_latency_seconds", "Request latency", ["app_name", "endpoint"]
     )
 
     # Number of requests in progress
-    app['REQUEST_IN_PROGRESS']=Gauge(
-        'http_requests_in_progress_total', 'Requests in progress',
-        ['app_name', 'endpoint', 'method']
+    app["REQUEST_IN_PROGRESS"] = Gauge(
+        "http_requests_in_progress_total",
+        "Requests in progress",
+        ["app_name", "endpoint", "method"],
     )
 
     # ensures is first layer but cannot guarantee the order setup is applied
diff --git a/packages/service-library/src/servicelib/observer.py b/packages/service-library/src/servicelib/observer.py
index 43567162f2b..5eb47c1ecd3 100644
--- a/packages/service-library/src/servicelib/observer.py
+++ b/packages/service-library/src/servicelib/observer.py
@@ -12,6 +12,7 @@
 
 event_registry = defaultdict(list)
 
+
 async def emit(event: str, *args, **kwargs):
     if not event_registry[event]:
         return
@@ -20,6 +21,7 @@ async def emit(event: str, *args, **kwargs):
     # all coroutine called in //
     await asyncio.gather(*coroutines, return_exceptions=True)
 
+
 def observe(event: str):
     def decorator(func):
         if func not in event_registry[event]:
@@ -29,5 +31,7 @@ def decorator(func):
         @wraps(func)
         def wrapped(*args, **kwargs):
             return func(*args, **kwargs)
+
         return wrapped
+
     return decorator
diff --git a/packages/service-library/src/servicelib/openapi.py b/packages/service-library/src/servicelib/openapi.py
index 300a4b153eb..c7f52e1b97a 100644
--- a/packages/service-library/src/servicelib/openapi.py
+++ b/packages/service-library/src/servicelib/openapi.py
@@ -15,13 +15,17 @@
 
 # Supported version of openapi (last number indicates only editorial changes)
 # TODO: ensure openapi_core.__version__ is up-to-date with OAI_VERSION
-OAI_VERSION = '3.0.2'
-OAI_VERSION_URL = 'https://github.com/OAI/OpenAPI-Specification/blob/master/versions/%s.md'%OAI_VERSION
+OAI_VERSION = "3.0.2"
+OAI_VERSION_URL = (
+    "https://github.com/OAI/OpenAPI-Specification/blob/master/versions/%s.md"
+    % OAI_VERSION
+)
 
 # alias
 OpenApiSpec = Spec
 
-def get_base_path(specs: OpenApiSpec) ->str:
+
+def get_base_path(specs: OpenApiSpec) -> str:
     """ Expected API basepath
 
     By convention, the API basepath indicates the major
@@ -33,7 +37,7 @@ def get_base_path(specs: OpenApiSpec) ->str:
     :rtype: str
     """
     # TODO: guarantee this convention is true
-    return '/v' + specs.info.version.split('.')[0]
+    return "/v" + specs.info.version.split(".")[0]
 
 
 # TODO: _load_from_* is also found in jsonshema_specs
@@ -42,6 +46,7 @@ def _load_from_path(filepath: Path) -> Tuple[Dict, str]:
         spec_dict = yaml.safe_load(f)
         return spec_dict, filepath.as_uri()
 
+
 async def _load_from_url(session: ClientSession, url: URL) -> Tuple[Dict, str]:
     async with session.get(url) as resp:
         text = await resp.text()
@@ -49,7 +54,7 @@ async def _load_from_url(session: ClientSession, url: URL) -> Tuple[Dict, str]:
         return spec_dict, str(url)
 
 
-async def create_openapi_specs(location, session: ClientSession=None) -> OpenApiSpec:
+async def create_openapi_specs(location, session: ClientSession = None) -> OpenApiSpec:
     """ Loads specs from a given location (url or path),
         validates them and returns a working instance
 
@@ -69,17 +74,14 @@ async def create_openapi_specs(location, session: ClientSession=None) -> OpenApi
             raise ValueError("Client session required in arguments")
         spec_dict, spec_url = await _load_from_url(session, URL(location))
     else:
-        path = Path(location).expanduser().resolve() #pylint: disable=no-member
+        path = Path(location).expanduser().resolve()  # pylint: disable=no-member
         spec_dict, spec_url = _load_from_path(path)
 
     return openapi_core.create_spec(spec_dict, spec_url)
 
 
-
 def create_specs(openapi_path: Path) -> OpenApiSpec:
-    warnings.warn("Use instead create_openapi_specs",
-                  category=DeprecationWarning)
-
+    warnings.warn("Use instead create_openapi_specs", category=DeprecationWarning)
 
     # TODO: spec_from_file and spec_from_url
     with openapi_path.open() as f:
@@ -89,11 +91,10 @@ def create_specs(openapi_path: Path) -> OpenApiSpec:
         return spec
 
 
-
 __all__ = (
-    'get_base_path',
-    'create_openapi_specs',
-    'OpenApiSpec',
-    'OpenAPIError',
-    'OpenAPIMappingError'
+    "get_base_path",
+    "create_openapi_specs",
+    "OpenApiSpec",
+    "OpenAPIError",
+    "OpenAPIMappingError",
 )
diff --git a/packages/service-library/src/servicelib/openapi_servers.py b/packages/service-library/src/servicelib/openapi_servers.py
index 9153e51912f..7b965d7cb54 100644
--- a/packages/service-library/src/servicelib/openapi_servers.py
+++ b/packages/service-library/src/servicelib/openapi_servers.py
@@ -1,6 +1,3 @@
-
-
-
 def get_server(servers, url):
     # Development server: http://{host}:{port}/{basePath}
     for server in servers:
diff --git a/packages/service-library/src/servicelib/openapi_validation.py b/packages/service-library/src/servicelib/openapi_validation.py
index fe640fd0c6e..9266fedfd93 100644
--- a/packages/service-library/src/servicelib/openapi_validation.py
+++ b/packages/service-library/src/servicelib/openapi_validation.py
@@ -11,8 +11,11 @@
 from openapi_core.validation.request.validators import RequestValidator
 from openapi_core.validation.response.validators import ResponseValidator
 
-from .openapi_wrappers import (PARAMETERS_KEYS, AiohttpOpenAPIRequest,
-                               AiohttpOpenAPIResponse)
+from .openapi_wrappers import (
+    PARAMETERS_KEYS,
+    AiohttpOpenAPIRequest,
+    AiohttpOpenAPIResponse,
+)
 
 logger = logging.getLogger(__name__)
 
@@ -32,28 +35,30 @@ async def validate_request(request: web.Request, spec: OpenApiSpec):
 
     return result.parameters, result.body, result.errors
 
+
 async def validate_parameters(spec: OpenApiSpec, request: web.Request):
     req = await AiohttpOpenAPIRequest.create(request)
     return shortcuts.validate_parameters(spec, req)
 
+
 async def validate_body(spec: OpenApiSpec, request: web.Request):
     req = await AiohttpOpenAPIRequest.create(request)
     return shortcuts.validate_body(spec, req)
 
+
 async def validate_data(spec: OpenApiSpec, request, response: web.Response):
 
     if isinstance(request, web.Request):
         req = await AiohttpOpenAPIRequest.create(request)
     else:
         # TODO: alternative MockRequest
-        #params = ['host_url', 'method', 'path']
-        #opapi_request = MockRequest(*args)
+        # params = ['host_url', 'method', 'path']
+        # opapi_request = MockRequest(*args)
 
-        params = ['full_url_pattern', 'method']
-        assert all(hasattr(request, attr) for attr in params) # nosec
+        params = ["full_url_pattern", "method"]
+        assert all(hasattr(request, attr) for attr in params)  # nosec
         # TODO: if a dict with params, convert dict to dot operations! and reverse
-
         req = request
 
     res = await AiohttpOpenAPIResponse.create(response)
@@ -65,7 +70,10 @@ async def validate_data(spec: OpenApiSpec, request, response: web.Response):
 
     return result.data
 
-async def validate_response(spec: OpenApiSpec, request: web.Request, response: web.Response):
+
+async def validate_response(
+    spec: OpenApiSpec, request: web.Request, response: web.Response
+):
     """
         Validates server response against openapi specs
 
@@ -74,12 +82,11 @@ async def validate_response(spec: OpenApiSpec, request: web.Request, response: w
     validator = ResponseValidator(spec)
 
     req = await AiohttpOpenAPIRequest.create(request)
-    res = AiohttpOpenAPIResponse(response, response.text) # FIXME:ONLY IN SERVER side. Async in client!
+    res = AiohttpOpenAPIResponse(
+        response, response.text
+    )  # FIXME:ONLY IN SERVER side. Async in client!
     result = validator.validate(req, res)
     result.raise_for_errors()
 
 
-__all__ = (
-    'validate_request',
-    'validate_data'
-)
+__all__ = ("validate_request", "validate_data")
diff --git a/packages/service-library/src/servicelib/openapi_wrappers.py b/packages/service-library/src/servicelib/openapi_wrappers.py
index bd116afc4f4..ad090e0f531 100644
--- a/packages/service-library/src/servicelib/openapi_wrappers.py
+++ b/packages/service-library/src/servicelib/openapi_wrappers.py
@@ -10,14 +10,14 @@
 
 log = logging.getLogger(__name__)
 
-CAPTURES = re.compile(r'\(\?P<([_a-zA-Z][_a-zA-Z0-9]+)>(.[^)]+)\)')
-PARAMETERS_KEYS = ('path', 'query', 'header', 'cookie')
+CAPTURES = re.compile(r"\(\?P<([_a-zA-Z][_a-zA-Z0-9]+)>(.[^)]+)\)")
+PARAMETERS_KEYS = ("path", "query", "header", "cookie")
 PATH_KEY, QUERY_KEY, HEADER_KEY, COOKIE_KEY = PARAMETERS_KEYS
 
+
 class AiohttpOpenAPIRequest(BaseOpenAPIRequest):
     wrappedcls = web.Request
 
-
     def __init__(self, request: web.Request, data: str):
         self._request = request
         self._body = data
@@ -48,18 +48,18 @@ def path_pattern(self):
         info = match_info.get_info()
 
         # if PlainResource then
-        path_pattern = info.get('path')
+        path_pattern = info.get("path")
 
         # if DynamicResource then whe need to undo the conversion to formatter and pattern
         if not path_pattern:
-            formatter = info.get('formatter')
-            re_pattern = info.get('pattern').pattern
+            formatter = info.get("formatter")
+            re_pattern = info.get("pattern").pattern
             kargs = {}
             # TODO: create a test with '/my/tokens/{service}/'
             # TODO: create a test with '/my/tokens/{service:google|facebook}/'
             # TODO: create a test with '/my/tokens/{identifier:\d+}/'
             for key, value in CAPTURES.findall(re_pattern):
-                if value == '[^{}/]+': # = no re in pattern
+                if value == "[^{}/]+":  # = no re in pattern
                     kargs[key] = "{%s}" % (key)
                 else:
                     kargs[key] = "{%s:%s}" % (key, value)
diff --git a/packages/service-library/src/servicelib/request_keys.py b/packages/service-library/src/servicelib/request_keys.py
index 1185c7fa124..89a3b6a2777 100644
--- a/packages/service-library/src/servicelib/request_keys.py
+++ b/packages/service-library/src/servicelib/request_keys.py
@@ -3,6 +3,4 @@
 """
 
 # RQT=request
-RQT_USERID_KEY = __name__ + '.userid'
-
-
+RQT_USERID_KEY = __name__ + ".userid"
diff --git a/packages/service-library/src/servicelib/requests_utils.py b/packages/service-library/src/servicelib/requests_utils.py
index e8e8e78b508..c1ebbbb4cc1 100644
--- a/packages/service-library/src/servicelib/requests_utils.py
+++ b/packages/service-library/src/servicelib/requests_utils.py
@@ -5,10 +5,12 @@
 
 def get_request(*args, **kwargs) -> web.BaseRequest:
     """ Helper for handler function decorators to retrieve requests """
-    request = kwargs.get('request', args[-1] if args else None)
+    request = kwargs.get("request", args[-1] if args else None)
     if not isinstance(request, web.BaseRequest):
-        msg = ("Incorrect decorator usage. "
-               "Expecting `def handler(request)` "
-               "or `def handler(self, request)`.")
+        msg = (
+            "Incorrect decorator usage. "
+            "Expecting `def handler(request)` "
+            "or `def handler(self, request)`."
+        )
         raise RuntimeError(msg)
     return request
diff --git a/packages/service-library/src/servicelib/resources.py b/packages/service-library/src/servicelib/resources.py
index c9c3991a2ea..b382ea2860d 100644
--- a/packages/service-library/src/servicelib/resources.py
+++ b/packages/service-library/src/servicelib/resources.py
@@ -17,6 +17,7 @@ class ResourcesFacade:
 
     Resources are read-only files/folders
     """
+
     package_name: str
     distribution_name: str
     config_folder: str
@@ -40,7 +41,9 @@ def get_path(self, resource_name: str) -> Path:
             WARNING: existence of file is not guaranteed. Use resources.exists
             WARNING: resource files are supposed to be used as read-only!
         """
-        resource_path = pathlib.Path( pkg_resources.resource_filename(self.package_name, resource_name) )
+        resource_path = pathlib.Path(
+            pkg_resources.resource_filename(self.package_name, resource_name)
+        )
         return resource_path
 
     def get_distribution(self):
@@ -48,13 +51,13 @@ def get_distribution(self):
         return pkg_resources.get_distribution(self.distribution_name)
 
 
-
 # TODO: create abc
 @attr.s(auto_attribs=True)
 class FileResource:
     """
         TODO: lazy evaluation of attribs
     """
+
     name: str
diff --git a/packages/service-library/src/servicelib/rest_codecs.py b/packages/service-library/src/servicelib/rest_codecs.py
index 34f33c187e2..4e6effba714 100644
--- a/packages/service-library/src/servicelib/rest_codecs.py
+++ b/packages/service-library/src/servicelib/rest_codecs.py
@@ -14,7 +14,8 @@ class DataEncoder(json.JSONEncoder):
 
         TODO: extend to more types like apiset
     """
-    def default(self, o): #pylint: disable=E0202
+
+    def default(self, o):  # pylint: disable=E0202
         if attr.has(o.__class__):
             return attr.asdict(o)
         return json.JSONEncoder.default(self, o)
diff --git a/packages/service-library/src/servicelib/rest_middlewares.py b/packages/service-library/src/servicelib/rest_middlewares.py
index 09e69aeda22..a929dfa7046 100644
--- a/packages/service-library/src/servicelib/rest_middlewares.py
+++ b/packages/service-library/src/servicelib/rest_middlewares.py
@@ -9,9 +9,14 @@
 from openapi_core.schema.exceptions import OpenAPIError
 
 from .rest_models import ErrorItemType, ErrorType, LogMessageType
-from .rest_responses import (JSON_CONTENT_TYPE, create_data_response,
-                             create_error_response, is_enveloped_from_map,
-                             is_enveloped_from_text, wrap_as_envelope)
+from .rest_responses import (
+    JSON_CONTENT_TYPE,
+    create_data_response,
+    create_error_response,
+    is_enveloped_from_map,
+    is_enveloped_from_text,
+    wrap_as_envelope,
+)
 from .rest_utils import EnvelopeFactory
 from .rest_validators import OpenApiValidator
 
@@ -29,15 +34,16 @@ def is_api_request(request: web.Request, api_version: str) -> bool:
 def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception):
     # FIXME: send info + trace to client ONLY in debug mode!!!
     resp = create_error_response(
-        [err,],
-        "Unexpected Server error",
-        web.HTTPInternalServerError
-    )
+        [err,], "Unexpected Server error", web.HTTPInternalServerError
+    )
 
-    logger.exception('Unexpected server error "%s" from access: %s "%s %s". Responding with status %s',
+    logger.exception(
+        'Unexpected server error "%s" from access: %s "%s %s". Responding with status %s',
         type(err),
-        request.remote, request.method, request.path,
-        resp.status
+        request.remote,
+        request.method,
+        request.path,
+        resp.status,
     )
     raise resp
 
@@ -65,9 +71,9 @@ async def _middleware(request: web.Request, handler):
 
             if not err.text or not is_enveloped_from_text(err.text):
                 error = ErrorType(
-                    errors=[ErrorItemType.from_error(err), ],
+                    errors=[ErrorItemType.from_error(err),],
                     status=err.status,
-                    logs=[LogMessageType(message=err.reason, level="ERROR"), ]
+                    logs=[LogMessageType(message=err.reason, level="ERROR"),],
                 )
                 err.text = EnvelopeFactory(error=error).as_text()
 
@@ -89,7 +95,7 @@ async def _middleware(request: web.Request, handler):
             logger.debug("Redirected to %s", ex)
             raise
 
-        except Exception as err: # pylint: disable=broad-except
+        except Exception as err:  # pylint: disable=broad-except
             _process_and_raise_unexpected_error(request, err)
 
     return _middleware
@@ -106,8 +112,7 @@ async def _middleware(request: web.Request, handler):
             return await handler(request)
 
         # TODO: move this outside!
-        RQ_VALIDATED_DATA_KEYS = (
-            "validated-path", "validated-query", "validated-body")
+        RQ_VALIDATED_DATA_KEYS = ("validated-path", "validated-query", "validated-body")
 
         try:
             validator = OpenApiValidator.create(request.app, api_version)
@@ -156,15 +161,18 @@ async def _middleware(request: web.Request, handler):
             # Enforced by user. Should check it is json?
             response = resp
         return response
+
     return _middleware
 
 
-def append_rest_middlewares(app: web.Application, api_version: str = DEFAULT_API_VERSION):
+def append_rest_middlewares(
+    app: web.Application, api_version: str = DEFAULT_API_VERSION
+):
     """ Helper that appends rest-middlewares in the correct order """
     app.middlewares.append(error_middleware_factory(api_version))
     # FIXME:  openapi-core fails to validate response when specs are in separate files!
     # FIXME: disabled so webserver and storage do not get this issue
-    #app.middlewares.append(validate_middleware_factory(api_version))
+    # app.middlewares.append(validate_middleware_factory(api_version))
     app.middlewares.append(envelope_middleware_factory(api_version))
diff --git a/packages/service-library/src/servicelib/rest_models.py b/packages/service-library/src/servicelib/rest_models.py
index 269629e6ae8..a83abf15329 100644
--- a/packages/service-library/src/servicelib/rest_models.py
+++ b/packages/service-library/src/servicelib/rest_models.py
@@ -1,4 +1,3 @@
-
 """ rest - common schema models and classes
 
     UNDER DEVELOPMENT
@@ -17,8 +16,8 @@
 @attr.s(auto_attribs=True)
 class LogMessageType:
     message: str
-    level: str = 'INFO'
-    logger: str = 'user'
+    level: str = "INFO"
+    logger: str = "user"
 
 
 @attr.s(auto_attribs=True)
@@ -30,11 +29,9 @@ class ErrorItemType:
 
     @classmethod
     def from_error(cls, err: BaseException):
-        item = cls(code=err.__class__.__name__,
-                   message=str(err),
-                   resource=None,
-                   field=None
-                   )
+        item = cls(
+            code=err.__class__.__name__, message=str(err), resource=None, field=None
+        )
         return item
 
 
diff --git a/packages/service-library/src/servicelib/rest_oas.py b/packages/service-library/src/servicelib/rest_oas.py
index 25299e20b1f..eb51bfbf53e 100644
--- a/packages/service-library/src/servicelib/rest_oas.py
+++ b/packages/service-library/src/servicelib/rest_oas.py
@@ -16,16 +16,12 @@ def set_specs(app: web.Application, specs: Spec) -> Spec:
     app[APP_OPENAPI_SPECS_KEY] = specs
     return app[APP_OPENAPI_SPECS_KEY]
 
+
 def get_specs(app: web.Application) -> Spec:
     # TODO consider the case of multiple versions of spec -> Dict[Spec] ??
     return app[APP_OPENAPI_SPECS_KEY]
 
 
-
 OpenApiSpec = Spec
 
-__all__ = (
-    'set_specs', 'get_specs',
-    'OpenApiSpec',
-    'create_specs'
-)
+__all__ = ("set_specs", "get_specs", "OpenApiSpec", "create_specs")
diff --git a/packages/service-library/src/servicelib/rest_responses.py b/packages/service-library/src/servicelib/rest_responses.py
index 62a767dafd9..032b457ef33 100644
--- a/packages/service-library/src/servicelib/rest_responses.py
+++ b/packages/service-library/src/servicelib/rest_responses.py
@@ -11,12 +11,14 @@
 from .rest_codecs import jsonify, json
 from .rest_models import ErrorItemType, ErrorType
 
-ENVELOPE_KEYS = ('data', 'error')
-JSON_CONTENT_TYPE = 'application/json'
+ENVELOPE_KEYS = ("data", "error")
+JSON_CONTENT_TYPE = "application/json"
+
 
 def is_enveloped_from_map(payload: Mapping) -> bool:
     return all(k in ENVELOPE_KEYS for k in payload.keys())
 
+
 def is_enveloped_from_text(text: str) -> bool:
     try:
         payload = json.loads(text)
@@ -24,6 +26,7 @@ def is_enveloped_from_text(text: str) -> bool:
         return False
     return is_enveloped_from_map(payload)
 
+
 def is_enveloped(payload) -> bool:
     if isinstance(payload, Mapping):
         return is_enveloped_from_map(payload)
@@ -38,9 +41,9 @@ def wrap_as_envelope(data=None, error=None, as_null=True):
     """
     payload = {}
     if data or as_null:
-        payload['data'] = data
+        payload["data"] = data
     if error or as_null:
-        payload['error'] = error
+        payload["error"] = error
     return payload
 
 
@@ -53,6 +56,7 @@ def unwrap_envelope(payload: Dict) -> Tuple:
 
 # RESPONSES FACTORIES -------------------------------
 
+
 def create_data_response(data) -> web.Response:
     response = None
     try:
@@ -63,18 +67,15 @@ def create_data_response(data) -> web.Response:
 
         response = web.json_response(payload, dumps=jsonify)
     except (TypeError, ValueError) as err:
-        response = create_error_response(
-            [err,],
-            str(err),
-            web.HTTPInternalServerError
-        )
+        response = create_error_response([err,], str(err), web.HTTPInternalServerError)
     return response
 
 
 def create_error_response(
-        errors: List[Exception],
-        reason: Optional[str]=None,
-        error_cls: Optional[web.HTTPError]=None ) -> web.HTTPError:
+    errors: List[Exception],
+    reason: Optional[str] = None,
+    error_cls: Optional[web.HTTPError] = None,
+) -> web.HTTPError:
     # TODO: guarantee no throw!
     if error_cls is None:
         error_cls = web.HTTPInternalServerError
@@ -82,15 +83,13 @@ def create_error_response(
     # TODO: assumes openapi error model!!!
     error = ErrorType(
         errors=[ErrorItemType.from_error(err) for err in errors],
-        status=error_cls.status_code
+        status=error_cls.status_code,
     )
 
     payload = wrap_as_envelope(error=attr.asdict(error))
 
     response = error_cls(
-        reason=reason,
-        text=jsonify(payload),
-        content_type=JSON_CONTENT_TYPE
+        reason=reason, text=jsonify(payload), content_type=JSON_CONTENT_TYPE
     )
 
     return response
@@ -103,8 +102,5 @@ def create_log_response(msg: str, level: str) -> web.Response:
     """
     # TODO: link more with real logger
     msg = LogMessageType(msg, level)
-    response = web.json_response(data={
-        'data': attr.asdict(msg),
-        'error': None
-    })
+    response = web.json_response(data={"data": attr.asdict(msg), "error": None})
     return response
diff --git a/packages/service-library/src/servicelib/rest_routing.py b/packages/service-library/src/servicelib/rest_routing.py
index 35ac96a47f9..675bbe3c93a 100644
--- a/packages/service-library/src/servicelib/rest_routing.py
+++ b/packages/service-library/src/servicelib/rest_routing.py
@@ -17,8 +17,10 @@
 
 def has_handler_signature(fun) -> bool:
     # TODO: last parameter is web.Request or called request?
-    return any(param.annotation == web.Request
-               for name, param in inspect.signature(fun).parameters.items())
+    return any(
+        param.annotation == web.Request
+        for name, param in inspect.signature(fun).parameters.items()
+    )
 
 
 def get_handlers_from_namespace(handlers_nsp) -> Dict:
@@ -31,7 +33,9 @@
     elif hasattr(handlers_nsp, "__class__"):
         predicate = lambda obj: inspect.ismethod(obj) and has_handler_signature(obj)
     else:
-        raise ValueError("Expected module or class as namespace, got %s" % type(handlers_nsp))
+        raise ValueError(
+            "Expected module or class as namespace, got %s" % type(handlers_nsp)
+        )
 
     name_to_handler_map = dict(inspect.getmembers(handlers_nsp, predicate))
     return name_to_handler_map
@@ -46,14 +50,15 @@
 
     for url, path in specs.paths.items():
         for method, operation in path.operations.items():
-            yield method.upper(), base_path+url, operation.operation_id, operation.tags
+            yield method.upper(), base_path + url, operation.operation_id, operation.tags
 
 
 def map_handlers_with_operations(
-        handlers_map: Mapping[str, Callable],
-        operations_it: Generator,
-        * ,
-        strict: bool=True) -> List[web.RouteDef]:
+    handlers_map: Mapping[str, Callable],
+    operations_it: Generator,
+    *,
+    strict: bool = True
+) -> List[web.RouteDef]:
     """ Matches operation ids with handler names and returns a list of routes
 
     :param handlers_map: .See get_handlers_from_namespace
@@ -72,24 +77,23 @@
     for method, path, operation_id, _tags in operations_it:
         handler = handlers.pop(operation_id, None)
         if handler:
-            routes.append( web.route(method.upper(), path, handler, name=operation_id) )
+            routes.append(web.route(method.upper(), path, handler, name=operation_id))
         elif strict:
             raise ValueError("Cannot find any handler named {} ".format(operation_id))
 
     if handlers and strict:
-        raise RuntimeError("{} handlers were not mapped to routes: {}".format(
-            len(handlers),
-            handlers.keys())
+        raise RuntimeError(
+            "{} handlers were not mapped to routes: {}".format(
+                len(handlers), handlers.keys()
             )
+        )
 
     return routes
 
 
 def create_routes_from_namespace(
-        specs: OpenApiSpec,
-        handlers_nsp,
-        *,
-        strict: bool=True) -> List[web.RouteDef]:
+    specs: OpenApiSpec, handlers_nsp, *, strict: bool = True
+) -> List[web.RouteDef]:
     """ Gets *all* available handlers and maps one-to-one to *all* specs routes
 
     :param specs: openapi spec object
@@ -104,6 +108,8 @@
     if not handlers and strict:
         raise ValueError("No handlers found in %s" % handlers_nsp)
 
-    routes = map_handlers_with_operations(handlers, iter_path_operations(specs), strict=strict)
+    routes = map_handlers_with_operations(
+        handlers, iter_path_operations(specs), strict=strict
+    )
 
     return routes
diff --git a/packages/service-library/src/servicelib/rest_utils.py b/packages/service-library/src/servicelib/rest_utils.py
index 41ffb47cf5d..f0e13249da1 100644
--- a/packages/service-library/src/servicelib/rest_utils.py
+++ b/packages/service-library/src/servicelib/rest_utils.py
@@ -11,8 +11,13 @@
 from aiohttp import web
 from openapi_core.extensions.models.factories import Model as BodyModel
 
-from .openapi_validation import (COOKIE_KEY, HEADER_KEY, PATH_KEY, QUERY_KEY,
-                                 validate_request)
+from .openapi_validation import (
+    COOKIE_KEY,
+    HEADER_KEY,
+    PATH_KEY,
+    QUERY_KEY,
+    validate_request,
+)
 from .rest_models import ErrorItemType, ErrorType
 from .rest_oas import get_specs
 
@@ -20,8 +25,8 @@ def body_to_dict(body: BodyModel) -> Dict:
     # openapi_core.extensions.models.factories.Model -> dict
     dikt = {}
    for k, v in body.__dict__.items():
-    for k,v in body.__dict__.items():
-        if hasattr(v, '__dict__'):
+    for k, v in body.__dict__.items():
+        if hasattr(v, "__dict__"):
             v = body_to_dict(v)
         dikt[k] = v
     return dikt
@@ -33,8 +38,9 @@ class EnvelopeFactory:
         as suggested in https://medium.com/studioarmix/learn-restful-api-design-ideals-c5ec915a430f
     """
+
     def __init__(self, data=None, error=None):
-        enveloped = {'data': data, 'error': error}
+        enveloped = {"data": data, "error": error}
         for key, value in enveloped.items():
             if value is not None and not isinstance(value, dict):
                 enveloped[key] = attr.asdict(value)
@@ -62,20 +68,15 @@ async def extract_and_validate(request: web.Request):
     if errors:
         error = ErrorType(
             errors=[ErrorItemType.from_error(err) for err in errors],
-            status=web.HTTPBadRequest.status_code
+            status=web.HTTPBadRequest.status_code,
         )
         raise web.HTTPBadRequest(
             reason="Failed request validation against API specs",
             text=EnvelopeFactory(error=error).as_text(),
-            content_type='application/json',
-            )
+            content_type="application/json",
+        )
 
     return params[PATH_KEY], params[QUERY_KEY], body
 
 
-__all__ = (
-    'COOKIE_KEY',
-    'HEADER_KEY',
-    'PATH_KEY',
-    'QUERY_KEY'
-)
+__all__ = ("COOKIE_KEY", "HEADER_KEY", "PATH_KEY", "QUERY_KEY")
diff --git a/packages/service-library/src/servicelib/rest_validators.py b/packages/service-library/src/servicelib/rest_validators.py
index 950897bb3a4..7151e6ba17b 100644
--- a/packages/service-library/src/servicelib/rest_validators.py
+++ b/packages/service-library/src/servicelib/rest_validators.py
@@ -1,10 +1,13 @@
-
 from aiohttp import web
 from openapi_core.validation.request.validators import RequestValidator
 from openapi_core.validation.response.validators import ResponseValidator
 
-from .openapi_wrappers import (PATH_KEY, QUERY_KEY, AiohttpOpenAPIRequest,
-                               AiohttpOpenAPIResponse)
+from .openapi_wrappers import (
+    PATH_KEY,
+    QUERY_KEY,
+    AiohttpOpenAPIRequest,
+    AiohttpOpenAPIResponse,
+)
 from .rest_oas import OpenApiSpec, get_specs
 from .rest_responses import create_error_response
 
@@ -13,6 +16,7 @@ class OpenApiValidator:
     """
         Used to validate data in the request->response cycle against openapi specs
     """
+
     @classmethod
     def create(cls, app: web.Application, _version=""):
         specs = get_specs(app)
@@ -24,7 +28,7 @@ def __init__(self, spec: OpenApiSpec):
         self._resvtor = ResponseValidator(spec, custom_formatters=None)
 
         # Current
-        self.current_request = None # wrapper request
+        self.current_request = None  # wrapper request
 
     async def check_request(self, request: web.Request):
         self.current_request = None
@@ -36,22 +40,28 @@
         self.current_request = rq
 
         if result.errors:
-            err = create_error_response(result.errors,
-                                        "Failed request validation against API specs",
-                                        web.HTTPBadRequest)
+            err = create_error_response(
+                result.errors,
+                "Failed request validation against API specs",
+                web.HTTPBadRequest,
+            )
             raise err
 
-        path, query = [ result.parameters[k] for k in (PATH_KEY, QUERY_KEY) ]
+        path, query = [result.parameters[k] for k in (PATH_KEY, QUERY_KEY)]
 
         return path, query, result.body
 
     def check_response(self, response: web.Response):
         req = self.current_request
-        res = AiohttpOpenAPIResponse(response, response.text) # FIXME:ONLY IN SERVER side. Async in client!
+        res = AiohttpOpenAPIResponse(
+            response, response.text
+        )  # FIXME:ONLY IN SERVER side. Async in client!
 
         result = self._resvtor.validate(req, res)
         if result.errors:
-            err = create_error_response(result.errors,
-                                        "Failed response validation against API specs",
-                                        web.HTTPServiceUnavailable)
+            err = create_error_response(
+                result.errors,
+                "Failed response validation against API specs",
+                web.HTTPServiceUnavailable,
+            )
             raise err
diff --git a/packages/service-library/src/servicelib/tracing.py b/packages/service-library/src/servicelib/tracing.py
index 94f650336df..caf531c4bef 100644
--- a/packages/service-library/src/servicelib/tracing.py
+++ b/packages/service-library/src/servicelib/tracing.py
@@ -15,16 +15,23 @@
 
 log = logging.getLogger(__name__)
 
-def setup_tracing(app: web.Application, app_name: str, host: str, port: str, config: Dict) -> bool:
+
+def setup_tracing(
+    app: web.Application, app_name: str, host: str, port: str, config: Dict
+) -> bool:
     zipkin_address = f"{config['zipkin_endpoint']}/api/v2/spans"
     endpoint = az.create_endpoint(app_name, ipv4=host, port=port)
     loop = asyncio.get_event_loop()
-    tracer = loop.run_until_complete(az.create(zipkin_address, endpoint, sample_rate=1.0))
+    tracer = loop.run_until_complete(
+        az.create(zipkin_address, endpoint, sample_rate=1.0)
+    )
     az.setup(app, tracer)
     return True
 
-schema = T.Dict({
-    T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int),
-    T.Key('zipkin_endpoint', default="http://jaeger:9411"): T.String()
+
+schema = T.Dict(
+    {
+        T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int),
+        T.Key("zipkin_endpoint", default="http://jaeger:9411"): T.String(),
     }
 )
diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py
index 5e013f139bf..b1abb8e53a0 100644
--- a/packages/service-library/src/servicelib/utils.py
+++ b/packages/service-library/src/servicelib/utils.py
@@ -22,7 +22,7 @@ def search_osparc_repo_dir(start, max_iterations=8):
     max_iterations = max(max_iterations, 1)
     root_dir = Path(start)
     iteration_number = 0
-    while not
is_osparc_repo_dir(root_dir) and iteration_number < max_iterations:

 def app_config() -> Dict:
     return {
-        'foo': { "enabled": True },
-        'bar': { "enabled": False },
-        'main':{ 'zee_enabled': True }
+        "foo": {"enabled": True},
+        "bar": {"enabled": False},
+        "main": {"zee_enabled": True},
     }
 
+
 @pytest.fixture
 def app(app_config):
     _app = web.Application()
@@ -61,8 +64,8 @@ def app(app_config):
 def test_setup_config_enabled(app_config, app):
     assert setup_zee(app, 1)
 
-    assert setup_zee.metadata()['config_enabled'] == "main.zee_enabled"
-    app_config['main']['zee_enabled'] = False
+    assert setup_zee.metadata()["config_enabled"] == "main.zee_enabled"
+    app_config["main"]["zee_enabled"] = False
     assert not setup_zee(app, 2)
 
 
@@ -74,14 +77,16 @@ def test_setup_dependencies(app_config, app):
     assert setup_foo(app, 1)
     assert setup_needs_foo(app, 2)
 
-    assert setup_needs_foo.metadata()['dependencies'] == [setup_foo.metadata()['module_name'], ]
+    assert setup_needs_foo.metadata()["dependencies"] == [
+        setup_foo.metadata()["module_name"],
+    ]
 
 
 def test_marked_setup(app_config, app):
     assert setup_foo(app, 1)
 
-    assert setup_foo.metadata()['module_name'] == 'package.foo'
-    assert setup_foo.metadata()['module_name'] in app[APP_SETUP_KEY]
+    assert setup_foo.metadata()["module_name"] == "package.foo"
+    assert setup_foo.metadata()["module_name"] in app[APP_SETUP_KEY]
 
-    app_config['foo']['enabled'] = False
+    app_config["foo"]["enabled"] = False
     assert not setup_foo(app, 2)
diff --git a/packages/service-library/tests/test_decorators.py b/packages/service-library/tests/test_decorators.py
index 69142b6c30c..a6a6cae3a2b 100644
--- a/packages/service-library/tests/test_decorators.py
+++ b/packages/service-library/tests/test_decorators.py
@@ -11,12 +11,13 @@ def raise_my_exception():
     assert not raise_my_exception()
 
+
 def test_safe_return_mutables():
-    some_mutable_return = ['some', 'defaults']
+    some_mutable_return = ["some", "defaults"]
 
     @safe_return(if_fails_return=some_mutable_return)
     def return_mutable():
         raise RuntimeError("Runtime is default")
 
-    assert return_mutable() == some_mutable_return # contains the same
-    assert not (return_mutable() is some_mutable_return) # but is not the same
+    assert return_mutable() == some_mutable_return  # contains the same
+    assert not (return_mutable() is some_mutable_return)  # but is not the same
diff --git a/packages/service-library/tests/test_openapi_validation.py b/packages/service-library/tests/test_openapi_validation.py
index 195ea584025..9d53d216eba 100644
--- a/packages/service-library/tests/test_openapi_validation.py
+++ b/packages/service-library/tests/test_openapi_validation.py
@@ -11,9 +11,11 @@
 from servicelib import openapi
 from servicelib.application_keys import APP_OPENAPI_SPECS_KEY
-from servicelib.rest_middlewares import (envelope_middleware_factory,
-                                         error_middleware_factory,
-                                         validate_middleware_factory)
+from servicelib.rest_middlewares import (
+    envelope_middleware_factory,
+    error_middleware_factory,
+    validate_middleware_factory,
+)
 from servicelib.rest_responses import is_enveloped, unwrap_envelope
 from servicelib.rest_routing import create_routes_from_namespace
 from tutils import Handlers
@@ -26,6 +28,7 @@ async def specs(loop, here):
     specs = await openapi.create_openapi_specs(openapi_path)
     return specs
 
+
 @pytest.fixture
 def client(loop, aiohttp_client, specs):
     app = web.Application()
@@ -48,19 +51,12 @@ def client(loop, aiohttp_client, specs):
 
     return loop.run_until_complete(aiohttp_client(app))
 
-
-@pytest.mark.parametrize("path", [
-    "/health",
-    "/dict",
-    "/envelope",
-    "/list",
-
"/attobj", - "/string", - "/number", -]) +@pytest.mark.parametrize( + "path", ["/health", "/dict", "/envelope", "/list", "/attobj", "/string", "/number",] +) async def test_validate_handlers(path, client, specs): base = openapi.get_base_path(specs) - response = await client.get(base+path) + response = await client.get(base + path) payload = await response.json() assert is_enveloped(payload) @@ -70,12 +66,12 @@ async def test_validate_handlers(path, client, specs): assert data - -#"/mixed" FIXME: openapi core bug reported in https://github.com/p1c2u/openapi-core/issues/153 +# "/mixed" FIXME: openapi core bug reported in https://github.com/p1c2u/openapi-core/issues/153 # Raises AssertionError: assert not {'errors': [{'code': 'InvalidMediaTypeValue', 'field': None, 'message': 'Mimetype invalid: Value not valid for schema', 'resource': None}], 'logs': [], 'status': 503} @pytest.mark.xfail( reason="openapi core bug reported in https://github.com/p1c2u/openapi-core/issues/153", strict=True, - raises=AssertionError) + raises=AssertionError, +) async def test_validate_handlers_mixed(client, specs): - await test_validate_handlers('/mixed', client, specs) + await test_validate_handlers("/mixed", client, specs) diff --git a/packages/service-library/tests/test_package.py b/packages/service-library/tests/test_package.py index 03ba51b4e47..d07cdf8258e 100644 --- a/packages/service-library/tests/test_package.py +++ b/packages/service-library/tests/test_package.py @@ -21,8 +21,8 @@ def pylintrc(osparc_simcore_root_dir): def test_run_pylint(pylintrc, package_dir): try: - AUTODETECT=0 - cmd = f'pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}'.split() + AUTODETECT = 0 + cmd = f"pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}".split() assert subprocess.check_call(cmd) == 0 except subprocess.CalledProcessError as err: pytest.fail("Linting error. Linter existed with code %d" % err.returncode) @@ -32,12 +32,12 @@ def test_no_pdbs_in_place(package_dir): # TODO: add also test_dir excluding this function!? # TODO: it can be commented! # TODO: add check on other undesired code strings?! 
- MATCH = re.compile(r'pdb.set_trace()') + MATCH = re.compile(r"pdb.set_trace()") EXCLUDE = ["__pycache__", ".git"] for root, dirs, files in os.walk(package_dir): for name in files: if name.endswith(".py"): - pypth = (Path(root) / name) + pypth = Path(root) / name code = pypth.read_text() found = MATCH.findall(code) # TODO: should return line number diff --git a/packages/service-library/tests/test_rest_middlewares.py b/packages/service-library/tests/test_rest_middlewares.py index 58dc42a4a26..180b295071a 100644 --- a/packages/service-library/tests/test_rest_middlewares.py +++ b/packages/service-library/tests/test_rest_middlewares.py @@ -5,8 +5,10 @@ from aiohttp import web from servicelib import openapi from servicelib.application_keys import APP_OPENAPI_SPECS_KEY -from servicelib.rest_middlewares import (envelope_middleware_factory, - error_middleware_factory) +from servicelib.rest_middlewares import ( + envelope_middleware_factory, + error_middleware_factory, +) from servicelib.rest_responses import is_enveloped, unwrap_envelope from servicelib.rest_routing import create_routes_from_namespace from tutils import Handlers @@ -37,22 +39,25 @@ def client(loop, aiohttp_client, specs): app.middlewares.append(error_middleware_factory(base)) app.middlewares.append(envelope_middleware_factory(base)) - return loop.run_until_complete(aiohttp_client(app)) -@pytest.mark.parametrize("path,expected_data", [ - ("/health", Handlers.get('health')), - ("/dict", Handlers.get('dict')), - ("/envelope", Handlers.get('envelope')['data']), - ("/list", Handlers.get('list')), - ("/attobj", Handlers.get('attobj')), - ("/string", Handlers.get('string')), - ("/number", Handlers.get('number')), - ("/mixed", Handlers.get('mixed')) -]) + +@pytest.mark.parametrize( + "path,expected_data", + [ + ("/health", Handlers.get("health")), + ("/dict", Handlers.get("dict")), + ("/envelope", Handlers.get("envelope")["data"]), + ("/list", Handlers.get("list")), + ("/attobj", Handlers.get("attobj")), + ("/string", Handlers.get("string")), + ("/number", Handlers.get("number")), + ("/mixed", Handlers.get("mixed")), + ], +) async def test_envelope_middleware(path, expected_data, client, specs): base = openapi.get_base_path(specs) - response = await client.get(base+path) + response = await client.get(base + path) payload = await response.json() assert is_enveloped(payload) diff --git a/packages/service-library/tests/test_rest_routing.py b/packages/service-library/tests/test_rest_routing.py index cd714e0fc0e..fb4bc5e5f86 100644 --- a/packages/service-library/tests/test_rest_routing.py +++ b/packages/service-library/tests/test_rest_routing.py @@ -4,10 +4,12 @@ import pytest from servicelib import openapi -from servicelib.rest_routing import (create_routes_from_namespace, - get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + create_routes_from_namespace, + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from tutils import Handlers @@ -23,13 +25,11 @@ def test_filtered_routing(specs): handlers = Handlers() found = get_handlers_from_namespace(handlers) - hdl_sel = { name:hdl - for name, hdl in found.items() - if "i" in name - } - opr_iter = ( (mth, url, opname, _tags) - for mth, url, opname, _tags in iter_path_operations(specs) - if "i" in opname + hdl_sel = {name: hdl for name, hdl in found.items() if "i" in name} + opr_iter = ( + (mth, url, opname, _tags) + for mth, url, opname, _tags in iter_path_operations(specs) + if "i" in 
opname ) routes = map_handlers_with_operations(hdl_sel, opr_iter, strict=True) @@ -45,7 +45,7 @@ def test_create_routes_from_namespace(specs): # not - strict try: routes = create_routes_from_namespace(specs, handlers, strict=False) - except Exception: # pylint: disable=W0703 + except Exception: # pylint: disable=W0703 pytest.fail("Non-strict failed", pytrace=True) # strict @@ -67,10 +67,10 @@ def test_prepends_basepath(specs): try: handlers = Handlers() routes = create_routes_from_namespace(specs, handlers, strict=False) - except Exception: # pylint: disable=W0703 + except Exception: # pylint: disable=W0703 pytest.fail("Non-strict failed", pytrace=True) basepath = openapi.get_base_path(specs) for route in routes: assert route.path.startswith(basepath) - assert route.handler.__name__[len("get_"):] in route.path + assert route.handler.__name__[len("get_") :] in route.path diff --git a/packages/service-library/tests/test_sandbox.py b/packages/service-library/tests/test_sandbox.py index b6cbd40f6e5..18d0f9ae519 100644 --- a/packages/service-library/tests/test_sandbox.py +++ b/packages/service-library/tests/test_sandbox.py @@ -12,12 +12,14 @@ def multi_doc_oas(here): assert openapi_path.exists() return openapi_path + @pytest.fixture def single_doc_oas(here): openapi_path = here / "data" / "oas3" / "petstore.yaml" assert openapi_path.exists() return openapi_path + async def test_multi_doc_openapi_specs(multi_doc_oas, single_doc_oas): try: # specs created out of multiple documents @@ -26,11 +28,12 @@ async def test_multi_doc_openapi_specs(multi_doc_oas, single_doc_oas): # a single-document spec single_doc_specs = await openapi.create_openapi_specs(single_doc_oas) - except Exception: # pylint: disable=W0703 + except Exception: # pylint: disable=W0703 pytest.fail("Failed specs validation") - assert single_doc_specs.paths.keys() == multi_doc_specs.paths.keys() - assert single_doc_specs.paths['/tags'].operations['get'].operation_id == \ - multi_doc_specs.paths['/tags'].operations['get'].operation_id + assert ( + single_doc_specs.paths["/tags"].operations["get"].operation_id + == multi_doc_specs.paths["/tags"].operations["get"].operation_id + ) diff --git a/packages/service-library/tests/tutils.py b/packages/service-library/tests/tutils.py index 9b4668fdc81..29a2165e2d4 100644 --- a/packages/service-library/tests/tutils.py +++ b/packages/service-library/tests/tutils.py @@ -8,41 +8,41 @@ from aiohttp import web from servicelib.rest_codecs import DataEncoder + @attr.s(auto_attribs=True) class Data: - x: int=3 - y: str="foo" + x: int = 3 + y: str = "foo" class Handlers: - async def get_health_wrong(self, request: web.Request): out = { - 'name':__name__.split('.')[0], - 'version': "1.0", - 'status': 'SERVICE_RUNNING', - 'invalid_entry': 125 + "name": __name__.split(".")[0], + "version": "1.0", + "status": "SERVICE_RUNNING", + "invalid_entry": 125, } return out async def get_health(self, request: web.Request): out = { - 'name':__name__.split('.')[0], - 'version': "1.0", - 'status': 'SERVICE_RUNNING', - 'api_version': "1.0" + "name": __name__.split(".")[0], + "version": "1.0", + "status": "SERVICE_RUNNING", + "api_version": "1.0", } return out async def get_dict(self, request: web.Request): - return {'x':3, 'y':"3"} + return {"x": 3, "y": "3"} async def get_envelope(self, request: web.Request): - data = {'x':3, 'y':"3"} + data = {"x": 3, "y": "3"} return {"error": None, "data": data} async def get_list(self, request: web.Request): - return [ {'x':3, 'y':"3"} ]*3 + return [{"x": 3, "y": "3"}] * 3 async def 
get_attobj(self, request: web.Request): return Data(3, "3") @@ -54,15 +54,13 @@ async def get_number(self, request: web.Request): return 3 async def get_mixed(self, request: web.Request): - data = [{'x': 3, 'y': "3", 'z': [Data(3, "3")]*2}]*3 + data = [{"x": 3, "y": "3", "z": [Data(3, "3")] * 2}] * 3 return data - - @classmethod def get(cls, suffix, process=True): handlers = cls() - coro = getattr(handlers, "get_"+suffix) + coro = getattr(handlers, "get_" + suffix) loop = asyncio.get_event_loop() data = loop.run_until_complete(coro(None)) diff --git a/packages/service-library/tests/with_postgres/conftest.py b/packages/service-library/tests/with_postgres/conftest.py index dc3b3b29c59..86ef3646067 100644 --- a/packages/service-library/tests/with_postgres/conftest.py +++ b/packages/service-library/tests/with_postgres/conftest.py @@ -12,33 +12,31 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_compose_file() -> Path: # overrides fixture from https://github.com/AndreLouisCaron/pytest-docker - return current_dir / 'docker-compose.yml' + return current_dir / "docker-compose.yml" -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip, docker_compose_file) -> DataSourceName: # container environment with open(docker_compose_file) as fh: config = yaml.safe_load(fh) - environ = config['services']['postgres']['environment'] + environ = config["services"]["postgres"]["environment"] dsn = DataSourceName( - user=environ['POSTGRES_USER'], - password=environ['POSTGRES_PASSWORD'], + user=environ["POSTGRES_USER"], + password=environ["POSTGRES_PASSWORD"], host=docker_ip, - port=docker_services.port_for('postgres', 5432), - database=environ['POSTGRES_DB'], - application_name="test-app" + port=docker_services.port_for("postgres", 5432), + database=environ["POSTGRES_DB"], + application_name="test-app", ) # Wait until service is responsive. 
docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(dsn), - timeout=30.0, - pause=0.1, + check=lambda: is_postgres_responsive(dsn), timeout=30.0, pause=0.1, ) return dsn diff --git a/packages/service-library/tests/with_postgres/test_aiopg_utils.py b/packages/service-library/tests/with_postgres/test_aiopg_utils.py index 301b58e5b23..e74ff4019c4 100644 --- a/packages/service-library/tests/with_postgres/test_aiopg_utils.py +++ b/packages/service-library/tests/with_postgres/test_aiopg_utils.py @@ -14,34 +14,48 @@ import sqlalchemy as sa from aiohttp import web import asyncio -from servicelib.aiopg_utils import (DatabaseError, DataSourceName, - PostgresRetryPolicyUponOperation, - create_pg_engine, init_pg_tables, - is_pg_responsive, retry_pg_api) +from servicelib.aiopg_utils import ( + DatabaseError, + DataSourceName, + PostgresRetryPolicyUponOperation, + create_pg_engine, + init_pg_tables, + is_pg_responsive, + retry_pg_api, +) current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent metadata = sa.MetaData() -tbl = sa.Table('tbl', metadata, - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('val', sa.String(255))) - - +tbl = sa.Table( + "tbl", + metadata, + sa.Column("id", sa.Integer, primary_key=True), + sa.Column("val", sa.String(255)), +) @pytest.fixture -async def postgres_service_with_fake_data(request, loop, postgres_service: DataSourceName)-> DataSourceName: +async def postgres_service_with_fake_data( + request, loop, postgres_service: DataSourceName +) -> DataSourceName: async def _create_table(engine: aiopg.sa.Engine): async with engine.acquire() as conn: - await conn.execute(f'DROP TABLE IF EXISTS {tbl.name}') - await conn.execute(f'''CREATE TABLE {tbl.name} ( + await conn.execute(f"DROP TABLE IF EXISTS {tbl.name}") + await conn.execute( + f"""CREATE TABLE {tbl.name} ( id serial PRIMARY KEY, - val varchar(255))''') + val varchar(255))""" + ) dsn = deepcopy(postgres_service) - dsn.application_name = f"setup {request.module.__name__}.{request.function.__name__}" + dsn.application_name = ( + f"setup {request.module.__name__}.{request.function.__name__}" + ) - async with aiopg.sa.create_engine(dsn.to_uri(), application_name=dsn.application_name) as engine: + async with aiopg.sa.create_engine( + dsn.to_uri(), application_name=dsn.application_name + ) as engine: await _create_table(engine) dsn.application_name = f"{request.module.__name__}.{request.function.__name__}" @@ -52,8 +66,8 @@ def test_dsn_uri_with_query(postgres_service_with_fake_data): uri = postgres_service_with_fake_data.to_uri(with_query=True) try: sa_engine = sa.create_engine(uri, echo=True, echo_pool=True) - assert sa_engine.name == 'postgresql' - assert sa_engine.driver == 'psycopg2' + assert sa_engine.name == "postgresql" + assert sa_engine.driver == "psycopg2" # if url is wrong, these will fail metadata.create_all(sa_engine) @@ -93,6 +107,7 @@ async def test_create_pg_engine(postgres_service_with_fake_data): except ValueError: assert engine4.closed + @pytest.mark.skip(reason="for documentation only and needs a swarm") async def test_engine_when_idle_for_some_time(): # NOTE: this test needs a docker swarm and a running postgres service @@ -102,30 +117,31 @@ async def test_engine_when_idle_for_some_time(): host="127.0.0.1", port=5432, database="db", - application_name="test-app" + application_name="test-app", ) engine = await create_pg_engine(dsn, minsize=1, maxsize=1) init_pg_tables(dsn, metadata) # import pdb; pdb.set_trace() - assert not engine.closed # 
does not mean anything!!! + assert not engine.closed # does not mean anything!!! # pylint: disable=no-value-for-parameter async with engine.acquire() as conn: # writes - await conn.execute(tbl.insert().values(val=f'first')) + await conn.execute(tbl.insert().values(val=f"first")) # by default docker swarm kills connections that are idle for more than 15 minutes await asyncio.sleep(901) # import pdb; pdb.set_trace() async with engine.acquire() as conn: - await conn.execute(tbl.insert().values(val=f'third')) + await conn.execute(tbl.insert().values(val=f"third")) # import pdb; pdb.set_trace() - async def test_engine_when_pg_not_reachable(): - dsn = DataSourceName(database='db', user='foo', password='foo', host='localhost', port=123) + dsn = DataSourceName( + database="db", user="foo", password="foo", host="localhost", port=123 + ) with pytest.raises(psycopg2.OperationalError): await create_pg_engine(dsn) @@ -143,7 +159,9 @@ async def test_retry_pg_api_policy(postgres_service_with_fake_data, caplog): dsn = postgres_service_with_fake_data.to_uri() app_name = postgres_service_with_fake_data.application_name - async with aiopg.sa.create_engine(dsn, application_name=app_name, echo=True) as engine: + async with aiopg.sa.create_engine( + dsn, application_name=app_name, echo=True + ) as engine: # goes await dec_go(engine, gid=0) @@ -156,12 +174,17 @@ async def test_retry_pg_api_policy(postgres_service_with_fake_data, caplog): assert "Postgres service non-responsive, responding 503" in caplog.text print(dec_go.retry.statistics) - assert dec_go.total_retry_count() == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT+1 + assert ( + dec_go.total_retry_count() + == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT + 1 + ) # goes and keeps count of all retrials await dec_go(engine, gid=2) - assert dec_go.total_retry_count() == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT+2 - + assert ( + dec_go.total_retry_count() + == PostgresRetryPolicyUponOperation.ATTEMPTS_COUNT + 2 + ) # TODO: review tests below @@ -170,10 +193,10 @@ async def test_engine_when_pg_refuses(postgres_service_with_fake_data): dsn = postgres_service_with_fake_data dsn.password = "Wrong pass" - #async with create_pg_engine(dsn) as engine: + # async with create_pg_engine(dsn) as engine: engine = await create_pg_engine(dsn) - assert not engine.closed # does not mean anything!!! + assert not engine.closed # does not mean anything!!! # acquiring connection must fail with pytest.raises(RuntimeError) as execinfo: @@ -192,11 +215,9 @@ async def test_connections(postgres_service_with_fake_data): ## number of seconds after which connection is recycled, helps to deal with stale connections in pool, default value is -1, means recycling logic is disabled. 
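# In other words, pool_recycle bounds how long a pooled connection may live
# before aiopg transparently closes and re-opens it, which guards against
# peers such as docker swarm (see the note above) silently dropping idle
# sockets. A minimal sketch, separate from the test fixtures below; the DSN
# value and function name are illustrative assumptions only:

import aiopg.sa

async def make_recycling_engine() -> aiopg.sa.Engine:
    return await aiopg.sa.create_engine(
        "postgresql://scott:tiger@localhost:5432/test",  # illustrative DSN
        minsize=1,
        maxsize=4,
        pool_recycle=2,  # seconds; the default of -1 disables recycling
    )
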
POOL_RECYCLE_SECS = 2 - async def conn_callback(conn): print(f"Opening {conn.raw}") - async with aiopg.sa.create_engine( dsn, minsize=20, @@ -204,10 +225,13 @@ async def conn_callback(conn): # timeout=1, pool_recycle=POOL_RECYCLE_SECS, echo=True, - enable_json=True, enable_hstore=True, enable_uuid=True, + enable_json=True, + enable_hstore=True, + enable_uuid=True, on_connect=conn_callback, # extra kwargs in https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS - application_name=app_name) as engine: + application_name=app_name, + ) as engine: # used and free connections # size_before = engine.size @@ -221,6 +245,7 @@ async def conn_callback(conn): # HELPERS ------------ + @retry_pg_api async def dec_go(*args, **kargs): return await go(*args, **kargs) @@ -231,14 +256,13 @@ async def go(engine: aiopg.sa.Engine, gid="", raise_cls=None): async with engine.acquire() as conn: # writes async with conn.begin(): - await conn.execute(tbl.insert().values(val=f'first-{gid}')) - await conn.execute(tbl.insert().values(val=f'second-{gid}')) + await conn.execute(tbl.insert().values(val=f"first-{gid}")) + await conn.execute(tbl.insert().values(val=f"second-{gid}")) if raise_cls is not None: raise raise_cls - # reads async for row in conn.execute(tbl.select()): print(row.id, row.val) - assert any(prefix in row.val for prefix in ('first', 'second')) + assert any(prefix in row.val for prefix in ("first", "second")) diff --git a/packages/simcore-sdk/Makefile b/packages/simcore-sdk/Makefile index bd91accd86d..2e0efa3ad9a 100644 --- a/packages/simcore-sdk/Makefile +++ b/packages/simcore-sdk/Makefile @@ -6,13 +6,14 @@ # - In windows, only WSL is supported # # by sanderegg, pcrespov -.DEFAULT_GOAL := help +# +include ../../scripts/common.Makefile .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests-unit tests-integration tests tests-unit: ## runs unit tests @@ -28,19 +29,3 @@ tests-integration: ## runs integration tests against local+production images tests: tests-unit tests-integration ## runs all tests # running tests - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '$(notdir $(CURDIR))':" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/scripts/check_requirements.sh b/scripts/check_requirements.bash old mode 100644 new mode 100755 similarity index 64% rename from scripts/check_requirements.sh rename to scripts/check_requirements.bash index 92fd3194ef2..06e3396578d --- a/scripts/check_requirements.sh +++ b/scripts/check_requirements.bash @@ -2,4 +2,4 @@ # lists all python packages used throughout all the repository that are not tied to a specific version -find . \( -name "requirements.txt" -o -name "common.txt" -o -name "devel.txt" -o -name "prod.txt" \) | xargs -I % grep -v "\-r " % | sort |uniq | awk '$0 !~ /==/' +find . \( -name "requirements.txt" -o -name "common.txt" -o -name "devel.txt" -o -name "prod.txt" \) | xargs -I % grep -v "\-r " % | sort |uniq | awk '$0 !~ /==/' diff --git a/scripts/code-climate.sh b/scripts/code-climate.bash similarity index 96% rename from scripts/code-climate.sh rename to scripts/code-climate.bash index 1cf7b7aa442..570f575a43a 100755 --- a/scripts/code-climate.sh +++ b/scripts/code-climate.bash @@ -19,7 +19,8 @@ docker run \ codeclimate/codeclimate "$@" -if [ -z "$@" ];then +if test -z "$@" +then echo "----" echo "Listing other engines (in dockers)" docker images codeclimate/* diff --git a/scripts/common.Makefile b/scripts/common.Makefile new file mode 100644 index 00000000000..dad682d6978 --- /dev/null +++ b/scripts/common.Makefile @@ -0,0 +1,120 @@ +# +# These are common target and recipes +# This file is included at the top of every Makefile +# $(CURDIR) in this file refers to the directory where this file is included +# +# SEE https://mattandre.ws/2016/05/makefile-inheritance/ +# + +# defaults +.DEFAULT_GOAL := help + +# Use bash not sh +SHELL := /bin/bash + +# Some handy flag variables +ifeq ($(filter Windows_NT,$(OS)),) +IS_WSL := $(if $(findstring Microsoft,$(shell uname -a)),WSL,) +IS_OSX := $(filter Darwin,$(shell uname -a)) +IS_LINUX:= $(if $(or $(IS_WSL),$(IS_OSX)),,$(filter Linux,$(shell uname -a))) +endif +IS_WIN := $(strip $(if $(or $(IS_LINUX),$(IS_OSX),$(IS_WSL)),,$(OS))) + +# version control +VCS_URL := $(shell git config --get remote.origin.url) +VCS_REF := $(shell git rev-parse --short HEAD) +NOW_TIMESTAMP := $(shell date -u +"%Y-%m-%dT%H:%M:%SZ") + + +$(if $(IS_WIN),\ +$(error Windows is not supported in all recipes. Use WSL instead. Follow instructions in README.md),) + + +# +# COMMON TASKS +# + + +.PHONY: help +# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html +help: + @echo "usage: make [target] ..." + @echo "" + @echo "Targets for '$(notdir $(CURDIR))':" + @echo "" + @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) + @echo "" + + +devenv: ## build development environment (using main services/docker-compose-build.yml) + @$(MAKE) --directory ${REPO_BASE_DIR} --no-print-directory $@ + + +GIT_CLEAN_ARGS = -dxf -e .vscode +clean: ## cleans all unversioned files in project and temp files create by this makefile + # Cleaning unversioned + @git clean -n $(GIT_CLEAN_ARGS) + @echo -n "Are you sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] + @echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ] + @git clean $(GIT_CLEAN_ARGS) + + +info: ## displays basic info + # system + @echo ' OS : $(IS_LINUX)$(IS_OSX)$(IS_WSL)$(IS_WIN)' + @echo ' CURDIR : ${CURDIR}' + @echo ' NOW_TIMESTAMP : ${NOW_TIMESTAMP}' + @echo ' VCS_URL : ${VCS_URL}' + @echo ' VCS_REF : ${VCS_REF}' + # installed + @pip list + # version + @cat setup.py | grep name= + @cat setup.py | grep version= + + +.PHONY: autoformat +autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] + # auto formatting with black + @python3 -m black --verbose \ + --exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration|client-sdk)/" \ + $(CURDIR) + + +.PHONY: version-patch version-minor version-major +version-patch: ## commits version with bug fixes not affecting the cookiecuter config + $(_bumpversion) +version-minor: ## commits version with backwards-compatible API addition or changes (i.e. can replay) + $(_bumpversion) +version-major: ## commits version with backwards-INcompatible addition or changes + $(_bumpversion) + + +buil%: ## builds docker image (using main services/docker-compose-build.yml) + # building docker image for ${APP_NAME} + @$(MAKE) --directory ${REPO_BASE_DIR} --no-print-directory build target=${APP_NAME} + +# FIXME: +#.PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc +#build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker image build in many flavours +# # building docker image for ${APP_NAME} ... +# @$(MAKE) --directory ${REPO_BASE_DIR} $@ target=${APP_NAME} + +.PHONY: shell +shell: ## runs shell in production container + @$(MAKE) --directory ${REPO_BASE_DIR} --no-print-directory shell target=${APP_NAME} + +# +# SUBTASKS +# + +.PHONY: _check_venv_active +_check_venv_active: + # checking whether virtual environment was activated + @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" + + +define _bumpversion + # upgrades as $(subst version-,,$@) version, commits and tags + @bump2version --verbose --list $(subst version-,,$@) +endef diff --git a/scripts/shellcheck b/scripts/shellcheck.bash similarity index 100% rename from scripts/shellcheck rename to scripts/shellcheck.bash diff --git a/scripts/upgrade_test_requirements.sh b/scripts/upgrade_test_requirements.bash similarity index 67% rename from scripts/upgrade_test_requirements.sh rename to scripts/upgrade_test_requirements.bash index 4226be7d031..7104212f8be 100755 --- a/scripts/upgrade_test_requirements.sh +++ b/scripts/upgrade_test_requirements.bash @@ -5,6 +5,6 @@ for path_to_req_test in $(find ../ -type f -name '_test.txt') do - rm --verbose $path_to_req_test - make --directory $(dirname -- $path_to_req_test) + rm --verbose "$path_to_req_test" + make --directory "$(dirname -- "$path_to_req_test")" done diff --git a/scripts/url-encoder.sh b/scripts/url-encoder.bash similarity index 100% rename from scripts/url-encoder.sh rename to scripts/url-encoder.bash diff --git a/services/catalog/Makefile b/services/catalog/Makefile index c722f864807..d333b222119 100644 --- a/services/catalog/Makefile +++ b/services/catalog/Makefile @@ -1,10 +1,7 @@ # # Targets for DEVELOPMENT of Components Catalog Service # - -# Makefile config -.DEFAULT_GOAL := help -SHELL = /bin/bash +include ../../scripts/common.Makefile # Custom variables APP_NAME := $(notdir $(CURDIR)) @@ -15,28 +12,18 @@ 
REPO_BASE_DIR = $(abspath $(CURDIR)/../../) VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) --directory ${REPO_BASE_DIR} $@ - - .PHONY: requirements requirements: ## compiles pip requirements (.in -> .txt) @$(MAKE) --directory requirements all -.check-venv-active: - # checking whether virtual environment was activated - @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" - .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: requirements .check-venv-active ## install app in development/production or CI mode +install-dev install-prod install-ci: requirements _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode pip-sync requirements/$(subst install-,,$@).txt PHONY: tests-unit tests-integration tests - tests: tests-unit tests-integration tests-unit: ## runs unit tests @@ -77,64 +64,15 @@ build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker @$(MAKE) --directory ${REPO_BASE_DIR} $@ target=${APP_NAME} -.PHONY: autoformat -autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] - # auto formatting with black - @python3 -m black --verbose $(CURDIR) - - .PHONY: openapi-specs openapi-specs: install-dev ## TODO: implementing a way to serialize openapi python3 -c "from simcore_service_catalog.main import *; dump_openapi()" -define _bumpversion - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) -endef - -version-patch: ## commits version with bug fixes not affecting the cookiecuter config - $(_bumpversion) -version-minor: ## commits version with backwards-compatible API addition or changes (i.e. can replay) - $(_bumpversion) -version-major: ## commits version with backwards-INcompatible addition or changes - $(_bumpversion) - - .PHONY: replay - replay: .cookiecutterrc ## re-applies cookiecutter # Replaying ../cookiecutter-simcore-py-fastapi/ ... @cookiecutter --no-input --overwrite-if-exists \ --config-file=$< \ --output-dir="$(abspath $(CURDIR)/..)" \ "../cookiecutter-simcore-py-fastapi/" - - -.PHONY: info -info: ## displays information - # installed - @pip list - # version - @cat VERSION - - -.PHONY: clean clean-all -git_clean_args = -dxf -e .vscode - -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -n $(git_clean_args) - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean $(git_clean_args) - - -#----------------------------------- -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '$(notdir $(CURDIR))':" - @echo "" - @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/services/director/Makefile b/services/director/Makefile index 722f41e5018..4c2544348db 100644 --- a/services/director/Makefile +++ b/services/director/Makefile @@ -1,4 +1,8 @@ -.DEFAULT_GOAL := help +# +# Targets for DEVELOPMENT for Director service +# +include ../../scripts/common.Makefile + REPO_BASE_DIR = $(abspath $(CURDIR)/../../) VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) @@ -6,21 +10,15 @@ VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) APP_NAME := $(notdir $(CURDIR)) - -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} $@ - - .PHONY: openapi-specs openapi-specs: ## updates and validates openapi specifications $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @$(VENV_DIR)/bin/pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests @@ -31,40 +29,4 @@ tests: ## runs unit tests .PHONY: build build: openapi-specs ## builds docker image (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} target=${APP_NAME} $@ - - -.PHONY: shell -shell: ## runs shell in production container - @$(MAKE) --directory ${REPO_BASE_DIR} $@ target=${APP_NAME} - -.PHONY: version-patch version-minor -version-patch version-minor: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '${APP_NAME}':" - @echo "" - @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" + @make --no-print-directory build-super diff --git a/services/storage/Makefile b/services/storage/Makefile index 14cc5e5f391..b4dac456df9 100644 --- a/services/storage/Makefile +++ b/services/storage/Makefile @@ -1,25 +1,21 @@ -.DEFAULT_GOAL := help +# +# Targets for DEVELOPMENT for Storage service +# +include ../../scripts/common.Makefile REPO_BASE_DIR = $(abspath $(CURDIR)/../../) -VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) - APP_NAME := $(notdir $(CURDIR)) -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} $@ - - .PHONY: openapi-specs openapi-specs: ## updates and validates openapi specifications $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @$(VENV_DIR)/bin/pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests @@ -28,39 +24,6 @@ tests: ## runs unit tests @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests - .PHONY: build build: openapi-specs ## builds docker image (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} target=${APP_NAME} $@ - - -.PHONY: version-patch version-minor -version-patch version-minor: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? 
[y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '${APP_NAME}':" - @echo "" - @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_- ]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" + @$(MAKE) -C ${REPO_BASE_DIR} $@ target=${APP_NAME} diff --git a/services/storage/docker/healthcheck.py b/services/storage/docker/healthcheck.py index de57f4f6abb..5a5edba5230 100644 --- a/services/storage/docker/healthcheck.py +++ b/services/storage/docker/healthcheck.py @@ -27,9 +27,14 @@ ok = os.environ.get("SC_BOOT_MODE").lower() == "debug" # Queries host -ok = ok or urlopen("{host}{baseurl}".format( - host=sys.argv[1], - baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "")) # adds a base-path if defined in environ - ).getcode() == 200 - -sys.exit(SUCCESS if ok else UNHEALTHY) \ No newline at end of file +ok = ( + ok + or urlopen( + "{host}{baseurl}".format( + host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") + ) # adds a base-path if defined in environ + ).getcode() + == 200 +) + +sys.exit(SUCCESS if ok else UNHEALTHY) diff --git a/services/storage/setup.cfg b/services/storage/setup.cfg index d24645148a7..ec3dbde71a3 100644 --- a/services/storage/setup.cfg +++ b/services/storage/setup.cfg @@ -14,8 +14,6 @@ replace = version='{new_version}' [bumpversion:file:./src/simcore_service_storage/api/v0/openapi.yaml] -[bumpversion:file:src/simcore_service_storage/__version__.py] - [bumpversion:file:client-sdk/codegen_config.json] search = "packageVersion":"{current_version}" replace = "packageVersion":"{new_version}" diff --git a/services/storage/setup.py b/services/storage/setup.py index ffc7cfeafb6..8c12d6ba77c 100644 --- a/services/storage/setup.py +++ b/services/storage/setup.py @@ -7,46 +7,49 @@ here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent if sys.version_info < (3, 6): - raise RuntimeError("Requires 3.6, got %s. Did you forget to activate virtualenv?" % sys.version_info) + raise RuntimeError( + "Requires 3.6, got %s. Did you forget to activate virtualenv?" 
+        % sys.version_info
+    )
 
 
-def read_reqs( reqs_path: Path):
-    return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE)
+def read_reqs(reqs_path: Path):
+    return re.findall(r"(^[^#-][\w]+[-~>=<.\w]+)", reqs_path.read_text(), re.MULTILINE)
 
 
-install_requirements = read_reqs( here / "requirements" / "_base.txt" ) + [
+install_requirements = read_reqs(here / "requirements" / "_base.txt") + [
     "s3wrapper==0.1.0",
     "simcore-postgres-database",
     "simcore-sdk==0.1.0",
-    "simcore-service-library"
+    "simcore-service-library",
 ]
 
-test_requirements = read_reqs( here / "requirements" / "_test.txt" )
+test_requirements = read_reqs(here / "requirements" / "_test.txt")
 
 setup_config = dict(
-    name='simcore-service-storage',
-    version='0.2.1',
-    description='Service to manage data storage in simcore',
-    author='Manuel Guidon (mguidon)',
-    python_requires='>3.6, <3.7',
-    packages=find_packages(where='src'),
-    package_dir={'': 'src'},
+    name="simcore-service-storage",
+    version="0.2.1",
+    description="Service to manage data storage in simcore",
+    author="Manuel Guidon (mguidon)",
+    python_requires=">3.6, <3.7",
+    packages=find_packages(where="src"),
+    package_dir={"": "src"},
     include_package_data=True,
-    install_requires= install_requirements,
+    install_requires=install_requirements,
     tests_require=test_requirements,
     package_data={
-        '': [
-            'api/v0/openapi.yaml',
-            'api/v0/schemas/*.json',
-            'data/*.json',
-            'data/*.yml',
-            'data/*.yaml',
-        ],
+        "": [
+            "api/v0/openapi.yaml",
+            "api/v0/schemas/*.json",
+            "data/*.json",
+            "data/*.yml",
+            "data/*.yaml",
+        ],
     },
     entry_points={
-        'console_scripts': [
-            'simcore-service-storage = simcore_service_storage.cli:main',
+        "console_scripts": [
+            "simcore-service-storage = simcore_service_storage.cli:main",
         ],
     },
 )
@@ -57,7 +60,8 @@ def main():
     """
     setup(**setup_config)
-    return 0 # syccessful termination
+    return 0  # successful termination
+
 
 if __name__ == "__main__":
     raise SystemExit(main())
diff --git a/services/storage/src/simcore_service_storage/__init__.py b/services/storage/src/simcore_service_storage/__init__.py
index cb16e69b262..acdd02fe8ff 100644
--- a/services/storage/src/simcore_service_storage/__init__.py
+++ b/services/storage/src/simcore_service_storage/__init__.py
@@ -3,4 +3,3 @@
 """
 from .__version__ import __version__
 from .cli import main
-
diff --git a/services/storage/src/simcore_service_storage/__version__.py b/services/storage/src/simcore_service_storage/__version__.py
index 1806ab44fd7..c839c64ff0c 100644
--- a/services/storage/src/simcore_service_storage/__version__.py
+++ b/services/storage/src/simcore_service_storage/__version__.py
@@ -1,39 +1,12 @@
-""" Current version of the simcore_service_storage application.
-
-This project uses the Semantic Versioning scheme in conjunction with PEP 0440:
-
-
-
-
-
-Major versions introduce significant changes to the API, and backwards
-compatibility is not guaranteed.
-
-Minor versions are for new features and other backwards-compatible changes to the API.
-
-Patch versions are for bug fixes and internal code changes that do not affect the API.
-
-Pre-release and development versions are denoted appending a hyphen, i.e. 0.2.1-dev
-
-Build metadata (e.g. git commit id, build id, ...) can be appended with a plus, i.e.
0.2.1-dev+asd21ff - -Package version is defined in the setup.py following the principle of single-sourcing (option 5): - +""" Current version of the simcore_service_storage application and its API """ import pkg_resources -import semantic_version -# TODO: introduce metadata info from vcs +from semantic_version import Version -try: - # access metadata - __version__ = pkg_resources.get_distribution('simcore_service_storage').version - assert __version__=="0.2.1", "Did you install this package?" -except AssertionError as ee: - import logging - logging.debug(ee) +__version__: str = pkg_resources.get_distribution("simcore_service_storage").version +version = Version(__version__) -def get_version_object(): - return semantic_version.Version(__version__) +api_version_prefix: str = f"v{version.major}" diff --git a/services/storage/src/simcore_service_storage/application.py b/services/storage/src/simcore_service_storage/application.py index a10a47168eb..2c631efb559 100644 --- a/services/storage/src/simcore_service_storage/application.py +++ b/services/storage/src/simcore_service_storage/application.py @@ -21,30 +21,36 @@ def create(config: Dict) -> web.Application: - log.debug("Initializing app with config:\n%s", - json.dumps(config, indent=2, sort_keys=True)) + log.debug( + "Initializing app with config:\n%s", + json.dumps(config, indent=2, sort_keys=True), + ) app = create_safe_application(config) tracing = config["tracing"]["enabled"] if tracing: - setup_tracing(app, "simcore_service_storage", - config["main"]["host"], config["main"]["port"], config["tracing"]) - setup_db(app) # -> postgres service - setup_s3(app) # -> minio service + setup_tracing( + app, + "simcore_service_storage", + config["main"]["host"], + config["main"]["port"], + config["tracing"], + ) + setup_db(app) # -> postgres service + setup_s3(app) # -> minio service setup_dsm(app) # core subsystem. Needs s3 and db setups done - setup_rest(app) # lastly, we expose API to the world + setup_rest(app) # lastly, we expose API to the world if config["main"].get("monitoring_enabled", False): setup_monitoring(app, "simcore_service_storage") return app + def run(config, app=None): log.debug("Serving application ") if not app: app = create(config) - web.run_app(app, - host=config["main"]["host"], - port=config["main"]["port"]) + web.run_app(app, host=config["main"]["host"], port=config["main"]["port"]) diff --git a/services/storage/src/simcore_service_storage/cli.py b/services/storage/src/simcore_service_storage/cli.py index 3d43916ff6c..17781d94deb 100644 --- a/services/storage/src/simcore_service_storage/cli.py +++ b/services/storage/src/simcore_service_storage/cli.py @@ -34,16 +34,15 @@ def create_environ(skip_system_environ=False): # project-related environment variables here = os.path.dirname(__file__) - environ['THIS_PACKAGE_DIR'] = here + environ["THIS_PACKAGE_DIR"] = here rootdir = search_osparc_repo_dir(start=here) if rootdir is not None: - environ['OSPARC_SIMCORE_REPO_ROOTDIR'] = str(rootdir) + environ["OSPARC_SIMCORE_REPO_ROOTDIR"] = str(rootdir) return environ - def setup(_parser): cli_config.add_cli_options(_parser) return _parser @@ -62,7 +61,9 @@ def parse(args, _parser): return config -parser = argparse.ArgumentParser(description='Service to manage data storage in simcore.') +parser = argparse.ArgumentParser( + description="Service to manage data storage in simcore." 
+) setup(parser) diff --git a/services/storage/src/simcore_service_storage/cli_config.py b/services/storage/src/simcore_service_storage/cli_config.py index 2dafcd3a7aa..0886bbe9c92 100644 --- a/services/storage/src/simcore_service_storage/cli_config.py +++ b/services/storage/src/simcore_service_storage/cli_config.py @@ -1,4 +1,3 @@ - import argparse import logging import os @@ -13,7 +12,6 @@ log = logging.getLogger(__name__) - def add_cli_options(argument_parser=None): """ Adds settings group to cli with options: @@ -28,13 +26,13 @@ def add_cli_options(argument_parser=None): argument_parser = argparse.ArgumentParser() commandline.standard_argparse_options( - argument_parser.add_argument_group('settings'), - default_config=DEFAULT_CONFIG) + argument_parser.add_argument_group("settings"), default_config=DEFAULT_CONFIG + ) return argument_parser -def config_from_options(options, vars=None): # pylint: disable=W0622 +def config_from_options(options, vars=None): # pylint: disable=W0622 if vars is None: vars = os.environ @@ -43,7 +41,7 @@ def config_from_options(options, vars=None): # pylint: disable=W0622 if resources.exists(resource_name): options.config = resources.get_path(resource_name) else: - resource_name = RSC_CONFIG_DIR_KEY + '/' + resource_name + resource_name = RSC_CONFIG_DIR_KEY + "/" + resource_name if resources.exists(resource_name): options.config = resources.get_path(resource_name) @@ -51,7 +49,8 @@ def config_from_options(options, vars=None): # pylint: disable=W0622 return commandline.config_from_options(options, trafaret=schema, vars=vars) -def read_and_validate(filepath, vars=None): # pylint: disable=W0622 + +def read_and_validate(filepath, vars=None): # pylint: disable=W0622 if vars is None: vars = os.environ # NOTE: vars=os.environ in signature freezes default to os.environ before it gets diff --git a/services/storage/src/simcore_service_storage/config_schema.py b/services/storage/src/simcore_service_storage/config_schema.py index e55687d5ad2..465a7058562 100644 --- a/services/storage/src/simcore_service_storage/config_schema.py +++ b/services/storage/src/simcore_service_storage/config_schema.py @@ -5,29 +5,38 @@ from . 
import rest_config -app_schema = T.Dict({ - T.Key("host", default="0.0.0.0"): T.IP, - "port": T.Int(), - "log_level": T.Enum("DEBUG", "WARNING", "INFO", "ERROR", "CRITICAL", "FATAL", "NOTSET"), - "testing": T.Bool(), - T.Key("max_workers", default=8, optional=True) : T.Int(), - T.Key("monitoring_enabled", default=False): T.Or(T.Bool(), T.Int), # Int added to use environs - T.Key("test_datcore", optional=True): T.Dict({ - "token_key": T.String(), - "token_secret": T.String() - }), - T.Key("disable_services", default=[], optional=True): T.List(T.String()) -}) +app_schema = T.Dict( + { + T.Key("host", default="0.0.0.0"): T.IP, + "port": T.Int(), + "log_level": T.Enum( + "DEBUG", "WARNING", "INFO", "ERROR", "CRITICAL", "FATAL", "NOTSET" + ), + "testing": T.Bool(), + T.Key("max_workers", default=8, optional=True): T.Int(), + T.Key("monitoring_enabled", default=False): T.Or( + T.Bool(), T.Int + ), # Int added to use environs + T.Key("test_datcore", optional=True): T.Dict( + {"token_key": T.String(), "token_secret": T.String()} + ), + T.Key("disable_services", default=[], optional=True): T.List(T.String()), + } +) -schema = T.Dict({ - "version": T.String(), - T.Key("main"): app_schema, - T.Key("postgres"): db.CONFIG_SCHEMA, - T.Key("s3"): s3.CONFIG_SCHEMA, - addon_section(rest_config.CONFIG_SECTION_NAME, optional=True): rest_config.schema, - T.Key("tracing"): tracing_schema -}) +schema = T.Dict( + { + "version": T.String(), + T.Key("main"): app_schema, + T.Key("postgres"): db.CONFIG_SCHEMA, + T.Key("s3"): s3.CONFIG_SCHEMA, + addon_section( + rest_config.CONFIG_SECTION_NAME, optional=True + ): rest_config.schema, + T.Key("tracing"): tracing_schema, + } +) # TODO: config submodule that knows about schema with web.Application intpu parameters diff --git a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py index f12696f44bf..3ecc8c5cc37 100644 --- a/services/storage/src/simcore_service_storage/datcore.py +++ b/services/storage/src/simcore_service_storage/datcore.py @@ -3,16 +3,17 @@ requires Blackfynn, check Makefile env2 """ -import logging # pylint: skip-file + +import logging import os import urllib +from contextlib import suppress from pathlib import Path from typing import List from blackfynn import Blackfynn from blackfynn.models import BaseCollection, Collection, DataPackage - from simcore_service_storage.models import (DatasetMetaData, FileMetaData, FileMetaDataEx) from simcore_service_storage.settings import DATCORE_ID, DATCORE_STR @@ -22,14 +23,17 @@ DatasetMetaDataVec = List[DatasetMetaData] -#FIXME: W0611:Unused IOAPI imported from blackfynn.api.transfers -#from blackfynn.api.transfers import IOAPI +# FIXME: W0611:Unused IOAPI imported from blackfynn.api.transfers +# from blackfynn.api.transfers import IOAPI -#FIXME: W0212:Access to a protected member _api of a client class +# FIXME: W0212:Access to a protected member _api of a client class # pylint: disable=W0212 -def _get_collection_id(folder: BaseCollection, _collections: List[str], collection_id: str)-> str: + +def _get_collection_id( + folder: BaseCollection, _collections: List[str], collection_id: str +) -> str: if not len(_collections): return collection_id @@ -50,8 +54,13 @@ def _get_collection_id(folder: BaseCollection, _collections: List[str], collecti class DatcoreClient(object): def __init__(self, api_token=None, api_secret=None, host=None, streaming_host=None): - self.client = Blackfynn(profile=None, api_token=api_token, api_secret=api_secret, - host=host, 
streaming_host=streaming_host) + self.client = Blackfynn( + profile=None, + api_token=api_token, + api_secret=api_secret, + host=host, + streaming_host=streaming_host, + ) def profile(self): """ @@ -87,7 +96,7 @@ def _destination_from_id(self, destination_id: str): return destination - def list_files_recursively(self, dataset_filter: str=""): + def list_files_recursively(self, dataset_filter: str = ""): files = [] for dataset in self.client.datasets(): @@ -96,39 +105,45 @@ def list_files_recursively(self, dataset_filter: str=""): return files - def list_files_raw_dataset(self, dataset_id: str)->List[FileMetaDataEx]: - files = [] # raw packages - _files = [] # fmds - data = {} # map to keep track of parents-child + def list_files_raw_dataset(self, dataset_id: str) -> List[FileMetaDataEx]: + files = [] # raw packages + _files = [] # fmds + data = {} # map to keep track of parents-child - cursor = '' + cursor = "" page_size = 1000 api = self.client._api.datasets dataset = self.client.get_dataset(dataset_id) if dataset is not None: while True: - resp = api._get(api._uri('/{id}/packages?cursor={cursor}&pageSize={pageSize}&includeSourceFiles={includeSourceFiles}', id=dataset_id, - cursor=cursor, pageSize=page_size, includeSourceFiles=False)) - for package in resp.get('packages', list()): - id = package['content']['id'] + resp = api._get( + api._uri( + "/{id}/packages?cursor={cursor}&pageSize={pageSize}&includeSourceFiles={includeSourceFiles}", + id=dataset_id, + cursor=cursor, + pageSize=page_size, + includeSourceFiles=False, + ) + ) + for package in resp.get("packages", list()): + id = package["content"]["id"] data[id] = package files.append(package) - cursor = resp.get('cursor') + cursor = resp.get("cursor") if cursor is None: break - for f in files: - if f['content']['packageType'] != 'Collection': - filename = f['content']['name'] + if f["content"]["packageType"] != "Collection": + filename = f["content"]["name"] file_path = "" - file_id = f['content']['nodeId'] + file_id = f["content"]["nodeId"] _f = f - while 'parentId' in _f['content'].keys(): - parentid = _f['content']['parentId'] + while "parentId" in _f["content"].keys(): + parentid = _f["content"]["parentId"] _f = data[parentid] - file_path = _f['content']['name'] +"/" + file_path + file_path = _f["content"]["name"] + "/" + file_path bucket_name = dataset.name file_name = filename @@ -136,23 +151,33 @@ def list_files_raw_dataset(self, dataset_id: str)->List[FileMetaDataEx]: object_name = str(Path(file_path) / file_name) file_uuid = str(Path(bucket_name) / object_name) - created_at = f['content']['createdAt'] - last_modified = f['content']['updatedAt'] + created_at = f["content"]["createdAt"] + last_modified = f["content"]["updatedAt"] parent_id = dataset_id - if 'parentId' in f['content']: - parentId = f['content']['parentId'] - parent_id = data[parentId]['content']['nodeId'] - - fmd = FileMetaData(bucket_name=bucket_name, file_name=file_name, object_name=object_name, - location=DATCORE_STR, location_id=DATCORE_ID, file_uuid=file_uuid, file_id=file_id, - raw_file_path=file_uuid, display_file_path=file_uuid, created_at=created_at, - last_modified=last_modified, file_size=file_size) + if "parentId" in f["content"]: + parentId = f["content"]["parentId"] + parent_id = data[parentId]["content"]["nodeId"] + + fmd = FileMetaData( + bucket_name=bucket_name, + file_name=file_name, + object_name=object_name, + location=DATCORE_STR, + location_id=DATCORE_ID, + file_uuid=file_uuid, + file_id=file_id, + raw_file_path=file_uuid, + 
display_file_path=file_uuid, + created_at=created_at, + last_modified=last_modified, + file_size=file_size, + ) fmdx = FileMetaDataEx(fmd=fmd, parent_id=parent_id) _files.append(fmdx) return _files - def list_files_raw(self, dataset_filter: str="")->List[FileMetaDataEx]: + def list_files_raw(self, dataset_filter: str = "") -> List[FileMetaDataEx]: _files = [] for dataset in self.client.datasets(): @@ -160,10 +185,12 @@ def list_files_raw(self, dataset_filter: str="")->List[FileMetaDataEx]: return _files - def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCollection, current_root: Path): + def list_dataset_files_recursively( + self, files: List[FileMetaData], base: BaseCollection, current_root: Path + ): for item in base: if isinstance(item, Collection): - _current_root = current_root / Path(item.name) + _current_root = current_root / Path(item.name) self.list_dataset_files_recursively(files, item, _current_root) else: parts = current_root.parts @@ -172,11 +199,11 @@ def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCo file_size = 0 # lets assume we have only one file if item.files: - file_name = Path(item.files[0].as_dict()['content']['s3key']).name - file_size = item.files[0].as_dict()['content']['size'] + file_name = Path(item.files[0].as_dict()["content"]["s3key"]).name + file_size = item.files[0].as_dict()["content"]["size"] # if this is in the root directory, the object_name is the filename only if len(parts) > 1: - object_name = str(Path(*list(parts)[1:])/ Path(file_name)) + object_name = str(Path(*list(parts)[1:]) / Path(file_name)) else: object_name = str(Path(file_name)) @@ -184,13 +211,22 @@ def list_dataset_files_recursively(self, files: List[FileMetaData], base: BaseCo file_id = item.id created_at = item.created_at last_modified = item.updated_at - fmd = FileMetaData(bucket_name=bucket_name, file_name=file_name, object_name=object_name, - location=DATCORE_STR, location_id=DATCORE_ID, file_uuid=file_uuid, file_id=file_id, - raw_file_path=file_uuid, display_file_path=file_uuid, created_at=created_at, - last_modified=last_modified, file_size=file_size) + fmd = FileMetaData( + bucket_name=bucket_name, + file_name=file_name, + object_name=object_name, + location=DATCORE_STR, + location_id=DATCORE_ID, + file_uuid=file_uuid, + file_id=file_id, + raw_file_path=file_uuid, + display_file_path=file_uuid, + created_at=created_at, + last_modified=last_modified, + file_size=file_size, + ) files.append(fmd) - def create_dataset(self, ds_name, force_delete=False): """ Creates a new dataset for the current user and returns it. 
Returns existing one @@ -202,13 +238,11 @@ def create_dataset(self, ds_name, force_delete=False): """ ds = None - try: + with suppress(Exception): ds = self.client.get_dataset(ds_name) if force_delete: ds.delete() ds = None - except Exception: # pylint: disable=W0703 - pass if ds is None: ds = self.client.create_dataset(ds_name) @@ -225,10 +259,8 @@ def get_dataset(self, ds_name, create_if_not_exists=False): """ ds = None - try: + with suppress(Exception): ds = self.client.get_dataset(ds_name) - except Exception: # pylint: disable=W0703 - pass if ds is None and create_if_not_exists: ds = self.client.create_dataset(ds_name) @@ -259,7 +291,7 @@ def exists_dataset(self, ds_name): ds = self.get_dataset(ds_name) return ds is not None - def upload_file(self, destination: str, filepath: str, meta_data = None): + def upload_file(self, destination: str, filepath: str, meta_data=None): """ Uploads a file to a given dataset/collection given its filepath on the host. Optionally adds some meta data @@ -303,7 +335,7 @@ def _update_meta_data(self, package, meta_data): """ for key in meta_data.keys(): - package.set_property(key, meta_data[key], category='simcore') + package.set_property(key, meta_data[key], category="simcore") package.update() @@ -334,9 +366,11 @@ def download_link(self, destination, filename): # pylint: disable = E1101 for item in collection: if isinstance(item, DataPackage): - if Path(item.files[0].as_dict()['content']['s3key']).name == filename: + if Path(item.files[0].as_dict()["content"]["s3key"]).name == filename: file_desc = self.client._api.packages.get_sources(item.id)[0] - url = self.client._api.packages.get_presigned_url_for_file(item.id, file_desc.id) + url = self.client._api.packages.get_presigned_url_for_file( + item.id, file_desc.id + ) return url return "" @@ -349,10 +383,12 @@ def download_link_by_id(self, file_id): filename = "" package = self.client.get(file_id) if package is not None: - filename = Path(package.files[0].as_dict()['content']['s3key']).name + filename = Path(package.files[0].as_dict()["content"]["s3key"]).name file_desc = self.client._api.packages.get_sources(file_id)[0] - url = self.client._api.packages.get_presigned_url_for_file(file_id, file_desc.id) + url = self.client._api.packages.get_presigned_url_for_file( + file_id, file_desc.id + ) return url, filename @@ -388,7 +424,7 @@ def delete_file(self, destination, filename): collection.update() for item in collection: if isinstance(item, DataPackage): - if Path(item.files[0].as_dict()['content']['s3key']).name == filename: + if Path(item.files[0].as_dict()["content"]["s3key"]).name == filename: self.client.delete(item) return True @@ -436,7 +472,6 @@ def update_meta_data(self, dataset, filename, meta_data): if package is not None: self._update_meta_data(package, meta_data) - def get_meta_data(self, dataset, filename): """ Returns metadata for a file @@ -472,10 +507,10 @@ def delete_meta_data(self, dataset, filename, keys=None): if package is not None: if keys is None: for p in package.properties: - package.remove_property(p.key, category='simcore') + package.remove_property(p.key, category="simcore") else: for k in keys: - package.remove_property(k, category='simcore') + package.remove_property(k, category="simcore") def search(self, what, max_count): """ @@ -508,14 +543,15 @@ def upload_file_to_id(self, destination_id: str, filepath: str): files = [filepath] try: - result = self.client._api.io.upload_files(destination, files, display_progress=True) - if result and result[0] and 'package' in 
result[0][0]:
-            _id = result[0][0]['package']['content']['id']
+        result = self.client._api.io.upload_files(
+            destination, files, display_progress=True
+        )
+        if result and result[0] and "package" in result[0][0]:
+            _id = result[0][0]["package"]["content"]["id"]
 
         except Exception:
             logger.exception("Error uploading file to datcore")
 
-
         return _id
 
     def create_collection(self, destination_id: str, collection_name: str):
@@ -539,7 +575,7 @@ def create_collection(self, destination_id: str, collection_name: str):
 
         return _id
 
-    def list_datasets(self)->DatasetMetaDataVec:
+    def list_datasets(self) -> DatasetMetaDataVec:
         data = []
         for dataset in self.client.datasets():
             dmd = DatasetMetaData(dataset_id=dataset.id, display_name=dataset.name)
diff --git a/services/storage/src/simcore_service_storage/datcore_wrapper.py b/services/storage/src/simcore_service_storage/datcore_wrapper.py
index 05137975acf..1491a918ca6 100644
--- a/services/storage/src/simcore_service_storage/datcore_wrapper.py
+++ b/services/storage/src/simcore_service_storage/datcore_wrapper.py
@@ -17,41 +17,49 @@
 CURRENT_DIR = Path(__file__).resolve().parent
 logger = logging.getLogger(__name__)
 
-#FIXME: W0703: Catching too general exception Exception (broad-except)
+# FIXME: W0703: Catching too general exception Exception (broad-except)
 # pylint: disable=W0703
 
-#TODO: Use async callbacks for retreival of progress and pass via rabbit to server
+# TODO: Use async callbacks for retrieval of progress and pass via rabbit to server
 
 
 def make_async(func):
     @wraps(func)
     async def async_wrapper(self, *args, **kwargs):
-        blocking_task = self.loop.run_in_executor(self.pool, func, self, *args, **kwargs)
+        blocking_task = self.loop.run_in_executor(
+            self.pool, func, self, *args, **kwargs
+        )
         _completed, _pending = await asyncio.wait([blocking_task])
         results = [t.result() for t in _completed]
         # TODO: does this always work?
         return results[0]
+
     return async_wrapper
 
+
 class DatcoreWrapper:
     """ Wrapper to call the python2 api from datcore
 
        This can go away now. Next cleanup round... 
""" + # pylint: disable=R0913 # Too many arguments - def __init__(self, api_token: str, api_secret: str, loop: object, pool: ThreadPoolExecutor): + def __init__( + self, api_token: str, api_secret: str, loop: object, pool: ThreadPoolExecutor + ): self.api_token = api_token self.api_secret = api_secret self.loop = loop self.pool = pool - self.d_client = DatcoreClient(api_token=api_token, api_secret=api_secret, - host='https://api.blackfynn.io') + self.d_client = DatcoreClient( + api_token=api_token, api_secret=api_secret, host="https://api.blackfynn.io" + ) @make_async - def list_files_recursively(self)->FileMetaDataVec: #pylint: disable=W0613 + def list_files_recursively(self) -> FileMetaDataVec: # pylint: disable=W0613 files = [] try: files = self.d_client.list_files_recursively() @@ -61,7 +69,7 @@ def list_files_recursively(self)->FileMetaDataVec: #pylint: disable=W0613 return files @make_async - def list_files_raw(self)->FileMetaDataExVec: #pylint: disable=W0613 + def list_files_raw(self) -> FileMetaDataExVec: # pylint: disable=W0613 files = [] try: files = self.d_client.list_files_raw() @@ -71,7 +79,9 @@ def list_files_raw(self)->FileMetaDataExVec: #pylint: disable=W0613 return files @make_async - def list_files_raw_dataset(self, dataset_id: str)->FileMetaDataExVec: #pylint: disable=W0613 + def list_files_raw_dataset( + self, dataset_id: str + ) -> FileMetaDataExVec: # pylint: disable=W0613 files = [] try: files = self.d_client.list_files_raw_dataset(dataset_id) @@ -140,14 +150,16 @@ def delete_test_dataset(self, dataset): logger.exception("Error deleting test dataset") @make_async - def upload_file(self, destination: str, local_path: str, meta_data: FileMetaData = None): + def upload_file( + self, destination: str, local_path: str, meta_data: FileMetaData = None + ): json_meta = "" if meta_data: json_meta = json.dumps(attr.asdict(meta_data)) try: str_meta = json_meta result = False - if str_meta : + if str_meta: meta_data = json.loads(str_meta) result = self.d_client.upload_file(destination, local_path, meta_data) else: diff --git a/services/storage/src/simcore_service_storage/db.py b/services/storage/src/simcore_service_storage/db.py index b2f736219c7..59119b3d657 100644 --- a/services/storage/src/simcore_service_storage/db.py +++ b/services/storage/src/simcore_service_storage/db.py @@ -1,10 +1,14 @@ import logging from aiohttp import web -from servicelib.aiopg_utils import (DataSourceName, - PostgresRetryPolicyUponInitialization, - create_pg_engine, init_pg_tables, - is_pg_responsive, raise_if_not_responsive) +from servicelib.aiopg_utils import ( + DataSourceName, + PostgresRetryPolicyUponInitialization, + create_pg_engine, + init_pg_tables, + is_pg_responsive, + raise_if_not_responsive, +) from tenacity import Retrying from .models import metadata @@ -12,26 +16,25 @@ log = logging.getLogger(__name__) -THIS_SERVICE_NAME = 'postgres' +THIS_SERVICE_NAME = "postgres" async def pg_engine(app: web.Application): pg_cfg = app[APP_CONFIG_KEY][THIS_SERVICE_NAME] dsn = DataSourceName( - application_name=f'{__name__}_{id(app)}', - database=pg_cfg['database'], - user=pg_cfg['user'], - password=pg_cfg['password'], - host=pg_cfg['host'], - port=pg_cfg['port'] - ) + application_name=f"{__name__}_{id(app)}", + database=pg_cfg["database"], + user=pg_cfg["user"], + password=pg_cfg["password"], + host=pg_cfg["host"], + port=pg_cfg["port"], + ) log.info("Creating pg engine for %s", dsn) for attempt in Retrying(**PostgresRetryPolicyUponInitialization(log).kwargs): with attempt: - engine = await 
create_pg_engine(dsn, - minsize=pg_cfg['minsize'], - maxsize=pg_cfg['maxsize'] + engine = await create_pg_engine( + dsn, minsize=pg_cfg["minsize"], maxsize=pg_cfg["maxsize"] ) await raise_if_not_responsive(engine) @@ -39,10 +42,10 @@ async def pg_engine(app: web.Application): log.info("Initializing tables for %s", dsn) init_pg_tables(dsn, schema=metadata) - assert engine # nosec + assert engine # nosec app[APP_DB_ENGINE_KEY] = engine - yield # ---------- + yield # ---------- if engine is not app.get(APP_DB_ENGINE_KEY): log.critical("app does not hold right db engine. Somebody has changed it??") @@ -50,11 +53,15 @@ async def pg_engine(app: web.Application): if engine: engine.close() await engine.wait_closed() - log.debug("engine '%s' after shutdown: closed=%s, size=%d", engine.dsn, engine.closed, engine.size) - + log.debug( + "engine '%s' after shutdown: closed=%s, size=%d", + engine.dsn, + engine.closed, + engine.size, + ) -async def is_service_responsive(app:web.Application): +async def is_service_responsive(app: web.Application): """ Returns true if the app can connect to db service """ @@ -63,7 +70,7 @@ async def is_service_responsive(app:web.Application): def setup_db(app: web.Application): - disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services",[]) + disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services", []) if THIS_SERVICE_NAME in disable_services: app[APP_DB_ENGINE_KEY] = None diff --git a/services/storage/src/simcore_service_storage/db_tokens.py b/services/storage/src/simcore_service_storage/db_tokens.py index 271a9705bf0..e54dab43d45 100644 --- a/services/storage/src/simcore_service_storage/db_tokens.py +++ b/services/storage/src/simcore_service_storage/db_tokens.py @@ -1,4 +1,3 @@ - import logging from typing import Tuple @@ -18,7 +17,7 @@ @retry(**PostgresRetryPolicyUponOperation(log).kwargs) async def _get_tokens_from_db(engine, userid): async with engine.acquire() as conn: - stmt = sa.select([tokens, ]).where(tokens.c.user_id == userid) + stmt = sa.select([tokens,]).where(tokens.c.user_id == userid) result = await conn.execute(stmt) row = await result.first() data = dict(row) if row else {} @@ -32,17 +31,22 @@ async def get_api_token_and_secret(request: web.Request, userid) -> Tuple[str, s # defaults from config if any, othewise None defaults = request.app[APP_CONFIG_KEY]["main"].get("test_datcore", {}) - api_token, api_secret = defaults.get('api_token'), defaults.get('api_secret') + api_token, api_secret = defaults.get("api_token"), defaults.get("api_secret") if engine: try: data = await _get_tokens_from_db(engine, userid) except DbApiError: # NOTE this shall not log as error since is a possible outcome with an alternative - log.warning("Cannot retrieve tokens for user %s in pgdb %s", userid, engine, exc_info=True) + log.warning( + "Cannot retrieve tokens for user %s in pgdb %s", + userid, + engine, + exc_info=True, + ) else: - data = data.get('token_data', {}) - api_token = data.get('token_key', api_token) - api_secret = data.get('token_secret', api_secret) + data = data.get("token_data", {}) + api_token = data.get("token_key", api_token) + api_secret = data.get("token_secret", api_secret) return api_token, api_secret diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index 24d6638e68d..0c57defb7e2 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -24,13 +24,26 @@ from 
servicelib.client_session import get_client_session from .datcore_wrapper import DatcoreWrapper -from .models import (DatasetMetaData, FileMetaData, FileMetaDataEx, - _location_from_id, file_meta_data, projects, - user_to_projects) +from .models import ( + DatasetMetaData, + FileMetaData, + FileMetaDataEx, + _location_from_id, + file_meta_data, + projects, + user_to_projects, +) from .s3 import get_config_s3 -from .settings import (APP_CONFIG_KEY, APP_DB_ENGINE_KEY, APP_DSM_KEY, - APP_S3_KEY, DATCORE_ID, DATCORE_STR, SIMCORE_S3_ID, - SIMCORE_S3_STR) +from .settings import ( + APP_CONFIG_KEY, + APP_DB_ENGINE_KEY, + APP_DSM_KEY, + APP_S3_KEY, + DATCORE_ID, + DATCORE_STR, + SIMCORE_S3_ID, + SIMCORE_S3_STR, +) # pylint: disable=no-value-for-parameter @@ -48,6 +61,7 @@ FileMetaDataExVec = List[FileMetaDataEx] DatasetMetaDataVec = List[DatasetMetaData] + async def _setup_dsm(app: web.Application): cfg = app[APP_CONFIG_KEY] @@ -64,16 +78,20 @@ async def _setup_dsm(app: web.Application): bucket_name = s3_cfg["bucket_name"] testing = main_cfg["testing"] - dsm = DataStorageManager(s3_client, engine, loop, pool, bucket_name, not testing, app) + dsm = DataStorageManager( + s3_client, engine, loop, pool, bucket_name, not testing, app + ) app[APP_DSM_KEY] = dsm yield - #clean up + # clean up + def setup_dsm(app: web.Application): app.cleanup_ctx.append(_setup_dsm) + @attr.s(auto_attribs=True) class DatCoreApiToken: api_token: str = None @@ -82,6 +100,7 @@ class DatCoreApiToken: def to_tuple(self): return (self.api_token, self.api_secret) + @attr.s(auto_attribs=True) class DataStorageManager: """ Data storage manager @@ -112,42 +131,39 @@ class DataStorageManager: https://blog.minio.io/part-5-5-publish-minio-events-via-postgresql-50f6cc7a7346 https://docs.minio.io/docs/minio-bucket-notification-guide.html """ + s3_client: S3Client engine: Engine loop: object pool: ThreadPoolExecutor simcore_bucket_name: str has_project_db: bool - app: web.Application=None + app: web.Application = None - datcore_tokens: Dict[str, DatCoreApiToken]=attr.Factory(dict) + datcore_tokens: Dict[str, DatCoreApiToken] = attr.Factory(dict) # TODO: perhaps can be used a cache? add a lifetime? 
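# --- Editor's note (illustrative sketch, not part of this patch) --------------
# The TODO above asks for a cache with a lifetime for the user -> DatCoreApiToken
# map. A minimal sketch of what that could look like; the name `_ExpiringTokenMap`
# and the ttl default are hypothetical, not existing code in this repo.
import time
from typing import Dict, Optional, Tuple


class _ExpiringTokenMap:
    """user_id -> token map whose entries expire after `ttl_seconds`."""

    def __init__(self, ttl_seconds: float = 3600.0):
        self._ttl = ttl_seconds
        self._data: Dict[str, Tuple[object, float]] = {}

    def get(self, user_id: str) -> Optional[object]:
        entry = self._data.get(user_id)
        if entry is None:
            return None
        token, deadline = entry
        if time.monotonic() > deadline:
            # entry outlived its lifetime: drop it and report a miss
            del self._data[user_id]
            return None
        return token

    def put(self, user_id: str, token: object) -> None:
        self._data[user_id] = (token, time.monotonic() + self._ttl)
# Usage would mirror the dict lookup below: a `get` miss falls back to a fresh
# DatCoreApiToken, and `put` refreshes the deadline on every successful lookup.
# ------------------------------------------------------------------------------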
- - def _get_datcore_tokens(self, user_id: str)->Tuple[str, str]: - token = self.datcore_tokens.get(user_id, DatCoreApiToken()) # pylint: disable=E1101 + def _get_datcore_tokens(self, user_id: str) -> Tuple[str, str]: + # pylint: disable=no-member + token = self.datcore_tokens.get( + user_id, DatCoreApiToken() + ) return token.to_tuple() async def locations(self, user_id: str): locs = [] - simcore_s3 = { - "name" : SIMCORE_S3_STR, - "id" : SIMCORE_S3_ID - } + simcore_s3 = {"name": SIMCORE_S3_STR, "id": SIMCORE_S3_ID} locs.append(simcore_s3) ping_ok = await self.ping_datcore(user_id=user_id) if ping_ok: - datcore = { - "name" : DATCORE_STR, - "id" : DATCORE_ID - } + datcore = {"name": DATCORE_STR, "id": DATCORE_ID} locs.append(datcore) return locs @classmethod - def location_from_id(cls, location_id : str): + def location_from_id(cls, location_id: str): return _location_from_id(location_id) async def ping_datcore(self, user_id: str) -> bool: @@ -175,7 +191,9 @@ async def ping_datcore(self, user_id: str) -> bool: # pylint: disable=too-many-arguments # pylint: disable=too-many-branches # pylint: disable=too-many-statements - async def list_files(self, user_id: str, location: str, uuid_filter: str ="", regex: str="") -> FileMetaDataExVec: + async def list_files( + self, user_id: str, location: str, uuid_filter: str = "", regex: str = "" + ) -> FileMetaDataExVec: """ Returns a list of file paths Works for simcore.s3 and datcore @@ -187,7 +205,9 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re data = [] if location == SIMCORE_S3_STR: async with self.engine.acquire() as conn: - query = sa.select([file_meta_data]).where(file_meta_data.c.user_id == user_id) + query = sa.select([file_meta_data]).where( + file_meta_data.c.user_id == user_id + ) async for row in conn.execute(query): result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) @@ -201,16 +221,19 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re try: async with self.engine.acquire() as conn: joint_table = user_to_projects.join(projects) - query = sa.select([projects]).select_from(joint_table)\ + query = ( + sa.select([projects]) + .select_from(joint_table) .where(user_to_projects.c.user_id == user_id) + ) async for row in conn.execute(query): proj_data = dict(row.items()) uuid_name_dict[proj_data["uuid"]] = proj_data["name"] - wb = proj_data['workbench'] + wb = proj_data["workbench"] for node in wb.keys(): - uuid_name_dict[node] = wb[node]['label'] + uuid_name_dict[node] = wb[node]["label"] except DBAPIError as _err: logger.exception("Error querying database for project names") @@ -227,20 +250,34 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re if d.node_id in uuid_name_dict: d.node_name = uuid_name_dict[d.node_id] - d.raw_file_path = str(Path(d.project_id) / Path(d.node_id) / Path(d.file_name)) + d.raw_file_path = str( + Path(d.project_id) / Path(d.node_id) / Path(d.file_name) + ) d.display_file_path = d.raw_file_path d.file_id = d.file_uuid if d.node_name and d.project_name: - d.display_file_path = str(Path(d.project_name) / Path(d.node_name) / Path(d.file_name)) + d.display_file_path = str( + Path(d.project_name) + / Path(d.node_name) + / Path(d.file_name) + ) async with self.engine.acquire() as conn: - query = file_meta_data.update().\ - where(and_(file_meta_data.c.node_id==d.node_id, - file_meta_data.c.user_id==d.user_id)).\ - values(project_name=d.project_name, - node_name = d.node_name, + query = ( 
+ file_meta_data.update() + .where( + and_( + file_meta_data.c.node_id == d.node_id, + file_meta_data.c.user_id == d.user_id, + ) + ) + .values( + project_name=d.project_name, + node_name=d.node_name, raw_file_path=d.raw_file_path, file_id=d.file_id, - display_file_path=d.display_file_path) + display_file_path=d.display_file_path, + ) + ) await conn.execute(query) clean_data.append(dx) @@ -251,21 +288,40 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re # MaG: This is inefficient: Do this automatically when file is modified _loop = asyncio.get_event_loop() session = aiobotocore.get_session(loop=_loop) - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - aws_secret_access_key=self.s3_client.secret_key) as client: - responses = await asyncio.gather(*[client.list_objects_v2(Bucket=_d.bucket_name, Prefix=_d.object_name) for _d in [__d.fmd for __d in data]]) + async with session.create_client( + "s3", + endpoint_url=self.s3_client.endpoint_url, + aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key, + ) as client: + responses = await asyncio.gather( + *[ + client.list_objects_v2( + Bucket=_d.bucket_name, Prefix=_d.object_name + ) + for _d in [__d.fmd for __d in data] + ] + ) for dx, resp in zip(data, responses): - if 'Contents' in resp: + if "Contents" in resp: clean_data.append(dx) d = dx.fmd - d.file_size = resp['Contents'][0]['Size'] - d.last_modified = str(resp['Contents'][0]['LastModified']) + d.file_size = resp["Contents"][0]["Size"] + d.last_modified = str(resp["Contents"][0]["LastModified"]) async with self.engine.acquire() as conn: - query = file_meta_data.update().\ - where(and_(file_meta_data.c.node_id==d.node_id, - file_meta_data.c.user_id==d.user_id)).\ - values(file_size=d.file_size, - last_modified=d.last_modified) + query = ( + file_meta_data.update() + .where( + and_( + file_meta_data.c.node_id == d.node_id, + file_meta_data.c.user_id == d.user_id, + ) + ) + .values( + file_size=d.file_size, + last_modified=d.last_modified, + ) + ) await conn.execute(query) data = clean_data @@ -298,11 +354,15 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re return data - async def list_files_dataset(self, user_id: str, location: str, dataset_id: str)->FileMetaDataVec: + async def list_files_dataset( + self, user_id: str, location: str, dataset_id: str + ) -> FileMetaDataVec: # this is a cheap shot, needs fixing once storage/db is in sync data = [] if location == SIMCORE_S3_STR: - data = await self.list_files(user_id, location, uuid_filter=dataset_id+"/") + data = await self.list_files( + user_id, location, uuid_filter=dataset_id + "/" + ) elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) @@ -324,12 +384,17 @@ async def list_datasets(self, user_id: str, location: str) -> DatasetMetaDataVec try: async with self.engine.acquire() as conn: joint_table = user_to_projects.join(projects) - query = sa.select([projects]).select_from(joint_table)\ - .where(user_to_projects.c.user_id == user_id) + query = ( + sa.select([projects]) + .select_from(joint_table) + .where(user_to_projects.c.user_id == user_id) + ) async for row in conn.execute(query): proj_data = dict(row.items()) - dmd = DatasetMetaData(dataset_id=proj_data["uuid"], - display_name=proj_data["name"]) + dmd = DatasetMetaData( + dataset_id=proj_data["uuid"], 
+ display_name=proj_data["name"], + ) data.append(dmd) except DBAPIError as _err: logger.exception("Error querying database for project names") @@ -340,12 +405,18 @@ async def list_datasets(self, user_id: str, location: str) -> DatasetMetaDataVec return data - async def list_file(self, user_id: str, location: str, file_uuid: str) -> FileMetaDataEx: + async def list_file( + self, user_id: str, location: str, file_uuid: str + ) -> FileMetaDataEx: if location == SIMCORE_S3_STR: # TODO: get engine from outside async with self.engine.acquire() as conn: - query = sa.select([file_meta_data]).where(and_(file_meta_data.c.user_id == user_id, - file_meta_data.c.file_uuid == file_uuid)) + query = sa.select([file_meta_data]).where( + and_( + file_meta_data.c.user_id == user_id, + file_meta_data.c.file_uuid == file_uuid, + ) + ) async for row in conn.execute(query): result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) @@ -354,7 +425,7 @@ async def list_file(self, user_id: str, location: str, file_uuid: str) -> FileMe elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) _dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) - data = [] #await _dcw.list_file(file_uuid) + data = [] # await _dcw.list_file(file_uuid) return data async def delete_file(self, user_id: str, location: str, file_uuid: str): @@ -371,14 +442,20 @@ async def delete_file(self, user_id: str, location: str, file_uuid: str): if location == SIMCORE_S3_STR: to_delete = [] async with self.engine.acquire() as conn: - query = sa.select([file_meta_data]).where(file_meta_data.c.file_uuid == file_uuid) + query = sa.select([file_meta_data]).where( + file_meta_data.c.file_uuid == file_uuid + ) async for row in conn.execute(query): result_dict = dict(zip(row._result_proxy.keys, row._row)) d = FileMetaData(**result_dict) # make sure this is the current user if d.user_id == user_id: - if self.s3_client.remove_objects(d.bucket_name, [d.object_name]): - stmt = file_meta_data.delete().where(file_meta_data.c.file_uuid == file_uuid) + if self.s3_client.remove_objects( + d.bucket_name, [d.object_name] + ): + stmt = file_meta_data.delete().where( + file_meta_data.c.file_uuid == file_uuid + ) to_delete.append(stmt) async with self.engine.acquire() as conn: @@ -388,11 +465,13 @@ async def delete_file(self, user_id: str, location: str, file_uuid: str): elif location == DATCORE_STR: api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) - #destination, filename = _parse_datcore(file_uuid) + # destination, filename = _parse_datcore(file_uuid) file_id = file_uuid return await dcw.delete_file_by_id(file_id) - async def upload_file_to_datcore(self, user_id: str, local_file_path: str, destination_id: str): # pylint: disable=W0613 + async def upload_file_to_datcore( + self, user_id: str, local_file_path: str, destination_id: str + ): # pylint: disable=W0613 # uploads a locally available file to dat core given the storage path, optionally attached some meta data api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) @@ -400,16 +479,16 @@ async def upload_file_to_datcore(self, user_id: str, local_file_path: str, desti # actually we have to query the master db - async def upload_link(self, user_id: str, file_uuid: str): - @retry(**postgres_service_retry_policy_kwargs) async def _execute_query(): async with self.engine.acquire() as conn: fmd = 
FileMetaData() fmd.simcore_from_uuid(file_uuid, self.simcore_bucket_name) fmd.user_id = user_id - query = sa.select([file_meta_data]).where(file_meta_data.c.file_uuid == file_uuid) + query = sa.select([file_meta_data]).where( + file_meta_data.c.file_uuid == file_uuid + ) # if file already exists, we might want to update a time-stamp rows = await conn.execute(query) exists = await rows.scalar() @@ -431,7 +510,9 @@ async def copy_file_s3_s3(self, user_id: str, dest_uuid: str, source_uuid: str): from_object_name = source_uuid from_bucket_object_name = os.path.join(from_bucket, from_object_name) # FIXME: This is not async! - self.s3_client.copy_object(to_bucket_name, to_object_name, from_bucket_object_name) + self.s3_client.copy_object( + to_bucket_name, to_object_name, from_bucket_object_name + ) # update db async with self.engine.acquire() as conn: fmd = FileMetaData() @@ -440,7 +521,9 @@ async def copy_file_s3_s3(self, user_id: str, dest_uuid: str, source_uuid: str): ins = file_meta_data.insert().values(**vars(fmd)) await conn.execute(ins) - async def copy_file_s3_datcore(self, user_id: str, dest_uuid: str, source_uuid: str): + async def copy_file_s3_datcore( + self, user_id: str, dest_uuid: str, source_uuid: str + ): # source is s3, get link and copy to datcore bucket_name = self.simcore_bucket_name object_name = source_uuid @@ -451,40 +534,60 @@ async def copy_file_s3_datcore(self, user_id: str, dest_uuid: str, source_uuid: session = get_client_session(self.app) async with session.get(url) as resp: if resp.status == 200: - f = await aiofiles.open(local_file_path, mode='wb') + f = await aiofiles.open(local_file_path, mode="wb") await f.write(await resp.read()) await f.close() # and then upload - await self.upload_file_to_datcore(user_id=user_id, local_file_path=local_file_path, - destination_id=dest_uuid) + await self.upload_file_to_datcore( + user_id=user_id, + local_file_path=local_file_path, + destination_id=dest_uuid, + ) shutil.rmtree(tmp_dirpath) - async def copy_file_datcore_s3(self, user_id: str, dest_uuid: str, source_uuid: str, filename_missing: bool=False): + async def copy_file_datcore_s3( + self, + user_id: str, + dest_uuid: str, + source_uuid: str, + filename_missing: bool = False, + ): # 2 steps: Get download link for local copy, the upload link to s3 # TODO: This should be a redirect stream! 
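# --- Editor's note (illustrative sketch, not part of this patch) --------------
# The TODO above suggests streaming instead of buffering through a temp file.
# A hedged sketch with aiohttp: pipe the datcore download straight into the S3
# presigned PUT as an async-generator payload. Assumptions: `session` is an
# aiohttp.ClientSession and the source response carries a Content-Length header
# (presigned S3 PUTs generally reject chunked uploads without one); the helper
# name `_stream_copy` is hypothetical.
import aiohttp


async def _stream_copy(
    session: aiohttp.ClientSession, src_url: str, dst_url: str, chunk: int = 1 << 20
) -> None:
    async with session.get(src_url) as src:
        src.raise_for_status()
        # forward the size so the presigned PUT is not chunked
        headers = {"Content-Length": src.headers["Content-Length"]}

        async def _chunks():
            # aiohttp accepts an async generator as the request payload
            async for piece in src.content.iter_chunked(chunk):
                yield piece

        async with session.put(dst_url, data=_chunks(), headers=headers) as dst:
            dst.raise_for_status()
# ------------------------------------------------------------------------------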
- dc_link, filename = await self.download_link_datcore(user_id=user_id, file_id=source_uuid) + dc_link, filename = await self.download_link_datcore( + user_id=user_id, file_id=source_uuid + ) if filename_missing: - dest_uuid = str(Path(dest_uuid)/ filename) + dest_uuid = str(Path(dest_uuid) / filename) s3_upload_link = await self.upload_link(user_id, dest_uuid) tmp_dirpath = tempfile.mkdtemp() - local_file_path = os.path.join(tmp_dirpath,filename) + local_file_path = os.path.join(tmp_dirpath, filename) session = get_client_session(self.app) async with session.get(dc_link) as resp: if resp.status == 200: - f = await aiofiles.open(local_file_path, mode='wb') + f = await aiofiles.open(local_file_path, mode="wb") await f.write(await resp.read()) await f.close() s3_upload_link = URL(s3_upload_link) - async with session.put(s3_upload_link, data=Path(local_file_path).open('rb')) as resp: + async with session.put( + s3_upload_link, data=Path(local_file_path).open("rb") + ) as resp: if resp.status > 299: _response_text = await resp.text() return dest_uuid - async def copy_file(self, user_id: str, dest_location: str, dest_uuid: str, source_location: str, source_uuid: str): + async def copy_file( + self, + user_id: str, + dest_location: str, + dest_uuid: str, + source_location: str, + source_uuid: str, + ): if source_location == SIMCORE_S3_STR: if dest_location == DATCORE_STR: await self.copy_file_s3_datcore(user_id, dest_uuid, source_uuid) @@ -496,21 +599,23 @@ async def copy_file(self, user_id: str, dest_location: str, dest_uuid: str, sour if dest_location == SIMCORE_S3_STR: await self.copy_file_datcore_s3(user_id, dest_uuid, source_uuid) - async def download_link_s3(self, file_uuid: str)->str: + async def download_link_s3(self, file_uuid: str) -> str: link = None bucket_name = self.simcore_bucket_name object_name = file_uuid link = self.s3_client.create_presigned_get_url(bucket_name, object_name) return link - async def download_link_datcore(self, user_id: str, file_id: str)->Dict[str,str]: + async def download_link_datcore(self, user_id: str, file_id: str) -> Dict[str, str]: link = "" api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) link, filename = await dcw.download_link_by_id(file_id) return link, filename - async def deep_copy_project_simcore_s3(self, user_id: str, source_project, destination_project, node_mapping): + async def deep_copy_project_simcore_s3( + self, user_id: str, source_project, destination_project, node_mapping + ): """ Parses a given source project and copies all related files to the destination project Since all files are organized as @@ -534,21 +639,27 @@ async def deep_copy_project_simcore_s3(self, user_id: str, source_project, desti # build up naming map based on labels uuid_name_dict = {} uuid_name_dict[dest_folder] = destination_project["name"] - for src_node_id, src_node in source_project['workbench'].items(): + for src_node_id, src_node in source_project["workbench"].items(): new_node_id = node_mapping.get(src_node_id) if new_node_id is not None: - uuid_name_dict[new_node_id] = src_node['label'] + uuid_name_dict[new_node_id] = src_node["label"] # Step 1: List all objects for this project replace them with the destination object name and do a copy at the same time collect some names _loop = asyncio.get_event_loop() session = aiobotocore.get_session(loop=_loop) - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - 
aws_secret_access_key=self.s3_client.secret_key) as client:
-            response = await client.list_objects_v2(Bucket=self.simcore_bucket_name, Prefix=source_folder)
+        async with session.create_client(
+            "s3",
+            endpoint_url=self.s3_client.endpoint_url,
+            aws_access_key_id=self.s3_client.access_key,
+            aws_secret_access_key=self.s3_client.secret_key,
+        ) as client:
+            response = await client.list_objects_v2(
+                Bucket=self.simcore_bucket_name, Prefix=source_folder
+            )
             if "Contents" in response:
-                for f in response['Contents']:
-                    source_object_name = f['Key']
+                for f in response["Contents"]:
+                    source_object_name = f["Key"]
                     source_object_parts = Path(source_object_name).parts
 
                     if len(source_object_parts) == 3:
@@ -556,92 +667,130 @@ async def deep_copy_project_simcore_s3(self, user_id: str, source_project, desti
                         new_node_id = node_mapping.get(old_node_id)
                         if new_node_id is not None:
                             old_filename = source_object_parts[2]
-                            dest_object_name = str(Path(dest_folder) / new_node_id / old_filename)
-                            copy_source = {'Bucket' : self.simcore_bucket_name, 'Key': source_object_name}
-                            response = await client.copy_object(CopySource=copy_source, Bucket=self.simcore_bucket_name, Key=dest_object_name)
+                            dest_object_name = str(
+                                Path(dest_folder) / new_node_id / old_filename
+                            )
+                            copy_source = {
+                                "Bucket": self.simcore_bucket_name,
+                                "Key": source_object_name,
+                            }
+                            response = await client.copy_object(
+                                CopySource=copy_source,
+                                Bucket=self.simcore_bucket_name,
+                                Key=dest_object_name,
+                            )
                         else:
                             # This may happen once we have shared/home folders
                             logger.info("len(object.parts != 3")
 
-
         # Step 2: List all references in outputs that point to datcore and copy over
-        for node_id, node in destination_project['workbench'].items():
+        for node_id, node in destination_project["workbench"].items():
             outputs = node.get("outputs")
             if outputs is not None:
                 for _output_key, output in outputs.items():
-                    if "store" in output and output["store"]==DATCORE_ID:
+                    if "store" in output and output["store"] == DATCORE_ID:
                         src = output["path"]
                         dest = str(Path(dest_folder) / node_id)
                         logger.info("Need to copy %s to %s", src, dest)
-                    dest = await self.copy_file_datcore_s3(user_id=user_id, dest_uuid=dest, source_uuid=src, filename_missing=True)
+                        dest = await self.copy_file_datcore_s3(
+                            user_id=user_id,
+                            dest_uuid=dest,
+                            source_uuid=src,
+                            filename_missing=True,
+                        )
                         # and change the dest project accordingly
                         output["store"] = SIMCORE_S3_ID
-                        output['path'] = dest
-                    elif "store" in output and output["store"]==SIMCORE_S3_ID:
-                        source = output['path']
-                        dest = dest = str(Path(dest_folder) / node_id / Path(source).name)
+                        output["path"] = dest
+                    elif "store" in output and output["store"] == SIMCORE_S3_ID:
+                        source = output["path"]
+                        dest = str(
+                            Path(dest_folder) / node_id / Path(source).name
+                        )
                         output["store"] = SIMCORE_S3_ID
-                        output['path'] = dest
+                        output["path"] = dest
 
         # step 3: list files first to create fmds
         session = aiobotocore.get_session(loop=_loop)
         fmds = []
-        async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key,
-                                        aws_secret_access_key=self.s3_client.secret_key) as client:
-            response = await client.list_objects_v2(Bucket=self.simcore_bucket_name, Prefix=dest_folder+"/")
-        if 'Contents' in response:
-            for f in response['Contents']:
+        async with session.create_client(
+            "s3",
+            endpoint_url=self.s3_client.endpoint_url,
+            aws_access_key_id=self.s3_client.access_key,
+            aws_secret_access_key=self.s3_client.secret_key,
+        ) as client:
+            response = await
client.list_objects_v2( + Bucket=self.simcore_bucket_name, Prefix=dest_folder + "/" + ) + if "Contents" in response: + for f in response["Contents"]: fmd = FileMetaData() fmd.simcore_from_uuid(f["Key"], self.simcore_bucket_name) fmd.project_name = uuid_name_dict.get(dest_folder, "Untitled") fmd.node_name = uuid_name_dict.get(fmd.node_id, "Untitled") fmd.raw_file_path = fmd.file_uuid - fmd.display_file_path = str(Path(fmd.project_name) / fmd.node_name / fmd.file_name) + fmd.display_file_path = str( + Path(fmd.project_name) / fmd.node_name / fmd.file_name + ) fmd.user_id = user_id - fmd.file_size = f['Size'] - fmd.last_modified = str(f['LastModified']) + fmd.file_size = f["Size"] + fmd.last_modified = str(f["LastModified"]) fmds.append(fmd) - # step 4 sync db async with self.engine.acquire() as conn: for fmd in fmds: - query = sa.select([file_meta_data]).where(file_meta_data.c.file_uuid == fmd.file_uuid) + query = sa.select([file_meta_data]).where( + file_meta_data.c.file_uuid == fmd.file_uuid + ) # if file already exists, we might w rows = await conn.execute(query) exists = await rows.scalar() if exists: - delete_me = file_meta_data.delete().where(file_meta_data.c.file_uuid == fmd.file_uuid) + delete_me = file_meta_data.delete().where( + file_meta_data.c.file_uuid == fmd.file_uuid + ) await conn.execute(delete_me) ins = file_meta_data.insert().values(**vars(fmd)) await conn.execute(ins) - async def delete_project_simcore_s3(self, user_id: str, project_id: str, node_id: Optional[str]) -> web.Response: + async def delete_project_simcore_s3( + self, user_id: str, project_id: str, node_id: Optional[str] + ) -> web.Response: """ Deletes all files from a given node in a project in simcore.s3 and updated db accordingly. If node_id is not given, then all the project files db entries are deleted. 
""" async with self.engine.acquire() as conn: delete_me = file_meta_data.delete().where( - and_(file_meta_data.c.user_id == user_id, - file_meta_data.c.project_id == project_id - )) + and_( + file_meta_data.c.user_id == user_id, + file_meta_data.c.project_id == project_id, + ) + ) if node_id: delete_me = delete_me.where(file_meta_data.c.node_id == node_id) await conn.execute(delete_me) _loop = asyncio.get_event_loop() session = aiobotocore.get_session(loop=_loop) - async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key, - aws_secret_access_key=self.s3_client.secret_key) as client: - response = await client.list_objects_v2(Bucket=self.simcore_bucket_name, - Prefix=f"{project_id}/{node_id}/" if node_id else f"{project_id}/" - ) + async with session.create_client( + "s3", + endpoint_url=self.s3_client.endpoint_url, + aws_access_key_id=self.s3_client.access_key, + aws_secret_access_key=self.s3_client.secret_key, + ) as client: + response = await client.list_objects_v2( + Bucket=self.simcore_bucket_name, + Prefix=f"{project_id}/{node_id}/" if node_id else f"{project_id}/", + ) if "Contents" in response: objects_to_delete = [] - for f in response['Contents']: - objects_to_delete.append( { 'Key': f['Key'] }) + for f in response["Contents"]: + objects_to_delete.append({"Key": f["Key"]}) if objects_to_delete: - response = await client.delete_objects(Bucket=self.simcore_bucket_name, Delete={'Objects' : objects_to_delete}) + response = await client.delete_objects( + Bucket=self.simcore_bucket_name, + Delete={"Objects": objects_to_delete}, + ) return response diff --git a/services/storage/src/simcore_service_storage/handlers.py b/services/storage/src/simcore_service_storage/handlers.py index ce024294bab..6e673481a39 100644 --- a/services/storage/src/simcore_service_storage/handlers.py +++ b/services/storage/src/simcore_service_storage/handlers.py @@ -14,77 +14,73 @@ log = logging.getLogger(__name__) -file_schema = FileMetaDataSchema() +file_schema = FileMetaDataSchema() files_schema = FileMetaDataSchema(many=True) - async def check_health(request: web.Request): - log.debug("CHECK HEALTH INCOMING PATH %s",request.path) + log.debug("CHECK HEALTH INCOMING PATH %s", request.path) await extract_and_validate(request) return { - 'name':__name__.split('.')[0], - 'version': __version__, - 'status': 'SERVICE_RUNNING' + "name": __name__.split(".")[0], + "version": __version__, + "status": "SERVICE_RUNNING", } async def check_action(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert body, "body %s" % body # nosec - if params['action'] == 'fail': + if params["action"] == "fail": raise ValueError("some randome failure") # echo's input FIXME: convert to dic # FIXME: output = fake_schema.dump(body) output = { - "path_value" : params.get('action'), - "query_value": query.get('data'), - "body_value" :{ - "key1": 1, #body.body_value.key1, - "key2": 0 #body.body_value.key2, - } + "path_value": params.get("action"), + "query_value": query.get("data"), + "body_value": { + "key1": 1, # body.body_value.key1, + "key2": 0, # body.body_value.key2, + }, } return output async def get_storage_locations(request: web.Request): - log.debug("CHECK LOCATION PATH %s %s",request.path, request.url) + log.debug("CHECK 
LOCATION PATH %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert not params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert not params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert query["user_id"] # nosec + assert query["user_id"] # nosec user_id = query["user_id"] dsm = await _prepare_storage_manager(params, query, request) locs = await dsm.locations(user_id) - return { - 'error': None, - 'data': locs - } + return {"error": None, "data": locs} async def get_datasets_metadata(request: web.Request): - log.debug("GET METADATA DATASETS %s %s",request.path, request.url) + log.debug("GET METADATA DATASETS %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -95,22 +91,20 @@ async def get_datasets_metadata(request: web.Request): # To implement data = await dsm.list_datasets(user_id, location) - return { - 'error': None, - 'data': data - } + return {"error": None, "data": data} + async def get_files_metadata(request: web.Request): - log.debug("GET FILES METADATA %s %s",request.path, request.url) + log.debug("GET FILES METADATA %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -121,32 +115,32 @@ async def get_files_metadata(request: web.Request): log.debug("list files %s %s %s", user_id, location, uuid_filter) - data = await dsm.list_files(user_id=user_id, location=location, uuid_filter=uuid_filter) + data = await dsm.list_files( + user_id=user_id, location=location, uuid_filter=uuid_filter + ) data_as_dict = [] for d in data: - log.info("DATA %s",attr.asdict(d.fmd)) - data_as_dict.append({**attr.asdict(d.fmd), 'parent_id': d.parent_id}) + log.info("DATA %s", attr.asdict(d.fmd)) + data_as_dict.append({**attr.asdict(d.fmd), "parent_id": d.parent_id}) - envelope = { - 'error': None, - 'data': data_as_dict - } + envelope = {"error": None, "data": data_as_dict} return envelope + async def get_files_metadata_dataset(request: web.Request): - log.debug("GET FILES METADATA DATASET %s %s",request.path, request.url) + log.debug("GET FILES METADATA DATASET %s %s", request.path, request.url) params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, 
"body %s" % body # nosec - assert params["location_id"] # nosec - assert params["dataset_id"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["dataset_id"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -158,17 +152,16 @@ async def get_files_metadata_dataset(request: web.Request): log.debug("list files %s %s %s", user_id, location, dataset_id) - data = await dsm.list_files_dataset(user_id=user_id, location=location, dataset_id=dataset_id) + data = await dsm.list_files_dataset( + user_id=user_id, location=location, dataset_id=dataset_id + ) data_as_dict = [] for d in data: - log.info("DATA %s",attr.asdict(d.fmd)) - data_as_dict.append({**attr.asdict(d.fmd), 'parent_id': d.parent_id}) + log.info("DATA %s", attr.asdict(d.fmd)) + data_as_dict.append({**attr.asdict(d.fmd), "parent_id": d.parent_id}) - envelope = { - 'error': None, - 'data': data_as_dict - } + envelope = {"error": None, "data": data_as_dict} return envelope @@ -176,13 +169,13 @@ async def get_files_metadata_dataset(request: web.Request): async def get_file_metadata(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -194,9 +187,9 @@ async def get_file_metadata(request: web.Request): data = await dsm.list_file(user_id=user_id, location=location, file_uuid=file_uuid) envelope = { - 'error': None, - 'data': {**attr.asdict(data.fmd), 'parent_id': data.parent_id} - } + "error": None, + "data": {**attr.asdict(data.fmd), "parent_id": data.parent_id}, + } return envelope @@ -204,13 +197,13 @@ async def get_file_metadata(request: web.Request): async def update_file_meta_data(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] _user_id = query["user_id"] @@ -223,13 +216,13 @@ async def update_file_meta_data(request: web.Request): async def download_file(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = 
params["location_id"] user_id = query["user_id"] @@ -242,20 +235,15 @@ async def download_file(request: web.Request): else: link, _filename = await dsm.download_link_datcore(user_id, file_uuid) - return { - 'error': None, - 'data': { - "link": link - } - } + return {"error": None, "data": {"link": link}} async def upload_file(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -268,29 +256,29 @@ async def upload_file(request: web.Request): source_uuid = query["extra_source"] source_id = query["extra_location"] source_location = dsm.location_from_id(source_id) - link = await dsm.copy_file(user_id=user_id, dest_location=location, - dest_uuid=file_uuid, source_location=source_location, source_uuid=source_uuid) + link = await dsm.copy_file( + user_id=user_id, + dest_location=location, + dest_uuid=file_uuid, + source_location=source_location, + source_uuid=source_uuid, + ) else: link = await dsm.upload_link(user_id=user_id, file_uuid=file_uuid) - return { - 'error': None, - 'data': { - "link":link - } - } + return {"error": None, "data": {"link": link}} async def delete_file(request: web.Request): params, query, body = await extract_and_validate(request) - assert params, "params %s" % params # nosec - assert query, "query %s" % query # nosec - assert not body, "body %s" % body # nosec + assert params, "params %s" % params # nosec + assert query, "query %s" % query # nosec + assert not body, "body %s" % body # nosec - assert params["location_id"] # nosec - assert params["fileId"] # nosec - assert query["user_id"] # nosec + assert params["location_id"] # nosec + assert params["fileId"] # nosec + assert query["user_id"] # nosec location_id = params["location_id"] user_id = query["user_id"] @@ -298,57 +286,62 @@ async def delete_file(request: web.Request): dsm = await _prepare_storage_manager(params, query, request) location = dsm.location_from_id(location_id) - _discard = await dsm.delete_file(user_id=user_id, location=location, file_uuid=file_uuid) + _discard = await dsm.delete_file( + user_id=user_id, location=location, file_uuid=file_uuid + ) - return { - 'error': None, - 'data': None - } + return {"error": None, "data": None} async def create_folders_from_project(request: web.Request): - #FIXME: Update openapi-core. Fails with additionalProperties https://github.com/p1c2u/openapi-core/issues/124. Fails with project + # FIXME: Update openapi-core. Fails with additionalProperties https://github.com/p1c2u/openapi-core/issues/124. 
Fails with project # params, query, body = await extract_and_validate(request) user_id = request.query.get("user_id") body = await request.json() - source_project = body.get('source', {}) - destination_project = body.get('destination', {}) - nodes_map = body.get('nodes_map', {}) + source_project = body.get("source", {}) + destination_project = body.get("destination", {}) + nodes_map = body.get("nodes_map", {}) - assert set(nodes_map.keys()) == set(source_project['workbench'].keys()) # nosec - assert set(nodes_map.values()) == set(destination_project['workbench'].keys()) # nosec + assert set(nodes_map.keys()) == set(source_project["workbench"].keys()) # nosec + assert set(nodes_map.values()) == set( # nosec + destination_project["workbench"].keys() # nosec + ) # nosec # TODO: validate project with jsonschema instead?? - params = { "location_id" : SIMCORE_S3_ID } - query = { "user_id": user_id} + params = {"location_id": SIMCORE_S3_ID} + query = {"user_id": user_id} dsm = await _prepare_storage_manager(params, query, request) - await dsm.deep_copy_project_simcore_s3(user_id, source_project, destination_project, nodes_map) + await dsm.deep_copy_project_simcore_s3( + user_id, source_project, destination_project, nodes_map + ) + + raise web.HTTPCreated( + text=json.dumps(destination_project), content_type="application/json" + ) - raise web.HTTPCreated(text=json.dumps(destination_project), - content_type='application/json') async def delete_folders_of_project(request: web.Request): - folder_id = request.match_info['folder_id'] + folder_id = request.match_info["folder_id"] user_id = request.query.get("user_id") node_id = request.query.get("node_id", None) - params = { "location_id" : SIMCORE_S3_ID } - query = { "user_id": user_id} + params = {"location_id": SIMCORE_S3_ID} + query = {"user_id": user_id} dsm = await _prepare_storage_manager(params, query, request) await dsm.delete_project_simcore_s3(user_id, folder_id, node_id) - raise web.HTTPNoContent(content_type='application/json') - - - + raise web.HTTPNoContent(content_type="application/json") # HELPERS ----------------------------------------------------- INIT_STR = "init" -async def _prepare_storage_manager(params, query, request: web.Request) -> DataStorageManager: + +async def _prepare_storage_manager( + params, query, request: web.Request +) -> DataStorageManager: dsm = request.app[APP_DSM_KEY] user_id = query.get("user_id") diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 7552a81fdb8..d5caa05d8b4 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -7,17 +7,21 @@ import attr -from simcore_postgres_database.storage_models import (file_meta_data, metadata, - projects, tokens, - user_to_projects, users) -from simcore_service_storage.settings import (DATCORE_STR, SIMCORE_S3_ID, - SIMCORE_S3_STR) +from simcore_postgres_database.storage_models import ( + file_meta_data, + metadata, + projects, + tokens, + user_to_projects, + users, +) +from simcore_service_storage.settings import DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR -#FIXME: W0611:Unused UUID imported from sqlalchemy.dialects.postgresql -#from sqlalchemy.dialects.postgresql import UUID +# FIXME: W0611:Unused UUID imported from sqlalchemy.dialects.postgresql +# from sqlalchemy.dialects.postgresql import UUID -#FIXME: R0902: Too many instance attributes (11/7) (too-many-instance-attributes) -#pylint: disable=R0902 +# FIXME: R0902: 
Too many instance attributes (11/7) (too-many-instance-attributes) +# pylint: disable=R0902 def _parse_datcore(file_uuid: str) -> Tuple[str, str]: @@ -29,19 +33,15 @@ def _parse_datcore(file_uuid: str) -> Tuple[str, str]: return destination, file_name + def _locations(): # TODO: so far this is hardcoded - simcore_s3 = { - "name" : SIMCORE_S3_STR, - "id" : 0 - } - datcore = { - "name" : DATCORE_STR, - "id" : 1 - } + simcore_s3 = {"name": SIMCORE_S3_STR, "id": 0} + datcore = {"name": DATCORE_STR, "id": 1} return [simcore_s3, datcore] -def _location_from_id(location_id : str) ->str: + +def _location_from_id(location_id: str) -> str: # TODO create a map to sync _location_from_id and _location_from_str loc_str = "undefined" if location_id == "0": @@ -51,7 +51,8 @@ def _location_from_id(location_id : str) ->str: return loc_str -def _location_from_str(location : str) ->str: + +def _location_from_str(location: str) -> str: intstr = "undefined" if location == SIMCORE_S3_STR: intstr = "0" @@ -60,10 +61,12 @@ def _location_from_str(location : str) ->str: return intstr + @attr.s(auto_attribs=True) class DatasetMetaData: - dataset_id: str="" - display_name: str="" + dataset_id: str = "" + display_name: str = "" + class FileMetaData: """ This is a proposal, probably not everything is needed. @@ -110,7 +113,8 @@ class FileMetaData: state: one of OK, UPLOADING, DELETED """ - #pylint: disable=attribute-defined-outside-init + + # pylint: disable=attribute-defined-outside-init def simcore_from_uuid(self, file_uuid: str, bucket_name: str): parts = file_uuid.split("/") if len(parts) == 3: @@ -124,31 +128,35 @@ def simcore_from_uuid(self, file_uuid: str, bucket_name: str): self.file_uuid = file_uuid self.file_id = file_uuid self.raw_file_path = self.file_uuid - self.display_file_path = str(Path("not") / Path("yet") / Path("implemented")) + self.display_file_path = str( + Path("not") / Path("yet") / Path("implemented") + ) self.created_at = str(datetime.datetime.now()) self.last_modified = self.created_at self.file_size = -1 def __str__(self): d = attr.asdict(self) - _str ="" + _str = "" for _d in d: _str += " {0: <25}: {1}\n".format(_d, str(d[_d])) return _str attr.s( - these={c.name:attr.ib(default=None) for c in file_meta_data.c}, + these={c.name: attr.ib(default=None) for c in file_meta_data.c}, init=True, - kw_only=True)(FileMetaData) + kw_only=True, +)(FileMetaData) @attr.s(auto_attribs=True) -class FileMetaDataEx(): +class FileMetaDataEx: """Extend the base type by some additional attributes that shall not end up in the db """ + fmd: FileMetaData - parent_id: str="" + parent_id: str = "" def __str__(self): _str = str(self.fmd) @@ -164,5 +172,5 @@ def __str__(self): "FileMetaDataEx", "projects", "users", - "user_to_projects" + "user_to_projects", ] diff --git a/services/storage/src/simcore_service_storage/resources.py b/services/storage/src/simcore_service_storage/resources.py index 4d249f1ffec..a7a22e67d4e 100644 --- a/services/storage/src/simcore_service_storage/resources.py +++ b/services/storage/src/simcore_service_storage/resources.py @@ -12,7 +12,4 @@ ) -__all__ = ( - 'resources', - 'RSC_CONFIG_DIR_KEY' -) +__all__ = ("resources", "RSC_CONFIG_DIR_KEY") diff --git a/services/storage/src/simcore_service_storage/rest.py b/services/storage/src/simcore_service_storage/rest.py index 1efae24187e..05e3a989b05 100644 --- a/services/storage/src/simcore_service_storage/rest.py +++ b/services/storage/src/simcore_service_storage/rest.py @@ -32,7 +32,7 @@ def setup(app: web.Application): """ log.debug("Setting up %s 
...", __name__) - spec_path = resources.get_path('api/v0/openapi.yaml') + spec_path = resources.get_path("api/v0/openapi.yaml") with spec_path.open() as fh: spec_dict = yaml.safe_load(fh) api_specs = openapi_core.create_spec(spec_dict, spec_path.as_uri()) @@ -44,7 +44,7 @@ def setup(app: web.Application): routes = rest_routes.create(api_specs) app.router.add_routes(routes) - log.debug("routes:\n\t%s", "\n\t".join(map(str, routes)) ) + log.debug("routes:\n\t%s", "\n\t".join(map(str, routes))) # Enable error, validation and envelop middleware on API routes base_path = get_base_path(api_specs) @@ -54,6 +54,4 @@ def setup(app: web.Application): # alias setup_rest = setup -__all__ = ( - 'setup_rest' -) +__all__ = "setup_rest" diff --git a/services/storage/src/simcore_service_storage/rest_config.py b/services/storage/src/simcore_service_storage/rest_config.py index d835198d6c6..c05e30c90ea 100644 --- a/services/storage/src/simcore_service_storage/rest_config.py +++ b/services/storage/src/simcore_service_storage/rest_config.py @@ -8,12 +8,8 @@ from .settings import APP_OPENAPI_SPECS_KEY -CONFIG_SECTION_NAME: str = 'rest' +CONFIG_SECTION_NAME: str = "rest" schema: T.Dict = minimal_addon_schema() -__all__ = ( - 'APP_OPENAPI_SPECS_KEY', - 'CONFIG_SECTION_NAME', - 'schema' -) +__all__ = ("APP_OPENAPI_SPECS_KEY", "CONFIG_SECTION_NAME", "schema") diff --git a/services/storage/src/simcore_service_storage/rest_models.py b/services/storage/src/simcore_service_storage/rest_models.py index 177ef3153da..f3b2893fe6e 100644 --- a/services/storage/src/simcore_service_storage/rest_models.py +++ b/services/storage/src/simcore_service_storage/rest_models.py @@ -6,6 +6,7 @@ # NOTE: using these, optional and required fields are always transmitted! # NOTE: make some attrs nullable by default!? + class FileMetaDataSchema(Schema): filename = fields.Str() version = fields.Str() @@ -14,5 +15,4 @@ class FileMetaDataSchema(Schema): storage_location = fields.Str() - # TODO: fix __all__ diff --git a/services/storage/src/simcore_service_storage/rest_routes.py b/services/storage/src/simcore_service_storage/rest_routes.py index 2cb95fb11ad..69f80b59595 100644 --- a/services/storage/src/simcore_service_storage/rest_routes.py +++ b/services/storage/src/simcore_service_storage/rest_routes.py @@ -18,7 +18,7 @@ def create(specs: OpenApiSpec) -> List[web.RouteDef]: # TODO: consider the case in which server creates routes for both v0 and v1!!! # TODO: should this be taken from servers instead? 
- BASEPATH = '/v' + specs.info.version.split('.')[0] + BASEPATH = "/v" + specs.info.version.split(".")[0] log.debug("creating %s ", __name__) routes = [] @@ -27,57 +27,66 @@ def create(specs: OpenApiSpec) -> List[web.RouteDef]: # routes = auto_routing(specs, handlers) # diagnostics -- - path, handle = '/', handlers.check_health - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/check/{action}', handlers.check_action - operation_id = specs.paths[path].operations['post'].operation_id - routes.append( web.post(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations', handlers.get_storage_locations - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/files/metadata', handlers.get_files_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/datasets', handlers.get_datasets_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/files/{fileId}/metadata', handlers.get_file_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) - - path, handle = '/locations/{location_id}/datasets/{dataset_id}/metadata', handlers.get_files_metadata_dataset - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/", handlers.check_health + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = "/check/{action}", handlers.check_action + operation_id = specs.paths[path].operations["post"].operation_id + routes.append(web.post(BASEPATH + path, handle, name=operation_id)) + + path, handle = "/locations", handlers.get_storage_locations + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/locations/{location_id}/files/metadata", + handlers.get_files_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = "/locations/{location_id}/datasets", handlers.get_datasets_metadata + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/locations/{location_id}/files/{fileId}/metadata", + handlers.get_file_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/locations/{location_id}/datasets/{dataset_id}/metadata", + handlers.get_files_metadata_dataset, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) # TODO: Implements update # path, handle = '/{location_id}/files/{fileId}/metadata', handlers.update_file_metadata # operation_id = specs.paths[path].operations['patch'].operation_id # routes.append( web.patch(BASEPATH+path, 
handle, name=operation_id) ) - path, handle = '/locations/{location_id}/files/{fileId}', handlers.download_file - operation_id = specs.paths[path].operations['get'].operation_id - routes.append( web.get(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/locations/{location_id}/files/{fileId}", handlers.download_file + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) - path, handle = '/locations/{location_id}/files/{fileId}', handlers.delete_file - operation_id = specs.paths[path].operations['delete'].operation_id - routes.append( web.delete(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/locations/{location_id}/files/{fileId}", handlers.delete_file + operation_id = specs.paths[path].operations["delete"].operation_id + routes.append(web.delete(BASEPATH + path, handle, name=operation_id)) - path, handle = '/locations/{location_id}/files/{fileId}', handlers.upload_file - operation_id = specs.paths[path].operations['put'].operation_id - routes.append( web.put(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/locations/{location_id}/files/{fileId}", handlers.upload_file + operation_id = specs.paths[path].operations["put"].operation_id + routes.append(web.put(BASEPATH + path, handle, name=operation_id)) - path, handle = '/simcore-s3/folders', handlers.create_folders_from_project - operation_id = specs.paths[path].operations['post'].operation_id - routes.append( web.post(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/simcore-s3/folders", handlers.create_folders_from_project + operation_id = specs.paths[path].operations["post"].operation_id + routes.append(web.post(BASEPATH + path, handle, name=operation_id)) - path, handle = '/simcore-s3/folders/{folder_id}', handlers.delete_folders_of_project - operation_id = specs.paths[path].operations['delete'].operation_id - routes.append( web.delete(BASEPATH+path, handle, name=operation_id) ) + path, handle = "/simcore-s3/folders/{folder_id}", handlers.delete_folders_of_project + operation_id = specs.paths[path].operations["delete"].operation_id + routes.append(web.delete(BASEPATH + path, handle, name=operation_id)) return routes diff --git a/services/storage/src/simcore_service_storage/s3.py b/services/storage/src/simcore_service_storage/s3.py index 0042b7b5426..f54bc7d5a81 100644 --- a/services/storage/src/simcore_service_storage/s3.py +++ b/services/storage/src/simcore_service_storage/s3.py @@ -15,7 +15,8 @@ log = logging.getLogger(__name__) -_SERVICE_NAME = 's3' +_SERVICE_NAME = "s3" + async def _setup_s3_bucket(app): log.debug("setup %s.setup.cleanup_ctx", __name__) @@ -24,10 +25,12 @@ async def _setup_s3_bucket(app): s3_client = app[APP_S3_KEY] cfg = app[APP_CONFIG_KEY] - @retry(wait=wait_fixed(RETRY_WAIT_SECS), + @retry( + wait=wait_fixed(RETRY_WAIT_SECS), stop=stop_after_attempt(RETRY_COUNT), before_sleep=before_sleep_log(log, logging.WARNING), - reraise=True) + reraise=True, + ) async def do_create_bucket(): s3_cfg = cfg[_SERVICE_NAME] s3_bucket = s3_cfg["bucket_name"] @@ -36,7 +39,7 @@ async def do_create_bucket(): try: await do_create_bucket() - except Exception: #pylint: disable=broad-except + except Exception: # pylint: disable=broad-except log.exception("Impossible to create s3 bucket. 
Stoping") # ok, failures_count = False, 0 @@ -60,7 +63,7 @@ def setup(app: web.Application): """ minio/s3 service setup""" log.debug("Setting up %s ...", __name__) - disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services",[]) + disable_services = app[APP_CONFIG_KEY].get("main", {}).get("disable_services", []) if _SERVICE_NAME in disable_services: log.warning("Service '%s' explicitly disabled in config", _SERVICE_NAME) @@ -73,11 +76,14 @@ def setup(app: web.Application): s3_secret_key = s3_cfg["secret_key"] s3_secure = s3_cfg["secure"] - s3_client = S3Client(s3_endpoint, s3_access_key, s3_secret_key, secure=s3_secure == 1) + s3_client = S3Client( + s3_endpoint, s3_access_key, s3_secret_key, secure=s3_secure == 1 + ) app[APP_S3_KEY] = s3_client app.cleanup_ctx.append(_setup_s3_bucket) + def get_config(app: web.Application) -> Dict: cfg = app[APP_CONFIG_KEY][_SERVICE_NAME] return cfg diff --git a/services/storage/src/simcore_service_storage/settings.py b/services/storage/src/simcore_service_storage/settings.py index 97b9f984947..c6c2e23f92b 100644 --- a/services/storage/src/simcore_service_storage/settings.py +++ b/services/storage/src/simcore_service_storage/settings.py @@ -21,7 +21,7 @@ # IMPORTANT: lowest level module # I order to avoid cyclic dependences, please # DO NOT IMPORT ANYTHING from . (except for __version__) -from .__version__ import get_version_object +from .__version__ import version log = logging.getLogger(__name__) @@ -32,40 +32,42 @@ CONNECT_TIMEOUT_SECS = 30 ## VERSION----------------------------- -service_version = get_version_object() +service_version = version ## CONFIGURATION FILES------------------ -DEFAULT_CONFIG='docker-prod-config.yaml' +DEFAULT_CONFIG = "docker-prod-config.yaml" -APP_CONFIG_KEY = application_keys.APP_CONFIG_KEY # app-storage-key for config object -RSC_CONFIG_DIR_KEY = "data" # resource folder +APP_CONFIG_KEY = application_keys.APP_CONFIG_KEY # app-storage-key for config object +RSC_CONFIG_DIR_KEY = "data" # resource folder # DSM specific constants -SIMCORE_S3_ID = 0 -SIMCORE_S3_STR = "simcore.s3" +SIMCORE_S3_ID = 0 +SIMCORE_S3_STR = "simcore.s3" -DATCORE_ID = 1 -DATCORE_STR = "datcore" +DATCORE_ID = 1 +DATCORE_STR = "datcore" # RSC=resource -RSC_CONFIG_DIR_KEY = "data" +RSC_CONFIG_DIR_KEY = "data" RSC_CONFIG_SCHEMA_KEY = RSC_CONFIG_DIR_KEY + "/config-schema-v1.json" # REST API ---------------------------- -API_MAJOR_VERSION = service_version.major # NOTE: syncs with service key +API_MAJOR_VERSION = service_version.major # NOTE: syncs with service key API_VERSION_TAG = "v{:.0f}".format(API_MAJOR_VERSION) -APP_OPENAPI_SPECS_KEY = application_keys.APP_OPENAPI_SPECS_KEY # app-storage-key for openapi specs object +APP_OPENAPI_SPECS_KEY = ( + application_keys.APP_OPENAPI_SPECS_KEY +) # app-storage-key for openapi specs object # DATABASE ---------------------------- -APP_DB_ENGINE_KEY = __name__ + '.db_engine' +APP_DB_ENGINE_KEY = __name__ + ".db_engine" # DATA STORAGE MANAGER ---------------------------------- -APP_DSM_THREADPOOL = __name__ + '.dsm_threadpool' +APP_DSM_THREADPOOL = __name__ + ".dsm_threadpool" APP_DSM_KEY = __name__ + ".DSM" APP_S3_KEY = __name__ + ".S3_CLIENT" diff --git a/services/storage/src/simcore_service_storage/utils.py b/services/storage/src/simcore_service_storage/utils.py index eddd27711b4..4e7bc86a56c 100644 --- a/services/storage/src/simcore_service_storage/utils.py +++ b/services/storage/src/simcore_service_storage/utils.py @@ -11,12 +11,15 @@ RETRY_COUNT = 20 CONNECT_TIMEOUT_SECS = 30 + 
@tenacity.retry( wait=tenacity.wait_fixed(RETRY_WAIT_SECS), stop=tenacity.stop_after_attempt(RETRY_COUNT), - before_sleep=tenacity.before_sleep_log(logger, logging.INFO) - ) -async def assert_enpoint_is_ok(session: ClientSession, url: URL, expected_response:int =200): + before_sleep=tenacity.before_sleep_log(logger, logging.INFO), +) +async def assert_enpoint_is_ok( + session: ClientSession, url: URL, expected_response: int = 200 +): """ Tenace check to GET given url endpoint Typically used to check connectivity to a given service @@ -33,5 +36,6 @@ async def assert_enpoint_is_ok(session: ClientSession, url: URL, expected_respon if resp.status != expected_response: raise AssertionError(f"{resp.status} != {expected_response}") + def is_url(location): return bool(URL(str(location)).host) diff --git a/services/storage/tests/_test_rawdatcore.py b/services/storage/tests/_test_rawdatcore.py index 9160fc11b5e..a3ea625bf5e 100644 --- a/services/storage/tests/_test_rawdatcore.py +++ b/services/storage/tests/_test_rawdatcore.py @@ -22,19 +22,19 @@ fd, path = tempfile.mkstemp() try: - with os.fdopen(fd, 'w') as tmp: + with os.fdopen(fd, "w") as tmp: # do stuff with temp file - tmp.write('stuff') + tmp.write("stuff") f = client.upload_file(destination, path) - f = client.delete_file(destination,Path(path).name) + f = client.delete_file(destination, Path(path).name) finally: os.remove(path) - files = [] + files = [] if True: dataset = client.get_dataset("mag") - # dataset.print_tree() + # dataset.print_tree() client.list_dataset_files_recursively(files, dataset, Path(dataset.name)) else: files = client.list_files_recursively() @@ -42,12 +42,11 @@ fd, path = tempfile.mkstemp() try: - with os.fdopen(fd, 'w') as tmp: + with os.fdopen(fd, "w") as tmp: # do stuff with temp file - tmp.write('stuff') + tmp.write("stuff") - - print(fd,path) + print(fd, path) destination_path = Path("mag/level1/level2/bla.txt") parts = destination_path.parts assert len(parts) > 1 @@ -72,7 +71,7 @@ def _get_collection_id(folder, _collections, collection_id): return _get_collection_id(folder, _collections, collection_id) my_id = "" - my_id =_get_collection_id(destination, collections, my_id) + my_id = _get_collection_id(destination, collections, my_id) package = client.client.get(my_id) client.upload_file(package, path) print(my_id) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index d2082496641..84dc5d0f92e 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -24,54 +24,55 @@ from simcore_service_storage.dsm import DataStorageManager, DatCoreApiToken from simcore_service_storage.models import FileMetaData from simcore_service_storage.settings import SIMCORE_S3_STR -from utils import (ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, - USER_ID) +from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, USER_ID current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -sys.path.append(str(current_dir / 'helpers')) +sys.path.append(str(current_dir / "helpers")) - -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def here(): return current_dir -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def package_dir(here): dirpath = Path(simcore_service_storage.__file__).parent assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_simcore_root_dir(here): root_dir = here.parent.parent.parent - assert 
root_dir.exists() and any(root_dir.glob("services")), "Is this service within osparc-simcore repo?" + assert root_dir.exists() and any( + root_dir.glob("services") + ), "Is this service within osparc-simcore repo?" return root_dir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_api_specs_dir(osparc_simcore_root_dir): dirpath = osparc_simcore_root_dir / "api" / "specs" assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_compose_file(here): """ Overrides pytest-docker fixture """ old = os.environ.copy() # docker-compose reads these environs - os.environ['POSTGRES_DB'] = DATABASE - os.environ['POSTGRES_USER'] = USER - os.environ['POSTGRES_PASSWORD'] = PASS - os.environ['POSTGRES_ENDPOINT'] = "FOO" # TODO: update config schema!! - os.environ['MINIO_ACCESS_KEY'] = ACCESS_KEY - os.environ['MINIO_SECRET_KEY'] = SECRET_KEY + os.environ["POSTGRES_DB"] = DATABASE + os.environ["POSTGRES_USER"] = USER + os.environ["POSTGRES_PASSWORD"] = PASS + os.environ["POSTGRES_ENDPOINT"] = "FOO" # TODO: update config schema!! + os.environ["MINIO_ACCESS_KEY"] = ACCESS_KEY + os.environ["MINIO_SECRET_KEY"] = SECRET_KEY - dc_path = here / 'docker-compose.yml' + dc_path = here / "docker-compose.yml" assert dc_path.exists() yield str(dc_path) @@ -79,50 +80,48 @@ def docker_compose_file(here): os.environ = old -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip): - url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( + url = "postgresql://{user}:{password}@{host}:{port}/{database}".format( user=USER, password=PASS, database=DATABASE, host=docker_ip, - port=docker_services.port_for('postgres', 5432), + port=docker_services.port_for("postgres", 5432), ) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: utils.is_postgres_responsive(url), - timeout=30.0, - pause=0.1, + check=lambda: utils.is_postgres_responsive(url), timeout=30.0, pause=0.1, ) postgres_service = { - 'user': USER, - 'password': PASS, - 'database': DATABASE, - 'host': docker_ip, - 'port': docker_services.port_for('postgres', 5432), - 'minsize':1, - 'maxsize':4 + "user": USER, + "password": PASS, + "database": DATABASE, + "host": docker_ip, + "port": docker_services.port_for("postgres", 5432), + "minsize": 1, + "maxsize": 4, } return postgres_service -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service_url(postgres_service, docker_services, docker_ip): - postgres_service_url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( + postgres_service_url = "postgresql://{user}:{password}@{host}:{port}/{database}".format( user=USER, password=PASS, database=DATABASE, host=docker_ip, - port=docker_services.port_for('postgres', 5432), + port=docker_services.port_for("postgres", 5432), ) return postgres_service_url -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") async def postgres_engine(loop, postgres_service_url): postgres_engine = await create_engine(postgres_service_url) @@ -133,28 +132,25 @@ async def postgres_engine(loop, postgres_service_url): await postgres_engine.wait_closed() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def minio_service(docker_services, docker_ip): - # Build URL to service listening on random port. 
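The fixtures in this conftest hunk all follow the same pytest-docker recipe: resolve the container's published port with port_for(), then poll with wait_until_responsive() until the service answers (MinIO replies 403 on its root URL, which is why the tests accept that status). A minimal sketch under those assumptions; the fixture name, probe helper, and timings are illustrative:

import pytest
import requests

def _answers_with(url: str, expected_status: int) -> bool:
    # wait_until_responsive() wants a bool, so swallow connection errors while the container boots
    try:
        return requests.get(url, timeout=1).status_code == expected_status
    except requests.RequestException:
        return False

@pytest.fixture(scope="session")
def minio_url(docker_services, docker_ip):
    url = "http://%s:%d/" % (docker_ip, docker_services.port_for("minio", 9000))
    docker_services.wait_until_responsive(
        check=lambda: _answers_with(url, 403), timeout=30.0, pause=0.1
    )
    return url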
- url = 'http://%s:%d/' % ( - docker_ip, - docker_services.port_for('minio', 9000), - ) + # Build URL to service listening on random port. + url = "http://%s:%d/" % (docker_ip, docker_services.port_for("minio", 9000),) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: utils.is_responsive(url, 403), - timeout=30.0, - pause=0.1, + check=lambda: utils.is_responsive(url, 403), timeout=30.0, pause=0.1, ) return { - 'endpoint': '{ip}:{port}'.format(ip=docker_ip, port=docker_services.port_for('minio', 9000)), - 'access_key': ACCESS_KEY, - 'secret_key': SECRET_KEY, - 'bucket_name': BUCKET_NAME, - 'secure' : 0 + "endpoint": "{ip}:{port}".format( + ip=docker_ip, port=docker_services.port_for("minio", 9000) + ), + "access_key": ACCESS_KEY, + "secret_key": SECRET_KEY, + "bucket_name": BUCKET_NAME, + "secure": 0, } @@ -163,7 +159,10 @@ def s3_client(minio_service): from s3wrapper.s3_client import S3Client s3_client = S3Client( - endpoint=minio_service['endpoint'], access_key=minio_service["access_key"], secret_key=minio_service["secret_key"]) + endpoint=minio_service["endpoint"], + access_key=minio_service["access_key"], + secret_key=minio_service["secret_key"], + ) return s3_client @@ -174,14 +173,17 @@ def _create_files(count): for _i in range(count): name = str(uuid.uuid4()) filepath = os.path.normpath( - str(tmpdir_factory.mktemp('data').join(name + ".txt"))) - with open(filepath, 'w') as fout: + str(tmpdir_factory.mktemp("data").join(name + ".txt")) + ) + with open(filepath, "w") as fout: fout.write("Hello world\n") filepaths.append(filepath) return filepaths + return _create_files + @pytest.fixture(scope="function") def dsm_mockup_complete_db(postgres_service_url, s3_client) -> Tuple[str, str]: utils.create_full_tables(url=postgres_service_url) @@ -190,21 +192,21 @@ def dsm_mockup_complete_db(postgres_service_url, s3_client) -> Tuple[str, str]: file_1 = { "project_id": "161b8782-b13e-5840-9ae2-e2250c231001", "node_id": "ad9bda7f-1dc5-5480-ab22-5fef4fc53eac", - "filename": "outputController.dat" - } - f = utils.data_dir() /Path("outputController.dat") + "filename": "outputController.dat", + } + f = utils.data_dir() / Path("outputController.dat") object_name = "{project_id}/{node_id}/{filename}".format(**file_1) s3_client.upload_file(bucket_name, object_name, f) file_2 = { "project_id": "161b8782-b13e-5840-9ae2-e2250c231001", "node_id": "a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8", - "filename": "notebooks.zip" - } - f = utils.data_dir() /Path("notebooks.zip") + "filename": "notebooks.zip", + } + f = utils.data_dir() / Path("notebooks.zip") object_name = "{project_id}/{node_id}/{filename}".format(**file_2) s3_client.upload_file(bucket_name, object_name, f) - yield (file_1,file_2) + yield (file_1, file_2) utils.drop_all_tables(url=postgres_service_url) @@ -218,13 +220,20 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) # TODO: use pip install Faker - users = ['alice', 'bob', 'chuck', 'dennis'] - - projects = ['astronomy', 'biology', 'chemistry', - 'dermatology', 'economics', 'futurology', 'geology'] + users = ["alice", "bob", "chuck", "dennis"] + + projects = [ + "astronomy", + "biology", + "chemistry", + "dermatology", + "economics", + "futurology", + "geology", + ] location = SIMCORE_S3_STR - nodes = ['alpha', 'beta', 'gamma', 'delta'] + nodes = ["alpha", "beta", "gamma", "delta"] N = 100 files = mock_files_factory(count=N) @@ -241,43 +250,41 @@ def 
dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): node = nodes[idx] node_id = idx + 10000 file_name = str(counter) - object_name = Path(str(project_id), str( - node_id), str(counter)).as_posix() + object_name = Path(str(project_id), str(node_id), str(counter)).as_posix() file_uuid = Path(object_name).as_posix() raw_file_path = file_uuid - display_file_path = str(Path(project_name)/Path(node)/Path(file_name)) + display_file_path = str(Path(project_name) / Path(node) / Path(file_name)) created_at = str(datetime.datetime.now()) file_size = 1234 assert s3_client.upload_file(bucket_name, object_name, _file) - d = {'file_uuid': file_uuid, - 'location_id': "0", - 'location': location, - 'bucket_name': bucket_name, - 'object_name': object_name, - 'project_id': str(project_id), - 'project_name': project_name, - 'node_id': str(node_id), - 'node_name': node, - 'file_name': file_name, - 'user_id': str(user_id), - 'user_name': user_name, - "file_id": str(uuid.uuid4()), - "raw_file_path": file_uuid, - "display_file_path": display_file_path, - "created_at": created_at, - "last_modified": created_at, - "file_size": file_size, - } + d = { + "file_uuid": file_uuid, + "location_id": "0", + "location": location, + "bucket_name": bucket_name, + "object_name": object_name, + "project_id": str(project_id), + "project_name": project_name, + "node_id": str(node_id), + "node_name": node, + "file_name": file_name, + "user_id": str(user_id), + "user_name": user_name, + "file_id": str(uuid.uuid4()), + "raw_file_path": file_uuid, + "display_file_path": display_file_path, + "created_at": created_at, + "last_modified": created_at, + "file_size": file_size, + } counter = counter + 1 data[object_name] = FileMetaData(**d) # pylint: disable=no-member - utils.insert_metadata(postgres_service_url, - data[object_name]) - + utils.insert_metadata(postgres_service_url, data[object_name]) total_count = 0 for _obj in s3_client.list_objects_v2(bucket_name, recursive=True): @@ -320,7 +327,8 @@ async def datcore_testbucket(loop, mock_files_factory): def dsm_fixture(s3_client, postgres_engine, loop): pool = ThreadPoolExecutor(3) dsm_fixture = DataStorageManager( - s3_client, postgres_engine, loop, pool, BUCKET_NAME, False) + s3_client, postgres_engine, loop, pool, BUCKET_NAME, False + ) api_token = os.environ.get("BF_API_KEY", "none") api_secret = os.environ.get("BF_API_SECRET", "none") @@ -348,17 +356,27 @@ async def datcore_structured_testbucket(loop, mock_files_factory): # create first level folder collection_id1 = await dcw.create_collection(dataset_id, "level1") # upload second file - file_id2 = await dcw.upload_file_to_id(collection_id1, os.path.normpath(tmp_files[1])) + file_id2 = await dcw.upload_file_to_id( + collection_id1, os.path.normpath(tmp_files[1]) + ) # create 3rd level folder collection_id2 = await dcw.create_collection(collection_id1, "level2") - file_id3 = await dcw.upload_file_to_id(collection_id2, os.path.normpath(tmp_files[2])) - - yield { 'dataset_id' : dataset_id, - 'coll1_id' : collection_id1, 'coll2_id' : collection_id2, - 'file_id1' : file_id1, 'filename1' : tmp_files[0], - 'file_id2' : file_id2, 'filename2' : tmp_files[1], - 'file_id3' : file_id3, 'filename3' : tmp_files[2], - 'dcw' : dcw } + file_id3 = await dcw.upload_file_to_id( + collection_id2, os.path.normpath(tmp_files[2]) + ) + + yield { + "dataset_id": dataset_id, + "coll1_id": collection_id1, + "coll2_id": collection_id2, + "file_id1": file_id1, + "filename1": tmp_files[0], + "file_id2": file_id2, + "filename2": tmp_files[1], + 
"file_id3": file_id3, + "filename3": tmp_files[2], + "dcw": dcw, + } await dcw.delete_test_dataset(BUCKET_NAME) diff --git a/services/storage/tests/helpers/utils_assert.py b/services/storage/tests/helpers/utils_assert.py index 0e4f2e6ac23..b33aca63138 100644 --- a/services/storage/tests/helpers/utils_assert.py +++ b/services/storage/tests/helpers/utils_assert.py @@ -5,10 +5,13 @@ from servicelib.rest_responses import unwrap_envelope -async def assert_status(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): +async def assert_status( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) - assert response.status == expected_cls.status_code, \ - f"got {response.status}, expected {expected_cls.status_code}:\n data:{data},\n error:{error}" + assert ( + response.status == expected_cls.status_code + ), f"got {response.status}, expected {expected_cls.status_code}:\n data:{data},\n error:{error}" if issubclass(expected_cls, web.HTTPError): do_assert_error(data, error, expected_cls, expected_msg) @@ -25,20 +28,26 @@ async def assert_status(response: web.Response, expected_cls:web.HTTPException, return data, error -async def assert_error(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): + +async def assert_error( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) return do_assert_error(data, error, expected_cls, expected_msg) -def do_assert_error(data, error, expected_cls:web.HTTPException, expected_msg: str=None): + +def do_assert_error( + data, error, expected_cls: web.HTTPException, expected_msg: str = None +): assert not data, pformat(data) assert error, pformat(error) # TODO: improve error messages - assert len(error['errors']) == 1 + assert len(error["errors"]) == 1 - err = error['errors'][0] + err = error["errors"][0] if expected_msg: - assert expected_msg in err['message'] - assert expected_cls.__name__ == err['code'] + assert expected_msg in err["message"] + assert expected_cls.__name__ == err["code"] return data, error diff --git a/services/storage/tests/helpers/utils_project.py b/services/storage/tests/helpers/utils_project.py index b8d59f5ba76..11d70f508fe 100644 --- a/services/storage/tests/helpers/utils_project.py +++ b/services/storage/tests/helpers/utils_project.py @@ -8,15 +8,15 @@ def clone_project_data(project: Dict) -> Tuple[Dict, Dict]: # Update project id # NOTE: this can be re-assigned by dbapi if not unique - project_copy_uuid = uuidlib.uuid1() # random project id - project_copy['uuid'] = str(project_copy_uuid) + project_copy_uuid = uuidlib.uuid1() # random project id + project_copy["uuid"] = str(project_copy_uuid) # Workbench nodes shall be unique within the project context def _create_new_node_uuid(old_uuid): - return str( uuidlib.uuid5(project_copy_uuid, str(old_uuid)) ) + return str(uuidlib.uuid5(project_copy_uuid, str(old_uuid))) nodes_map = {} - for node_uuid in project.get('workbench', {}).keys(): + for node_uuid in project.get("workbench", {}).keys(): nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) def _replace_uuids(node): @@ -34,5 +34,5 @@ def _replace_uuids(node): node[key] = _replace_uuids(value) return node - project_copy['workbench'] = _replace_uuids(project_copy.get('workbench', {})) + project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) return project_copy, nodes_map diff --git 
a/services/storage/tests/test_configs.py b/services/storage/tests/test_configs.py index e332aadc953..cc1b1caa200 100644 --- a/services/storage/tests/test_configs.py +++ b/services/storage/tests/test_configs.py @@ -14,8 +14,9 @@ from simcore_service_storage.cli import create_environ, parse, setup_parser from simcore_service_storage.resources import resources -THIS_SERVICE = 'storage' -CONFIG_DIR = 'data' +THIS_SERVICE = "storage" +CONFIG_DIR = "data" + @pytest.fixture("session") def env_devel_file(osparc_simcore_root_dir): @@ -30,9 +31,10 @@ def services_docker_compose_file(osparc_simcore_root_dir): assert dcpath.exists() return dcpath + @pytest.fixture("session") def devel_environ(env_devel_file): - PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$") + PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$") env_devel = {} with env_devel_file.open() as f: for line in f: @@ -44,7 +46,9 @@ def devel_environ(env_devel_file): @pytest.fixture("session") -def container_environ(services_docker_compose_file, devel_environ, osparc_simcore_root_dir): +def container_environ( + services_docker_compose_file, devel_environ, osparc_simcore_root_dir +): """ Creates a dict with the environment variables inside of a webserver container """ @@ -53,12 +57,12 @@ def container_environ(services_docker_compose_file, devel_environ, osparc_simcor dc = yaml.safe_load(f) container_environ = create_environ(skip_system_environ=True) - container_environ.update({ - 'OSPARC_SIMCORE_REPO_ROOTDIR':str(osparc_simcore_root_dir) - }) + container_environ.update( + {"OSPARC_SIMCORE_REPO_ROOTDIR": str(osparc_simcore_root_dir)} + ) environ_items = dc["services"][THIS_SERVICE].get("environment", list()) - MATCH = re.compile(r'\$\{(\w+)+') + MATCH = re.compile(r"\$\{(\w+)+") for item in environ_items: key, value = item.split("=") @@ -72,13 +76,14 @@ def container_environ(services_docker_compose_file, devel_environ, osparc_simcor return container_environ -@pytest.mark.parametrize("configfile", [str(n) - for n in resources.listdir(CONFIG_DIR) if n.endswith(("yaml", "yml")) - ]) +@pytest.mark.parametrize( + "configfile", + [str(n) for n in resources.listdir(CONFIG_DIR) if n.endswith(("yaml", "yml"))], +) def test_config_files(configfile, container_environ, capsys): parser = setup_parser(argparse.ArgumentParser("test-parser")) - with mock.patch('os.environ', container_environ): + with mock.patch("os.environ", container_environ): cmd = ["-c", configfile] try: config = parse(cmd, parser) @@ -86,6 +91,7 @@ def test_config_files(configfile, container_environ, capsys): except SystemExit as err: pytest.fail(capsys.readouterr().err) - for key, value in config.items(): - assert value!='None', "Use instead Null in {} for {}".format(configfile, key) + assert value != "None", "Use instead Null in {} for {}".format( + configfile, key + ) diff --git a/services/storage/tests/test_datcore.py b/services/storage/tests/test_datcore.py index 37a06106e73..90bcac0c3ca 100644 --- a/services/storage/tests/test_datcore.py +++ b/services/storage/tests/test_datcore.py @@ -24,6 +24,7 @@ async def test_datcore_ping(loop): responsive = await dcw.ping() assert responsive + async def test_datcore_list_files_recursively(loop): if not utils.has_datcore_tokens(): return @@ -35,6 +36,7 @@ async def test_datcore_list_files_recursively(loop): f = await dcw.list_files_recursively() assert len(f) + async def test_datcore_list_files_raw(loop): if not utils.has_datcore_tokens(): return @@ -47,7 +49,6 @@ async def test_datcore_list_files_raw(loop): assert len(f) - async def 
test_datcore_nested_download_link(loop): if not utils.has_datcore_tokens(): return @@ -60,4 +61,4 @@ async def test_datcore_nested_download_link(loop): filename = "initial_WTstates.txt" f = await dcw.download_link(destination, filename) - assert(f) + assert f diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py index deade321d32..5994f81ffbf 100644 --- a/services/storage/tests/test_dsm.py +++ b/services/storage/tests/test_dsm.py @@ -20,13 +20,13 @@ import utils from simcore_service_storage.models import FileMetaData -from simcore_service_storage.settings import (DATCORE_STR, SIMCORE_S3_ID, - SIMCORE_S3_STR) +from simcore_service_storage.settings import DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR from utils import BUCKET_NAME, USER_ID, has_datcore_tokens def test_mockup(dsm_mockup_db): - assert len(dsm_mockup_db)==100 + assert len(dsm_mockup_db) == 100 + # Too many branches (13/12) (too-many-branches) # pylint: disable=R0912 @@ -55,7 +55,7 @@ async def test_dsm_s3(dsm_mockup_db, dsm_fixture): data_as_dict.append(attr.asdict(d)) if write_data: - with open("example.json", 'w') as _f: + with open("example.json", "w") as _f: json.dump(data_as_dict, _f) # Get files from bob from the project biology @@ -66,27 +66,38 @@ async def test_dsm_s3(dsm_mockup_db, dsm_fixture): break assert not bob_id == 0 - data = await dsm.list_files(user_id=bob_id, location=SIMCORE_S3_STR, regex="biology") - data1 = await dsm.list_files(user_id=bob_id, location=SIMCORE_S3_STR, regex="astronomy") + data = await dsm.list_files( + user_id=bob_id, location=SIMCORE_S3_STR, regex="biology" + ) + data1 = await dsm.list_files( + user_id=bob_id, location=SIMCORE_S3_STR, regex="astronomy" + ) data = data + data1 bobs_biostromy_files = [] for d in dsm_mockup_db.keys(): md = dsm_mockup_db[d] - if md.user_id == bob_id and (md.project_name == "biology" or md.project_name == "astronomy"): + if md.user_id == bob_id and ( + md.project_name == "biology" or md.project_name == "astronomy" + ): bobs_biostromy_files.append(md) assert len(data) == len(bobs_biostromy_files) - # among bobs bio files, filter by project/node, take first one - uuid_filter = os.path.join(bobs_biostromy_files[0].project_id, bobs_biostromy_files[0].node_id) - filtered_data = await dsm.list_files(user_id=bob_id, location=SIMCORE_S3_STR, uuid_filter=str(uuid_filter)) + uuid_filter = os.path.join( + bobs_biostromy_files[0].project_id, bobs_biostromy_files[0].node_id + ) + filtered_data = await dsm.list_files( + user_id=bob_id, location=SIMCORE_S3_STR, uuid_filter=str(uuid_filter) + ) assert filtered_data[0].fmd == bobs_biostromy_files[0] for dx in data: d = dx.fmd - await dsm.delete_file(user_id=d.user_id, location=SIMCORE_S3_STR, file_uuid=d.file_uuid) + await dsm.delete_file( + user_id=d.user_id, location=SIMCORE_S3_STR, file_uuid=d.file_uuid + ) # now we should have less items new_size = 0 @@ -103,7 +114,6 @@ def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): bucket_name = BUCKET_NAME s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) - # create file and upload filename = os.path.basename(tmp_file) project_id = "22" @@ -116,31 +126,35 @@ def _create_file_meta_for_s3(postgres_url, s3_client, tmp_file): created_at = str(datetime.datetime.now()) file_size = 1234 - d = { 'object_name' : os.path.join(str(project_id), str(node_id), str(file_name)), - 'bucket_name' : bucket_name, - 'file_name' : filename, - 'user_id' : USER_ID, - 'user_name' : "starbucks", - 'location' : SIMCORE_S3_STR, - 'location_id' : 
SIMCORE_S3_ID, - 'project_id' : project_id, - 'project_name' : project_name, - 'node_id' : node_id, - 'node_name' : node_name, - 'file_uuid' : file_uuid, - 'file_id' : file_uuid, - 'raw_file_path' : file_uuid, - 'display_file_path' : display_name, - 'created_at' : created_at, - 'last_modified' : created_at, - 'file_size' : file_size - } + d = { + "object_name": os.path.join(str(project_id), str(node_id), str(file_name)), + "bucket_name": bucket_name, + "file_name": filename, + "user_id": USER_ID, + "user_name": "starbucks", + "location": SIMCORE_S3_STR, + "location_id": SIMCORE_S3_ID, + "project_id": project_id, + "project_name": project_name, + "node_id": node_id, + "node_name": node_name, + "file_uuid": file_uuid, + "file_id": file_uuid, + "raw_file_path": file_uuid, + "display_file_path": display_name, + "created_at": created_at, + "last_modified": created_at, + "file_size": file_size, + } fmd = FileMetaData(**d) return fmd -async def test_links_s3(postgres_service_url, s3_client, mock_files_factory, dsm_fixture): + +async def test_links_s3( + postgres_service_url, s3_client, mock_files_factory, dsm_fixture +): utils.create_tables(url=postgres_service_url) tmp_file = mock_files_factory(1)[0] @@ -149,9 +163,9 @@ async def test_links_s3(postgres_service_url, s3_client, mock_files_factory, dsm dsm = dsm_fixture up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass @@ -163,7 +177,10 @@ async def test_links_s3(postgres_service_url, s3_client, mock_files_factory, dsm assert filecmp.cmp(tmp_file2, tmp_file) -async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, dsm_fixture): + +async def test_copy_s3_s3( + postgres_service_url, s3_client, mock_files_factory, dsm_fixture +): utils.create_tables(url=postgres_service_url) tmp_file = mock_files_factory(1)[0] @@ -175,9 +192,9 @@ async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, d # upload the file up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass @@ -187,26 +204,38 @@ async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, d from_uuid = fmd.file_uuid new_project = "zoology" to_uuid = os.path.join(new_project, fmd.node_id, fmd.file_name) - await dsm.copy_file(user_id=fmd.user_id, dest_location=SIMCORE_S3_STR, dest_uuid=to_uuid, source_location=SIMCORE_S3_STR, source_uuid=from_uuid) + await dsm.copy_file( + user_id=fmd.user_id, + dest_location=SIMCORE_S3_STR, + dest_uuid=to_uuid, + source_location=SIMCORE_S3_STR, + source_uuid=from_uuid, + ) data = await dsm.list_files(user_id=fmd.user_id, location=SIMCORE_S3_STR) assert len(data) == 2 -#NOTE: Below tests directly access the datcore platform, use with care! + +# NOTE: Below tests directly access the datcore platform, use with care! 
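The tests above all drive uploads the same way: dsm.upload_link() hands back a presigned URL and the test pushes raw bytes at it with a plain urllib PUT. Here is that step in isolation as a small sketch; the helper name is illustrative, and the URL is assumed to come from upload_link() or any other presigned-URL source:

import urllib.request

def put_file(presigned_url: str, path: str) -> int:
    with open(path, "rb") as fp:
        payload = fp.read()
    req = urllib.request.Request(presigned_url, data=payload, method="PUT")
    with urllib.request.urlopen(req) as resp:  # nosec - test-style helper
        return resp.status  # S3/MinIO reply 200 on a successful PUT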
def test_datcore_fixture(datcore_structured_testbucket): if not has_datcore_tokens(): return print(datcore_structured_testbucket) -async def test_dsm_datcore(postgres_service_url, dsm_fixture, datcore_structured_testbucket): + +async def test_dsm_datcore( + postgres_service_url, dsm_fixture, datcore_structured_testbucket +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) dsm = dsm_fixture user_id = "0" - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) # the fixture creates three files assert len(data) == 3 @@ -215,10 +244,19 @@ async def test_dsm_datcore(postgres_service_url, dsm_fixture, datcore_structured print("Deleting", fmd_to_delete.bucket_name, fmd_to_delete.object_name) await dsm.delete_file(user_id, DATCORE_STR, fmd_to_delete.file_id) - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(data) == 2 -async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_factory, dsm_fixture, datcore_structured_testbucket): + +async def test_dsm_s3_to_datcore( + postgres_service_url, + s3_client, + mock_files_factory, + dsm_fixture, + datcore_structured_testbucket, +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) @@ -229,9 +267,9 @@ async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac dsm = dsm_fixture up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass @@ -242,24 +280,41 @@ async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac urllib.request.urlretrieve(down_url, tmp_file2) assert filecmp.cmp(tmp_file2, tmp_file) # now we have the file locally, upload the file - await dsm.upload_file_to_datcore(user_id=user_id, local_file_path=tmp_file2, destination_id=datcore_structured_testbucket['dataset_id']) + await dsm.upload_file_to_datcore( + user_id=user_id, + local_file_path=tmp_file2, + destination_id=datcore_structured_testbucket["dataset_id"], + ) # and into a deeper structure - await dsm.upload_file_to_datcore(user_id=user_id, local_file_path=tmp_file2, destination_id=datcore_structured_testbucket['coll2_id']) - - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + await dsm.upload_file_to_datcore( + user_id=user_id, + local_file_path=tmp_file2, + destination_id=datcore_structured_testbucket["coll2_id"], + ) + + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) # there should now be 5 files assert len(data) == 5 -async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_files_factory, datcore_structured_testbucket): + +async def test_dsm_datcore_to_local( + postgres_service_url, dsm_fixture, mock_files_factory, datcore_structured_testbucket +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) dsm = dsm_fixture user_id = USER_ID - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, 
location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(data) - url, filename = await dsm.download_link_datcore(user_id, datcore_structured_testbucket['file_id1']) + url, filename = await dsm.download_link_datcore( + user_id, datcore_structured_testbucket["file_id1"] + ) tmp_file = mock_files_factory(1)[0] tmp_file2 = tmp_file + ".fromdatcore" @@ -268,7 +323,14 @@ async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_file assert filecmp.cmp(tmp_file2, tmp_file) -async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_structured_testbucket): + +async def test_dsm_datcore_to_S3( + postgres_service_url, + s3_client, + dsm_fixture, + mock_files_factory, + datcore_structured_testbucket, +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) @@ -283,19 +345,28 @@ async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, m s3_data = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(s3_data) == 0 - dc_data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + dc_data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(dc_data) == 3 src_fmd = dc_data[0] - await dsm.copy_file(user_id=user_id, dest_location=SIMCORE_S3_STR, dest_uuid=dest_uuid, source_location=DATCORE_STR, - source_uuid=datcore_structured_testbucket["file_id1"]) + await dsm.copy_file( + user_id=user_id, + dest_location=SIMCORE_S3_STR, + dest_uuid=dest_uuid, + source_location=DATCORE_STR, + source_uuid=datcore_structured_testbucket["file_id1"], + ) s3_data = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(s3_data) == 1 # now download the original file tmp_file1 = tmp_file + ".fromdatcore" - down_url_dc, filename = await dsm.download_link_datcore(user_id, datcore_structured_testbucket["file_id1"]) + down_url_dc, filename = await dsm.download_link_datcore( + user_id, datcore_structured_testbucket["file_id1"] + ) urllib.request.urlretrieve(down_url_dc, tmp_file1) # and the one on s3 @@ -305,7 +376,14 @@ async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, m assert filecmp.cmp(tmp_file1, tmp_file2) -async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_structured_testbucket): + +async def test_copy_datcore( + postgres_service_url, + s3_client, + dsm_fixture, + mock_files_factory, + datcore_structured_testbucket, +): if not has_datcore_tokens(): return utils.create_tables(url=postgres_service_url) @@ -313,7 +391,9 @@ async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_f # the fixture should provide 3 files dsm = dsm_fixture user_id = USER_ID - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) assert len(data) == 3 # create temporary file and upload to s3 @@ -321,23 +401,31 @@ async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_f fmd = _create_file_meta_for_s3(postgres_service_url, s3_client, tmp_file) up_url = await dsm.upload_link(fmd.user_id, fmd.file_uuid) - with io.open(tmp_file, 'rb') as fp: + with io.open(tmp_file, "rb") as fp: d = fp.read() - req = urllib.request.Request(up_url, data=d, method='PUT') + req = urllib.request.Request(up_url, data=d, method="PUT") with urllib.request.urlopen(req) as _f: pass - 
#now copy to datcore + # now copy to datcore dat_core_uuid = os.path.join(BUCKET_NAME, fmd.file_name) - await dsm.copy_file(user_id=user_id, dest_location=DATCORE_STR, dest_uuid=datcore_structured_testbucket["coll2_id"], source_location=SIMCORE_S3_STR, - source_uuid=fmd.file_uuid) + await dsm.copy_file( + user_id=user_id, + dest_location=DATCORE_STR, + dest_uuid=datcore_structured_testbucket["coll2_id"], + source_location=SIMCORE_S3_STR, + source_uuid=fmd.file_uuid, + ) - data = await dsm.list_files(user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME) + data = await dsm.list_files( + user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME + ) # there should now be 4 files assert len(data) == 4 + def test_fmd_build(): file_uuid = str(Path("1234") / Path("abcd") / Path("xx.dat")) fmd = FileMetaData() @@ -352,6 +440,7 @@ def test_fmd_build(): assert fmd.location_id == SIMCORE_S3_ID assert fmd.bucket_name == "test-bucket" + async def test_dsm_complete_db(dsm_fixture, dsm_mockup_complete_db): dsm = dsm_fixture _id = "21" @@ -366,19 +455,27 @@ async def test_dsm_complete_db(dsm_fixture, dsm_mockup_complete_db): assert d.project_name assert d.raw_file_path + async def test_delete_data_folders(dsm_fixture, dsm_mockup_complete_db): file_1, file_2 = dsm_mockup_complete_db _id = "21" data = await dsm_fixture.list_files(user_id=_id, location=SIMCORE_S3_STR) - response = await dsm_fixture.delete_project_simcore_s3(user_id=_id, project_id=file_1["project_id"], node_id=file_1["node_id"]) + response = await dsm_fixture.delete_project_simcore_s3( + user_id=_id, project_id=file_1["project_id"], node_id=file_1["node_id"] + ) data = await dsm_fixture.list_files(user_id=_id, location=SIMCORE_S3_STR) assert len(data) == 1 assert data[0].fmd.file_name == file_2["filename"] - response = await dsm_fixture.delete_project_simcore_s3(user_id=_id, project_id=file_1["project_id"], node_id=None) + response = await dsm_fixture.delete_project_simcore_s3( + user_id=_id, project_id=file_1["project_id"], node_id=None + ) data = await dsm_fixture.list_files(user_id=_id, location=SIMCORE_S3_STR) assert not data -async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_service_url, datcore_structured_testbucket): + +async def test_deep_copy_project_simcore_s3( + dsm_fixture, s3_client, postgres_service_url, datcore_structured_testbucket +): if not has_datcore_tokens(): return dsm = dsm_fixture @@ -388,7 +485,7 @@ async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_ser file_name_in_datcore = Path(datcore_structured_testbucket["filename3"]).name user_id = USER_ID - source_project = { + source_project = { "uuid": "template-uuid-4d5e-b80e-401c8066782f", "name": "ISAN: 2D Plot", "description": "2D RawGraphs viewer with one input", @@ -397,53 +494,47 @@ async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_ser "creationDate": "2019-05-24T10:36:57.813Z", "lastChangeDate": "2019-05-24T11:36:12.015Z", "workbench": { - "template-uuid-48eb-a9d2-aaad6b72400a": { - "key": "simcore/services/frontend/file-picker", - "version": "1.0.0", - "label": "File Picker", - "inputs": {}, - "inputNodes": [], - "outputs": { - "outFile": { - "store": 1, - "path": "N:package:ab8c214d-a596-401f-a90c-9c50e3c048b0" - } + "template-uuid-48eb-a9d2-aaad6b72400a": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker", + "inputs": {}, + "inputNodes": [], + "outputs": { + "outFile": { + "store": 1, + "path": 
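[Editor's note] test_fmd_build above leans on the simcore convention that a file_uuid is the path project_id/node_id/file_name. A hedged helper making the convention explicit (the helper name and tuple type are illustrative, not part of FileMetaData):

```python
from pathlib import Path
from typing import NamedTuple

class ParsedFileUuid(NamedTuple):  # hypothetical, for illustration only
    project_id: str
    node_id: str
    file_name: str

def parse_file_uuid(file_uuid: str) -> ParsedFileUuid:
    project_id, node_id, file_name = Path(file_uuid).parts
    return ParsedFileUuid(project_id, node_id, file_name)

assert parse_file_uuid("1234/abcd/xx.dat") == ParsedFileUuid("1234", "abcd", "xx.dat")
```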
"N:package:ab8c214d-a596-401f-a90c-9c50e3c048b0", + } + }, + "progress": 100, + "thumbnail": "", + "position": {"x": 100, "y": 100}, }, - "progress": 100, - "thumbnail": "", - "position": { - "x": 100, - "y": 100 - } - }, - "template-uuid-4c63-a705-03a2c339646c": { - "key": "simcore/services/dynamic/raw-graphs", - "version": "2.8.0", - "label": "2D plot", - "inputs": { - "input_1": { - "nodeUuid": "template-uuid-48eb-a9d2-aaad6b72400a", - "output": "outFile" - } + "template-uuid-4c63-a705-03a2c339646c": { + "key": "simcore/services/dynamic/raw-graphs", + "version": "2.8.0", + "label": "2D plot", + "inputs": { + "input_1": { + "nodeUuid": "template-uuid-48eb-a9d2-aaad6b72400a", + "output": "outFile", + } + }, + "inputNodes": ["template-uuid-48eb-a9d2-aaad6b72400a"], + "outputs": {}, + "progress": 0, + "thumbnail": "", + "position": {"x": 400, "y": 100}, }, - "inputNodes": [ - "template-uuid-48eb-a9d2-aaad6b72400a" - ], - "outputs": {}, - "progress": 0, - "thumbnail": "", - "position": { - "x": 400, - "y": 100 - } - } - } + }, } bucket_name = BUCKET_NAME s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) - source_project["workbench"]["template-uuid-48eb-a9d2-aaad6b72400a"]["outputs"]["outFile"]["path"] = path_in_datcore + source_project["workbench"]["template-uuid-48eb-a9d2-aaad6b72400a"]["outputs"][ + "outFile" + ]["path"] = path_in_datcore destination_project = copy.deepcopy(source_project) source_project_id = source_project["uuid"] @@ -453,28 +544,38 @@ async def test_deep_copy_project_simcore_s3(dsm_fixture, s3_client, postgres_ser node_mapping = {} for node_id, node in source_project["workbench"].items(): - object_name = str(Path(source_project_id) / Path(node_id) / Path(node_id + ".dat")) + object_name = str( + Path(source_project_id) / Path(node_id) / Path(node_id + ".dat") + ) f = utils.data_dir() / Path("notebooks.zip") s3_client.upload_file(bucket_name, object_name, f) key = node_id.replace("template", "deep-copy") destination_project["workbench"][key] = node node_mapping[node_id] = key - status = await dsm.deep_copy_project_simcore_s3(user_id, source_project, destination_project, node_mapping) + status = await dsm.deep_copy_project_simcore_s3( + user_id, source_project, destination_project, node_mapping + ) - new_path = destination_project["workbench"]["deep-copy-uuid-48eb-a9d2-aaad6b72400a"]["outputs"]["outFile"]["path"] + new_path = destination_project["workbench"][ + "deep-copy-uuid-48eb-a9d2-aaad6b72400a" + ]["outputs"]["outFile"]["path"] assert new_path != path_in_datcore assert Path(new_path).name == file_name_in_datcore files = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(files) == 3 # one of the files in s3 should be the dowloaded one from datcore - assert any(f.fmd.file_name == Path(datcore_structured_testbucket["filename3"]).name for f in files) + assert any( + f.fmd.file_name == Path(datcore_structured_testbucket["filename3"]).name + for f in files + ) response = await dsm.delete_project_simcore_s3(user_id, destination_project["uuid"]) files = await dsm.list_files(user_id=user_id, location=SIMCORE_S3_STR) assert len(files) == 0 + async def test_dsm_list_datasets_s3(dsm_fixture, dsm_mockup_complete_db): dsm_fixture.has_project_db = True @@ -483,6 +584,7 @@ async def test_dsm_list_datasets_s3(dsm_fixture, dsm_mockup_complete_db): assert len(datasets) == 1 assert any("Kember" in d.display_name for d in datasets) + async def test_dsm_list_datasets_datcore(dsm_fixture, datcore_structured_testbucket): if not has_datcore_tokens(): 
return @@ -492,6 +594,7 @@ async def test_dsm_list_datasets_datcore(dsm_fixture, datcore_structured_testbuc assert len(datasets) assert any(BUCKET_NAME in d.display_name for d in datasets) + async def test_dsm_list_dataset_files_s3(dsm_fixture, dsm_mockup_complete_db): dsm_fixture.has_project_db = True @@ -499,13 +602,18 @@ async def test_dsm_list_dataset_files_s3(dsm_fixture, dsm_mockup_complete_db): assert len(datasets) == 1 assert any("Kember" in d.display_name for d in datasets) for d in datasets: - files = await dsm_fixture.list_files_dataset(user_id="21", location=SIMCORE_S3_STR, dataset_id=d.dataset_id) + files = await dsm_fixture.list_files_dataset( + user_id="21", location=SIMCORE_S3_STR, dataset_id=d.dataset_id + ) if "Kember" in d.display_name: assert len(files) == 2 else: assert len(files) == 0 -async def test_dsm_list_dataset_files_datcore(dsm_fixture, datcore_structured_testbucket): + +async def test_dsm_list_dataset_files_datcore( + dsm_fixture, datcore_structured_testbucket +): if not has_datcore_tokens(): return @@ -515,12 +623,17 @@ async def test_dsm_list_dataset_files_datcore(dsm_fixture, datcore_structured_te assert any(BUCKET_NAME in d.display_name for d in datasets) for d in datasets: - files = await dsm_fixture.list_files_dataset(user_id=USER_ID, location=DATCORE_STR, dataset_id=d.dataset_id) + files = await dsm_fixture.list_files_dataset( + user_id=USER_ID, location=DATCORE_STR, dataset_id=d.dataset_id + ) if BUCKET_NAME in d.display_name: assert len(files) == 3 + @pytest.mark.skip(reason="develop only") -async def test_download_links(datcore_structured_testbucket, s3_client, mock_files_factory): +async def test_download_links( + datcore_structured_testbucket, s3_client, mock_files_factory +): s3_client.create_bucket(BUCKET_NAME, delete_contents_if_exists=True) _file = mock_files_factory(count=1)[0] @@ -528,15 +641,15 @@ async def test_download_links(datcore_structured_testbucket, s3_client, mock_fil link = s3_client.create_presigned_get_url(BUCKET_NAME, "test.txt") print(link) - dcw = datcore_structured_testbucket['dcw'] + dcw = datcore_structured_testbucket["dcw"] - endings = ['txt', 'json', 'zip', 'dat', 'mat'] + endings = ["txt", "json", "zip", "dat", "mat"] counter = 1 for e in endings: file_name = "test{}.{}".format(counter, e) file2 = str(Path(_file).parent / file_name) copyfile(_file, file_name) - dataset_id = datcore_structured_testbucket['dataset_id'] + dataset_id = datcore_structured_testbucket["dataset_id"] file_id = await dcw.upload_file_to_id(dataset_id, file_name) link, _file_name = await dcw.download_link_by_id(file_id) print(_file_name, link) diff --git a/services/storage/tests/test_package.py b/services/storage/tests/test_package.py index 33d090b1f77..f2a9c8a9855 100644 --- a/services/storage/tests/test_package.py +++ b/services/storage/tests/test_package.py @@ -18,16 +18,17 @@ def pylintrc(osparc_simcore_root_dir): assert pylintrc.exists() return pylintrc + def test_run_pylint(pylintrc, package_dir): try: - AUTODETECT=0 - cmd = f'pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}'.split() + AUTODETECT = 0 + cmd = f"pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}".split() assert subprocess.check_call(cmd) == 0 except subprocess.CalledProcessError as err: pytest.fail("Linting error. 
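[Editor's note] test_run_pylint drives the linter through subprocess. A self-contained sketch of the same check, assuming pylint is installed; note that --jobs=0 asks pylint to auto-detect the worker count, which is what the AUTODETECT constant encodes:

```python
import subprocess

def run_pylint(rcfile: str, target: str) -> None:
    cmd = f"pylint --jobs=0 --rcfile {rcfile} -v {target}".split()
    try:
        subprocess.check_call(cmd)  # returns 0 on success or raises CalledProcessError
    except subprocess.CalledProcessError as err:
        # stands in for pytest.fail in the real test
        raise AssertionError(f"Linting error. Linter exited with code {err.returncode}")
```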
Linter existed with code %d" % err.returncode) -def test_main(here): # pylint: disable=unused-variable +def test_main(here): # pylint: disable=unused-variable with pytest.raises(SystemExit) as excinfo: main("--help".split()) diff --git a/services/storage/tests/test_resources.py b/services/storage/tests/test_resources.py index 0ebeeec0a23..1a0ed7c5500 100644 --- a/services/storage/tests/test_resources.py +++ b/services/storage/tests/test_resources.py @@ -14,16 +14,21 @@ log = logging.getLogger(__name__) + @pytest.fixture def app_resources(package_dir): resource_names = [] - for name in (RSC_CONFIG_DIR_KEY, 'api'): + for name in (RSC_CONFIG_DIR_KEY, "api"): folder = package_dir / name - resource_names += [ str(p.relative_to(package_dir)) for p in folder.rglob("*.y*ml") ] + resource_names += [ + str(p.relative_to(package_dir)) for p in folder.rglob("*.y*ml") + ] return resource_names -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ + def test_resource_io_utils(app_resources): @@ -41,14 +46,20 @@ def test_resource_io_utils(app_resources): assert ostream.closed + def test_named_resources(): - exposed = [getattr(resources, name) for name in dir(resources) if name.startswith("RESOURCES")] + exposed = [ + getattr(resources, name) + for name in dir(resources) + if name.startswith("RESOURCES") + ] for resource_name in exposed: assert resources.exists(resource_name) assert resources.isdir(resource_name) assert resources.listdir(resource_name) + def test_paths(app_resources): for resource_name in app_resources: assert resources.get_path(resource_name).exists() diff --git a/services/storage/tests/test_rest.py b/services/storage/tests/test_rest.py index 2f856c833ac..a6407f98a46 100644 --- a/services/storage/tests/test_rest.py +++ b/services/storage/tests/test_rest.py @@ -18,8 +18,7 @@ from aiohttp import web from simcore_service_storage.db import setup_db -from simcore_service_storage.dsm import (APP_DSM_KEY, DataStorageManager, - setup_dsm) +from simcore_service_storage.dsm import APP_DSM_KEY, DataStorageManager, setup_dsm from simcore_service_storage.rest import setup_rest from simcore_service_storage.s3 import setup_s3 from simcore_service_storage.settings import APP_CONFIG_KEY, SIMCORE_S3_ID @@ -29,6 +28,7 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + def parse_db(dsm_mockup_db): id_name_map = {} id_file_count = {} @@ -42,34 +42,43 @@ def parse_db(dsm_mockup_db): return id_file_count, id_name_map + @pytest.fixture -def client(loop, aiohttp_unused_port, aiohttp_client, postgres_service, minio_service, osparc_api_specs_dir): +def client( + loop, + aiohttp_unused_port, + aiohttp_client, + postgres_service, + minio_service, + osparc_api_specs_dir, +): app = web.Application() api_token = os.environ.get("BF_API_KEY", "none") api_secret = os.environ.get("BF_API_SECRET", "none") main_cfg = { - 'port': aiohttp_unused_port(), - 'host': 'localhost', - "max_workers" : 4, - "testing" : True, - "test_datcore" : { 'api_token' : api_token, 'api_secret' : api_secret} + "port": aiohttp_unused_port(), + "host": "localhost", + "max_workers": 4, + "testing": True, + "test_datcore": {"api_token": api_token, "api_secret": api_secret}, } rest_cfg = { - 'oas_repo': str(osparc_api_specs_dir), #'${OSPARC_SIMCORE_REPO_ROOTDIR}/api/specs', - #oas_repo: http://localhost:8043/api/specs + "oas_repo": str( + osparc_api_specs_dir + ), 
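[Editor's note] The client fixture below wires a full aiohttp app from config dicts before handing it to aiohttp_client. A pared-down sketch of that fixture pattern, assuming the pytest-aiohttp plugin provides the loop and aiohttp_client fixtures:

```python
import pytest
from aiohttp import web

async def health(request: web.Request) -> web.Response:
    return web.json_response(
        {"data": {"name": "simcore_service_storage", "status": "SERVICE_RUNNING"}, "error": None}
    )

@pytest.fixture
def client(loop, aiohttp_client):
    app = web.Application()
    app.router.add_get("/v0/", health)
    return loop.run_until_complete(aiohttp_client(app))

async def test_health_check(client):
    resp = await client.get("/v0/")
    assert resp.status == 200
```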
#'${OSPARC_SIMCORE_REPO_ROOTDIR}/api/specs', + # oas_repo: http://localhost:8043/api/specs } postgres_cfg = postgres_service s3_cfg = minio_service - # fake config app[APP_CONFIG_KEY] = { - 'main': main_cfg, - 'postgres' : postgres_cfg, - 's3' : s3_cfg, - 'rest': rest_cfg + "main": main_cfg, + "postgres": postgres_cfg, + "s3": s3_cfg, + "rest": rest_cfg, } setup_db(app) @@ -77,9 +86,10 @@ def client(loop, aiohttp_unused_port, aiohttp_client, postgres_service, minio_se setup_dsm(app) setup_s3(app) - cli = loop.run_until_complete( aiohttp_client(app, server_kwargs=main_cfg) ) + cli = loop.run_until_complete(aiohttp_client(app, server_kwargs=main_cfg)) return cli + async def test_health_check(client): resp = await client.get("/v0/") text = await resp.text() @@ -87,13 +97,14 @@ async def test_health_check(client): assert resp.status == 200, text payload = await resp.json() - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert data assert not error - assert data['name'] == 'simcore_service_storage' - assert data['status'] == 'SERVICE_RUNNING' + assert data["name"] == "simcore_service_storage" + assert data["status"] == "SERVICE_RUNNING" + async def test_locations(client): user_id = USER_ID @@ -103,9 +114,9 @@ async def test_locations(client): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) - _locs = 2 if has_datcore_tokens() else 1 + _locs = 2 if has_datcore_tokens() else 1 assert len(data) == _locs assert not error @@ -119,7 +130,7 @@ async def test_s3_files_metadata(client, dsm_mockup_db): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert len(data) == id_file_count[_id] @@ -127,49 +138,69 @@ async def test_s3_files_metadata(client, dsm_mockup_db): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] uuid_filter = os.path.join(fmd.project_id, fmd.node_id) - resp = await client.get("/v0/locations/0/files/metadata?user_id={}&uuid_filter={}".format(fmd.user_id, quote(uuid_filter, safe=''))) + resp = await client.get( + "/v0/locations/0/files/metadata?user_id={}&uuid_filter={}".format( + fmd.user_id, quote(uuid_filter, safe="") + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error for d in data: - assert os.path.join(d['project_id'], d['node_id']) == uuid_filter + assert os.path.join(d["project_id"], d["node_id"]) == uuid_filter + async def test_s3_file_metadata(client, dsm_mockup_db): # go through all files and get them for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.get("/v0/locations/0/files/{}/metadata?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.get( + "/v0/locations/0/files/{}/metadata?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data + async def test_download_link(client, dsm_mockup_db): for d in 
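[Editor's note] The metadata endpoints above percent-encode the file uuid with quote(..., safe="") so that the slashes inside the uuid survive as a single URL path segment. A small demonstration:

```python
from urllib.parse import quote

def file_metadata_path(file_uuid: str, user_id: str) -> str:
    # safe="" also encodes "/" so the uuid stays one path segment
    return "/v0/locations/0/files/{}/metadata?user_id={}".format(
        quote(file_uuid, safe=""), user_id
    )

assert (
    file_metadata_path("proj/node/file.dat", "21")
    == "/v0/locations/0/files/proj%2Fnode%2Ffile.dat/metadata?user_id=21"
)
```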
dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.get("/v0/locations/0/files/{}?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.get( + "/v0/locations/0/files/{}?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data + async def test_upload_link(client, dsm_mockup_db): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.put("/v0/locations/0/files/{}?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.put( + "/v0/locations/0/files/{}?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data + async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket): if not has_datcore_tokens(): return @@ -179,13 +210,19 @@ async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket): for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] source_uuid = fmd.file_uuid - datcore_id = datcore_structured_testbucket['coll1_id'] - resp = await client.put("/v0/locations/1/files/{}?user_id={}&extra_location={}&extra_source={}".format(quote(datcore_id, safe=''), - fmd.user_id, SIMCORE_S3_ID, quote(source_uuid, safe=''))) + datcore_id = datcore_structured_testbucket["coll1_id"] + resp = await client.put( + "/v0/locations/1/files/{}?user_id={}&extra_location={}&extra_source={}".format( + quote(datcore_id, safe=""), + fmd.user_id, + SIMCORE_S3_ID, + quote(source_uuid, safe=""), + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert data @@ -195,24 +232,33 @@ async def test_copy(client, dsm_mockup_db, datcore_structured_testbucket): # list files for every user user_id = USER_ID - resp = await client.get("/v0/locations/1/files/metadata?user_id={}&uuid_filter={}".format(user_id, BUCKET_NAME)) + resp = await client.get( + "/v0/locations/1/files/metadata?user_id={}&uuid_filter={}".format( + user_id, BUCKET_NAME + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert len(data) > N + async def test_delete_file(client, dsm_mockup_db): id_file_count, _id_name_map = parse_db(dsm_mockup_db) for d in dsm_mockup_db.keys(): fmd = dsm_mockup_db[d] - resp = await client.delete("/v0/locations/0/files/{}?user_id={}".format(quote(fmd.file_uuid, safe=''), fmd.user_id)) + resp = await client.delete( + "/v0/locations/0/files/{}?user_id={}".format( + quote(fmd.file_uuid, safe=""), fmd.user_id + ) + ) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert not data @@ -221,25 +267,19 @@ async def test_delete_file(client, dsm_mockup_db): payload = await resp.json() assert resp.status == 200, str(payload) - data, 
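[Editor's note] Every response in these tests is unpacked from the same {data, error} envelope. Factoring the repeated idiom into a helper shows the intent (the helper is illustrative, not part of the test suite):

```python
def unpack_envelope(payload: dict):
    """Split the {"data": ..., "error": ...} envelope used by the storage API."""
    return tuple(payload.get(k) for k in ("data", "error"))

data, error = unpack_envelope({"data": [1, 2, 3], "error": None})
assert data == [1, 2, 3] and error is None
```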
error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error assert len(data) == 0 + async def test_action_check(client): - QUERY = 'mguidon' - ACTION = 'echo' - FAKE = { - 'path_value': 'one', - 'query_value': 'two', - 'body_value': { - 'a': 33, - 'b': 45 - } - } + QUERY = "mguidon" + ACTION = "echo" + FAKE = {"path_value": "one", "query_value": "two", "body_value": {"a": 33, "b": 45}} resp = await client.post(f"/v0/check/{ACTION}?data={QUERY}", json=FAKE) payload = await resp.json() - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert resp.status == 200, str(payload) assert data @@ -247,8 +287,9 @@ async def test_action_check(client): # TODO: validate response against specs - assert data['path_value'] == ACTION - assert data['query_value'] == QUERY + assert data["path_value"] == ACTION + assert data["query_value"] == QUERY + def get_project_with_data(): projects = [] @@ -258,57 +299,80 @@ def get_project_with_data(): # TODO: add schema validation return projects -@pytest.mark.parametrize("project_name,project", [ (prj['name'], prj) for prj in get_project_with_data()]) -async def test_create_and_delete_folders_from_project(client, dsm_mockup_db, project_name, project, mocker): + +@pytest.mark.parametrize( + "project_name,project", [(prj["name"], prj) for prj in get_project_with_data()] +) +async def test_create_and_delete_folders_from_project( + client, dsm_mockup_db, project_name, project, mocker +): source_project = project destination_project, nodes_map = clone_project_data(source_project) dsm = client.app[APP_DSM_KEY] - mock_dsm = mocker.patch.object(dsm,"copy_file_datcore_s3") + mock_dsm = mocker.patch.object(dsm, "copy_file_datcore_s3") mock_dsm.return_value = Future() mock_dsm.return_value.set_result("Howdie") - # CREATING - url = client.app.router["copy_folders_from_project"].url_for().with_query(user_id="1") - resp = await client.post(url, json={ - 'source':source_project, - 'destination': destination_project, - 'nodes_map': nodes_map - }) + url = ( + client.app.router["copy_folders_from_project"].url_for().with_query(user_id="1") + ) + resp = await client.post( + url, + json={ + "source": source_project, + "destination": destination_project, + "nodes_map": nodes_map, + }, + ) data, _error = await assert_status(resp, expected_cls=web.HTTPCreated) # data should be equal to the destination project, and all store entries should point to simcore.s3 for key in data: - if key!="workbench": + if key != "workbench": assert data[key] == destination_project[key] else: for _node_id, node in data[key].items(): - if 'outputs' in node: - for _o_id, o in node['outputs'].items(): - if 'store' in o: - assert o['store'] == SIMCORE_S3_ID + if "outputs" in node: + for _o_id, o in node["outputs"].items(): + if "store" in o: + assert o["store"] == SIMCORE_S3_ID # DELETING - project_id = data['uuid'] - url = client.app.router["delete_folders_of_project"].url_for(folder_id=project_id).with_query(user_id="1") + project_id = data["uuid"] + url = ( + client.app.router["delete_folders_of_project"] + .url_for(folder_id=project_id) + .with_query(user_id="1") + ) resp = await client.delete(url) await assert_status(resp, expected_cls=web.HTTPNoContent) + async def test_s3_datasets_metadata(client): - url = client.app.router["get_datasets_metadata"].url_for(location_id=str(SIMCORE_S3_ID)).with_query(user_id="21") + url = ( + 
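[Editor's note] Because copy_file_datcore_s3 is awaited by the handler, the test below patches it with a mock whose return value is a pre-resolved Future. A minimal sketch of that trick with unittest.mock (on Python 3.8+, AsyncMock would be the more direct choice):

```python
import asyncio
from unittest import mock

class DSM:  # stand-in for the real DataStorageManager
    async def copy_file_datcore_s3(self) -> str:
        raise NotImplementedError

async def main() -> None:
    dsm = DSM()
    with mock.patch.object(dsm, "copy_file_datcore_s3") as mock_copy:
        fut = asyncio.get_running_loop().create_future()
        fut.set_result("Howdie")
        mock_copy.return_value = fut  # awaiting the patched call now yields "Howdie"
        assert await dsm.copy_file_datcore_s3() == "Howdie"

asyncio.run(main())
```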
client.app.router["get_datasets_metadata"] + .url_for(location_id=str(SIMCORE_S3_ID)) + .with_query(user_id="21") + ) resp = await client.get(url) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error + async def test_s3_files_datasets_metadata(client): - url = client.app.router["get_files_metadata_dataset"].url_for(location_id=str(SIMCORE_S3_ID), dataset_id="aa").with_query(user_id="21") + url = ( + client.app.router["get_files_metadata_dataset"] + .url_for(location_id=str(SIMCORE_S3_ID), dataset_id="aa") + .with_query(user_id="21") + ) resp = await client.get(url) payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple( payload.get(k) for k in ('data', 'error') ) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert not error diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py index 393ccde3ac0..80282ab8e19 100644 --- a/services/storage/tests/utils.py +++ b/services/storage/tests/utils.py @@ -8,35 +8,44 @@ import requests import sqlalchemy as sa -from simcore_service_storage.models import (FileMetaData, file_meta_data, - projects, user_to_projects, users) +from simcore_service_storage.models import ( + FileMetaData, + file_meta_data, + projects, + user_to_projects, + users, +) log = logging.getLogger(__name__) -DATABASE = 'aio_login_tests' -USER = 'admin' -PASS = 'admin' +DATABASE = "aio_login_tests" +USER = "admin" +PASS = "admin" -ACCESS_KEY = '12345678' -SECRET_KEY = '12345678' +ACCESS_KEY = "12345678" +SECRET_KEY = "12345678" + +BUCKET_NAME = "simcore-testing" +USER_ID = "0" -BUCKET_NAME ="simcore-testing" -USER_ID = '0' def current_dir(): return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + def data_dir(): return current_dir() / Path("data") -def has_datcore_tokens()->bool: + +def has_datcore_tokens() -> bool: token = os.environ.get("BF_API_KEY", "none") if token == "none": pytest.skip("Datcore access tokens not available, skipping test") return False return True + def is_responsive(url, code=200): """Check if something responds to ``url`` syncronously""" try: @@ -48,6 +57,7 @@ def is_responsive(url, code=200): return False + def is_postgres_responsive(url): """Check if something responds to ``url`` """ try: @@ -58,6 +68,7 @@ def is_postgres_responsive(url): return False return True + def create_tables(url, engine=None): meta = sa.MetaData() if not engine: @@ -74,48 +85,58 @@ def drop_tables(url, engine=None): meta.drop_all(bind=engine, tables=[file_meta_data]) + def insert_metadata(url: str, fmd: FileMetaData): - #FIXME: E1120:No value for argument 'dml' in method call + # FIXME: E1120:No value for argument 'dml' in method call # pylint: disable=E1120 ins = file_meta_data.insert().values( - file_uuid = fmd.file_uuid, - location_id = fmd.location_id, - location = fmd.location, - bucket_name = fmd.bucket_name, - object_name = fmd.object_name, - project_id = fmd.project_id, - project_name = fmd.project_name, - node_id = fmd.node_id, - node_name = fmd.node_name, - file_name = fmd.file_name, - user_id = fmd.user_id, - user_name= fmd.user_name, - file_id = fmd.file_id, - raw_file_path = fmd.raw_file_path, - display_file_path = fmd.display_file_path, - created_at = fmd.created_at, - last_modified = fmd.last_modified, - file_size = fmd.file_size) - + file_uuid=fmd.file_uuid, + location_id=fmd.location_id, + 
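[Editor's note] The dataset endpoints above resolve URLs through the app router by route name instead of hard-coded strings. A compact sketch of that url_for pattern:

```python
from aiohttp import web

async def get_datasets_metadata(request: web.Request) -> web.Response:
    return web.json_response({"data": [], "error": None})

app = web.Application()
app.router.add_get(
    "/v0/locations/{location_id}/datasets",
    get_datasets_metadata,
    name="get_datasets_metadata",
)

url = app.router["get_datasets_metadata"].url_for(location_id="0").with_query(user_id="21")
assert str(url) == "/v0/locations/0/datasets?user_id=21"
```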
location=fmd.location, + bucket_name=fmd.bucket_name, + object_name=fmd.object_name, + project_id=fmd.project_id, + project_name=fmd.project_name, + node_id=fmd.node_id, + node_name=fmd.node_name, + file_name=fmd.file_name, + user_id=fmd.user_id, + user_name=fmd.user_name, + file_id=fmd.file_id, + raw_file_path=fmd.raw_file_path, + display_file_path=fmd.display_file_path, + created_at=fmd.created_at, + last_modified=fmd.last_modified, + file_size=fmd.file_size, + ) engine = sa.create_engine(url) conn = engine.connect() conn.execute(ins) engine.dispose() + def create_full_tables(url): meta = sa.MetaData() engine = sa.create_engine(url) - meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users], checkfirst=True) - meta.create_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users]) + meta.drop_all( + bind=engine, + tables=[file_meta_data, projects, user_to_projects, users], + checkfirst=True, + ) + meta.create_all( + bind=engine, tables=[file_meta_data, projects, user_to_projects, users] + ) for t in ["file_meta_data", "projects", "users", "user_to_projects"]: filename = t + ".csv" csv_file = str(data_dir() / Path(filename)) - with open(csv_file, 'r') as file: + with open(csv_file, "r") as file: data_df = pd.read_csv(file) - data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append') + data_df.to_sql( + t, con=engine, index=False, index_label="id", if_exists="append" + ) # NOTE: Leave here as a reference # import psycopg2 @@ -145,9 +166,12 @@ def create_full_tables(url): # data_df.to_sql(t, con=engine, index=False, index_label="id", if_exists='append') engine.dispose() + def drop_all_tables(url): meta = sa.MetaData() engine = sa.create_engine(url) - meta.drop_all(bind=engine, tables=[file_meta_data, projects, user_to_projects, users]) + meta.drop_all( + bind=engine, tables=[file_meta_data, projects, user_to_projects, users] + ) engine.dispose() diff --git a/services/web/server/Makefile b/services/web/server/Makefile index dfd314196fa..7877b898cc1 100644 --- a/services/web/server/Makefile +++ b/services/web/server/Makefile @@ -1,19 +1,13 @@ # -# TODO: under development +# Targets for DEVELOPMENT for Webserver service # -.DEFAULT_GOAL := help +include ../../../scripts/common.Makefile APP_NAME := webserver APP_CLI_NAME := simcore-service-catalog export APP_VERSION = $(shell cat VERSION) REPO_BASE_DIR = $(abspath $(CURDIR)/../../../) -VENV_DIR ?= $(abspath $(REPO_BASE_DIR)/.venv) - - -.PHONY: devenv -devenv: ## build development environment (using main services/docker-compose-build.yml) - @$(MAKE) -C ${REPO_BASE_DIR} $@ .PHONY: requirements @@ -26,15 +20,10 @@ openapi-specs: ## updates and validates openapi specifications $(MAKE) -C $(CURDIR)/src/simcore_service_${APP_NAME}/api $@ -.check-venv-active: - # checking whether virtual environment was activated - @python3 -c "import sys; assert sys.base_prefix!=sys.prefix" - - .PHONY: install-dev install-prod install-ci -install-dev install-prod install-ci: openapi-specs requirements .check-venv-active ## install app in development/production or CI mode +install-dev install-prod install-ci: openapi-specs requirements _check_venv_active ## install app in development/production or CI mode # installing in $(subst install-,,$@) mode - @$(VENV_DIR)/bin/pip3 install -r requirements/$(subst install-,,$@).txt + python -m pip install -r requirements/$(subst install-,,$@).txt .PHONY: tests-unit tests-integration tests @@ -62,8 +51,6 @@ run-devel: ## runs app with pg service $(APP_CLI_NAME) -c 
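[Editor's note] insert_metadata above is a plain SQLAlchemy-core insert followed by an engine disposal. A trimmed sketch under the SQLAlchemy 1.3-era API this repo uses, with the table columns reduced for brevity:

```python
import sqlalchemy as sa

metadata = sa.MetaData()
file_meta_data = sa.Table(  # reduced stand-in for the real table
    "file_meta_data",
    metadata,
    sa.Column("file_uuid", sa.String, primary_key=True),
    sa.Column("file_name", sa.String),
    sa.Column("file_size", sa.Integer),
)

def insert_row(url: str, **values) -> None:
    engine = sa.create_engine(url)
    try:
        conn = engine.connect()
        conn.execute(file_meta_data.insert().values(**values))  # autocommits in SA 1.3
        conn.close()
    finally:
        engine.dispose()
```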
tests/unit/with_dbs/config-devel.yml - - .PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc build build-nc build-devel build-devel-nc build-cache build-cache-nc: openapi-specs ## docker image build in many flavours # building ${APP_NAME} ... @@ -73,41 +60,3 @@ build build-nc build-devel build-devel-nc build-cache build-cache-nc: openapi-sp .PHONY: tail tail: ## tails log of $(APP_NAME) container docker logs -f $(shell docker ps -f "name=$(APP_NAME)*" --format {{.ID}}) > $(APP_NAME).log 2>&1 - - - -.PHONY: autoformat -autoformat: ## runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] - # auto formatting with black - @python3 -m black --verbose $(CURDIR) - -.PHONY: version-patch version-minor -version-patch version-minor: ## commits version as patch (bug fixes not affecting the API), minor/minor (backwards-compatible/INcompatible API addition or changes) - # upgrades as $(subst version-,,$@) version, commits and tags - @bump2version --verbose --list $(subst version-,,$@) - - -.PHONY: info -info: ## displays - # installed - @pip list - # version - @cat setup.py | grep version= - - -.PHONY: clean -clean: ## cleans all unversioned files in project and temp files create by this makefile - # Cleaning unversioned - @git clean -ndxf -e .vscode/ - @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ] - @git clean -dxf -e .vscode/ - - -.PHONY: help -# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html -help: ## this colorful help - @echo "Recipes for '${APP_NAME}':" - @echo "" - @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) - @echo "" diff --git a/services/web/server/docker/healthcheck.py b/services/web/server/docker/healthcheck.py index af33e1cafa3..3faa018b291 100644 --- a/services/web/server/docker/healthcheck.py +++ b/services/web/server/docker/healthcheck.py @@ -26,9 +26,14 @@ ok = os.environ.get("SC_BOOT_MODE").lower() == "debug" # Queries host -ok = ok or urlopen("{host}{baseurl}".format( - host=sys.argv[1], - baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "")) # adds a base-path if defined in environ - ).getcode() == 200 +ok = ( + ok + or urlopen( + "{host}{baseurl}".format( + host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") + ) # adds a base-path if defined in environ + ).getcode() + == 200 +) sys.exit(SUCCESS if ok else UNHEALTHY) diff --git a/services/web/server/setup.py b/services/web/server/setup.py index c1b85714a8a..58aab19e2fc 100644 --- a/services/web/server/setup.py +++ b/services/web/server/setup.py @@ -7,43 +7,42 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -def read_reqs( reqs_path: Path): - return re.findall(r'(^[^#-][\w]+[-~>=<.\w]+)', reqs_path.read_text(), re.MULTILINE) +def read_reqs(reqs_path: Path): + return re.findall(r"(^[^#-][\w]+[-~>=<.\w]+)", reqs_path.read_text(), re.MULTILINE) -#----------------------------------------------------------------- +# ----------------------------------------------------------------- -install_requirements = read_reqs( current_dir / "requirements" / "_base.txt" ) + [ +install_requirements = read_reqs(current_dir / "requirements" / "_base.txt") + [ "s3wrapper==0.1.0", "simcore-postgres-database", "simcore-sdk==0.1.0", "simcore-service-library", ] -test_requirements = read_reqs( current_dir / "requirements" / 
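[Editor's note] The healthcheck script above boils down to "debug boot mode passes, otherwise probe the HTTP endpoint". A defensive sketch of the same logic; unlike the original, it defaults SC_BOOT_MODE to "" so an unset variable cannot raise:

```python
import os
import sys
from urllib.request import urlopen

SUCCESS, UNHEALTHY = 0, 1

def probe(host: str) -> int:
    if os.environ.get("SC_BOOT_MODE", "").lower() == "debug":
        return SUCCESS  # debug boot mode short-circuits the HTTP probe
    base_path = os.environ.get("SIMCORE_NODE_BASEPATH", "")  # optional base-path
    try:
        ok = urlopen(f"{host}{base_path}").getcode() == 200
    except OSError:
        ok = False
    return SUCCESS if ok else UNHEALTHY

if __name__ == "__main__":
    sys.exit(probe(sys.argv[1]))
```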
"_test.txt" ) +test_requirements = read_reqs(current_dir / "requirements" / "_test.txt") setup( - name='simcore-service-webserver', + name="simcore-service-webserver", version="0.4.0", - packages=find_packages(where='src'), - package_dir={ - '': 'src', - }, + packages=find_packages(where="src"), + package_dir={"": "src",}, include_package_data=True, package_data={ - '': [ - 'api/v0/openapi.yaml', - 'api/v0/schemas/*.json', - 'config/*.y*ml', - 'data/*.json', - 'templates/**/*.html', - ] + "": [ + "api/v0/openapi.yaml", + "api/v0/schemas/*.json", + "config/*.y*ml", + "data/*.json", + "templates/**/*.html", + ] }, entry_points={ - 'console_scripts': [ - 'simcore-service-webserver=simcore_service_webserver.__main__:main', ] - }, - python_requires='>=3.6', + "console_scripts": [ + "simcore-service-webserver=simcore_service_webserver.__main__:main", + ] + }, + python_requires=">=3.6", install_requires=install_requirements, tests_require=test_requirements, - setup_requires=['pytest-runner'] + setup_requires=["pytest-runner"], ) diff --git a/services/web/server/src/simcore_service_webserver/__version__.py b/services/web/server/src/simcore_service_webserver/__version__.py index ead0df82a2c..8f4c858af49 100644 --- a/services/web/server/src/simcore_service_webserver/__version__.py +++ b/services/web/server/src/simcore_service_webserver/__version__.py @@ -5,7 +5,7 @@ from semantic_version import Version -__version__ : str = pkg_resources.get_distribution('simcore_service_webserver').version +__version__: str = pkg_resources.get_distribution("simcore_service_webserver").version version = Version(__version__) diff --git a/services/web/server/src/simcore_service_webserver/activity/__init__.py b/services/web/server/src/simcore_service_webserver/activity/__init__.py index b10b4915af3..8c7520cb601 100644 --- a/services/web/server/src/simcore_service_webserver/activity/__init__.py +++ b/services/web/server/src/simcore_service_webserver/activity/__init__.py @@ -4,9 +4,11 @@ from aiohttp import web from servicelib.application_keys import APP_CONFIG_KEY from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from ..rest_config import APP_OPENAPI_SPECS_KEY from . 
import handlers @@ -14,11 +16,13 @@ logger = logging.getLogger(__name__) + @app_module_setup( __name__, category=ModuleCategory.ADDON, - depends=['simcore_service_webserver.rest'], - logger=logger) + depends=["simcore_service_webserver.rest"], + logger=logger, +) def setup(app: web.Application): # setup routes ------------ @@ -26,16 +30,12 @@ def setup(app: web.Application): def include_path(tup_object): _method, path, _operation_id, _tags = tup_object - return any( tail in path for tail in ['/activity/status'] ) + return any(tail in path for tail in ["/activity/status"]) - handlers_dict = { - 'get_status': handlers.get_status - } + handlers_dict = {"get_status": handlers.get_status} routes = map_handlers_with_operations( - handlers_dict, - filter(include_path, iter_path_operations(specs)), - strict=True + handlers_dict, filter(include_path, iter_path_operations(specs)), strict=True ) app.router.add_routes(routes) @@ -43,6 +43,4 @@ def include_path(tup_object): # alias setup_activity = setup -__all__ = ( - 'setup_activity' -) +__all__ = "setup_activity" diff --git a/services/web/server/src/simcore_service_webserver/activity/config.py b/services/web/server/src/simcore_service_webserver/activity/config.py index 0bc5d2a675f..3ff44ac975e 100644 --- a/services/web/server/src/simcore_service_webserver/activity/config.py +++ b/services/web/server/src/simcore_service_webserver/activity/config.py @@ -6,9 +6,13 @@ CONFIG_SECTION_NAME = "activity" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("prometheus_host", default='http://prometheus', optional=False): T.String(), - T.Key("prometheus_port", default=9090, optional=False): T.Int(), - T.Key("prometheus_api_version", default='v1', optional=False): T.String() -}) +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Bool(), + T.Key( + "prometheus_host", default="http://prometheus", optional=False + ): T.String(), + T.Key("prometheus_port", default=9090, optional=False): T.Int(), + T.Key("prometheus_api_version", default="v1", optional=False): T.String(), + } +) diff --git a/services/web/server/src/simcore_service_webserver/activity/handlers.py b/services/web/server/src/simcore_service_webserver/activity/handlers.py index 3aa85a4140d..0883a83debf 100644 --- a/services/web/server/src/simcore_service_webserver/activity/handlers.py +++ b/services/web/server/src/simcore_service_webserver/activity/handlers.py @@ -1,10 +1,12 @@ import asyncio +from collections import defaultdict import aiohttp +from yarl import URL + from servicelib.application_keys import APP_CONFIG_KEY from servicelib.client_session import get_client_session from servicelib.request_keys import RQT_USERID_KEY -from yarl import URL from ..computation_handlers import get_celery from ..login.decorators import login_required @@ -15,9 +17,11 @@ async def query_prometheus(session, url, query): result = await resp.json() return result + def celery_reserved(app): return get_celery(app).control.inspect().reserved() + # # Functions getting the data to be executed async # @@ -25,24 +29,27 @@ async def get_cpu_usage(session, url, user_id): cpu_query = f'sum by (container_label_node_id) (irate(container_cpu_usage_seconds_total{{container_label_node_id=~".+", container_label_user_id="{user_id}"}}[20s])) * 100' return await query_prometheus(session, url, cpu_query) + async def get_memory_usage(session, url, user_id): memory_query = f'container_memory_usage_bytes{{container_label_node_id=~".+", container_label_user_id="{user_id}"}} / 1000000' 
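[Editor's note] query_prometheus is a thin wrapper over Prometheus's instant-query HTTP API. A self-contained sketch of the URL construction and request, assuming the defaults declared in activity/config.py:

```python
import aiohttp
from yarl import URL

def build_query_url(
    host: str = "http://prometheus", port: int = 9090, api_version: str = "v1"
) -> URL:
    return URL(host).with_port(port).with_path(f"api/{api_version}/query")

async def query_prometheus(session: aiohttp.ClientSession, url: URL, query: str) -> dict:
    # GET /api/v1/query?query=<promql>; the payload carries {"data": {"result": [...]}}
    async with session.get(url.with_query(query=query)) as resp:
        return await resp.json()
```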
return await query_prometheus(session, url, memory_query) + async def get_celery_reserved(app): return celery_reserved(app) + async def get_container_metric_for_labels(session, url, user_id): just_a_metric = f'container_cpu_user_seconds_total{{container_label_node_id=~".+", container_label_user_id="{user_id}"}}' return await query_prometheus(session, url, just_a_metric) def get_prometheus_result_or_default(result, default): - if (isinstance(result, Exception)): + if isinstance(result, Exception): # Logs exception return default - return result['data']['result'] - + return result["data"]["result"] + @login_required async def get_status(request: aiohttp.web.Request): @@ -50,66 +57,61 @@ async def get_status(request: aiohttp.web.Request): user_id = request.get(RQT_USERID_KEY, -1) - config = request.app[APP_CONFIG_KEY]['activity'] - url = URL(config.get('prometheus_host')).with_port(config.get('prometheus_port')).with_path('api/' + config.get('prometheus_api_version') + '/query') + config = request.app[APP_CONFIG_KEY]["activity"] + url = ( + URL(config.get("prometheus_host")) + .with_port(config.get("prometheus_port")) + .with_path("api/" + config.get("prometheus_api_version") + "/query") + ) results = await asyncio.gather( get_cpu_usage(session, url, user_id), get_memory_usage(session, url, user_id), get_celery_reserved(request.app), get_container_metric_for_labels(session, url, user_id), - return_exceptions=True + return_exceptions=True, ) cpu_usage = get_prometheus_result_or_default(results[0], []) mem_usage = get_prometheus_result_or_default(results[1], []) metric = get_prometheus_result_or_default(results[3], []) celery_inspect = results[2] - res = {} + res = defaultdict(dict) for node in cpu_usage: - node_id = node['metric']['container_label_node_id'] - usage = float(node['value'][1]) - res[node_id] = { - 'stats': { - 'cpuUsage': usage - } - } + node_id = node["metric"]["container_label_node_id"] + usage = float(node["value"][1]) + res[node_id] = {"stats": {"cpuUsage": usage}} for node in mem_usage: - node_id = node['metric']['container_label_node_id'] - usage = float(node['value'][1]) + node_id = node["metric"]["container_label_node_id"] + usage = float(node["value"][1]) if node_id in res: - res[node_id]['stats']['memUsage'] = usage + res[node_id]["stats"]["memUsage"] = usage else: - res[node_id] = { - 'stats': { - 'memUsage': usage - } - } + res[node_id] = {"stats": {"memUsage": usage}} for node in metric: - limits = { - 'cpus': 0, - 'mem': 0 - } - metric_labels = node['metric'] - limits['cpus'] = float(metric_labels.get('container_label_nano_cpus_limit', 0)) / pow(10, 9) # Nanocpus to cpus - limits['mem'] = float(metric_labels.get('container_label_mem_limit', 0)) / pow(1024, 2) # In MB - node_id = metric_labels.get('container_label_node_id') - res[node_id]['limits'] = limits - - if (hasattr(celery_inspect, 'items')): + limits = {"cpus": 0, "mem": 0} + metric_labels = node["metric"] + limits["cpus"] = float( + metric_labels.get("container_label_nano_cpus_limit", 0) + ) / pow( + 10, 9 + ) # Nanocpus to cpus + limits["mem"] = float(metric_labels.get("container_label_mem_limit", 0)) / pow( + 1024, 2 + ) # In MB + node_id = metric_labels.get("container_label_node_id") + res[node_id]["limits"] = limits + + if hasattr(celery_inspect, "items"): for dummy_worker_id, worker in celery_inspect.items(): for task in worker: - if (task['args'][1:-1].split(', ')[0] == str(user_id)): # Extracts user_id from task's args - node_id = task['args'][1:-1].split(', ')[2][1:-1] # Extracts node_id from task's 
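[Editor's note] get_status below fires all four lookups concurrently and, thanks to return_exceptions=True, degrades any failed query to a default instead of aborting the whole status call. The pattern in isolation:

```python
import asyncio
from collections import defaultdict

def result_or_default(result, default):
    # with return_exceptions=True, failures come back as exception objects
    return default if isinstance(result, Exception) else result

async def main() -> dict:
    async def cpu_usage():
        return [{"node_id": "a", "cpu": 12.5}]

    async def mem_usage():
        raise RuntimeError("prometheus unreachable")

    results = await asyncio.gather(cpu_usage(), mem_usage(), return_exceptions=True)
    res = defaultdict(dict)  # avoids the key-exists checks a plain dict would need
    res["a"]["stats"] = {"cpuUsage": result_or_default(results[0], [])}
    assert result_or_default(results[1], []) == []  # failure degraded to default
    return dict(res)

asyncio.run(main())
```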
args - if node_id in res: - res[node_id]['queued'] = True - else: - res[node_id] = { - 'queued': True - } - - if (not res): + values = task["args"][1:-1].split(", ") + if values[0] == str(user_id): # Extracts user_id from task's args + node_id = values[2][1:-1] # Extracts node_id from task's args + res[node_id]["queued"] = True + + if not res: raise aiohttp.web.HTTPNoContent - return res + return dict(res) diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py index f5f4e95554e..0be88d232ff 100644 --- a/services/web/server/src/simcore_service_webserver/application.py +++ b/services/web/server/src/simcore_service_webserver/application.py @@ -34,9 +34,12 @@ log = logging.getLogger(__name__) -@app_module_setup("servicelib.monitoring", ModuleCategory.ADDON, +@app_module_setup( + "servicelib.monitoring", + ModuleCategory.ADDON, config_enabled="main.monitoring_enabled", - logger=log) + logger=log, +) def setup_app_monitoring(app: web.Application): return setup_monitoring(app, "simcore_service_webserver") @@ -45,8 +48,10 @@ def create_application(config: Dict) -> web.Application: """ Initializes service """ - log.debug("Initializing app with config:\n%s", - json.dumps(config, indent=2, sort_keys=True)) + log.debug( + "Initializing app with config:\n%s", + json.dumps(config, indent=2, sort_keys=True), + ) app = create_safe_application(config) @@ -66,7 +71,7 @@ def create_application(config: Dict) -> web.Application: setup_director(app) setup_storage(app) setup_users(app) - setup_projects(app) # needs storage + setup_projects(app) # needs storage setup_studies_access(app) setup_activity(app) setup_app_proxy(app) @@ -76,6 +81,7 @@ def create_application(config: Dict) -> web.Application: return app + def run_service(config: dict): """ Runs service @@ -84,12 +90,7 @@ def run_service(config: dict): app = create_application(config) - web.run_app(app, - host=config["main"]["host"], - port=config["main"]["port"]) + web.run_app(app, host=config["main"]["host"], port=config["main"]["port"]) -__all__ = ( - 'create_application', - 'run_service' -) +__all__ = ("create_application", "run_service") diff --git a/services/web/server/src/simcore_service_webserver/application_config.py b/services/web/server/src/simcore_service_webserver/application_config.py index d5cfeaf8901..442295ebda5 100644 --- a/services/web/server/src/simcore_service_webserver/application_config.py +++ b/services/web/server/src/simcore_service_webserver/application_config.py @@ -24,8 +24,16 @@ from servicelib.config_schema_utils import addon_section, minimal_addon_schema from trafaret_config.simple import read_and_validate -from . import (catalog_config, computation_config, db_config, email_config, - rest_config, session_config, storage_config, tracing) +from . 
import ( + catalog_config, + computation_config, + db_config, + email_config, + rest_config, + session_config, + storage_config, + tracing, +) from .activity import config as activity_config from .director import config as director_config from .login import config as login_config @@ -36,8 +44,8 @@ log = logging.getLogger(__name__) -CLI_DEFAULT_CONFIGFILE = 'server-defaults.yaml' -assert resources.exists( 'config/' + CLI_DEFAULT_CONFIGFILE ) # nosec +CLI_DEFAULT_CONFIGFILE = "server-defaults.yaml" +assert resources.exists("config/" + CLI_DEFAULT_CONFIGFILE) # nosec def create_schema() -> T.Dict: @@ -45,50 +53,66 @@ def create_schema() -> T.Dict: Build schema for the configuration's file by aggregating all the subsystem configurations """ - schema = T.Dict({ - "version": T.String(), - "main": T.Dict({ - "host": T.IP, - "port": T.Int(), - "client_outdir": T.String(), - "log_level": T.Enum(*logging._nameToLevel.keys()), # pylint: disable=protected-access - "testing": T.Bool(), - T.Key("studies_access_enabled", default=False): T.Or(T.Bool(), T.Int), - - T.Key("monitoring_enabled", default=False): T.Or(T.Bool(), T.Int), # Int added to use environs - }), - addon_section(tracing.tracing_section_name, optional=True): tracing.schema, - db_config.CONFIG_SECTION_NAME: db_config.schema, - director_config.CONFIG_SECTION_NAME: director_config.schema, - rest_config.CONFIG_SECTION_NAME: rest_config.schema, - projects_config.CONFIG_SECTION_NAME: projects_config.schema, - email_config.CONFIG_SECTION_NAME: email_config.schema, - computation_config.CONFIG_SECTION_NAME: computation_config.schema, - storage_config.CONFIG_SECTION_NAME: storage_config.schema, - addon_section(login_config.CONFIG_SECTION_NAME, optional=True): login_config.schema, - addon_section(socketio_config.CONFIG_SECTION_NAME, optional=True): socketio_config.schema, - session_config.CONFIG_SECTION_NAME: session_config.schema, - activity_config.CONFIG_SECTION_NAME: activity_config.schema, - resource_manager_config.CONFIG_SECTION_NAME: resource_manager_config.schema, - # BELOW HERE minimal sections until more options are needed - addon_section("reverse_proxy", optional=True): minimal_addon_schema(), - addon_section("application_proxy", optional=True): minimal_addon_schema(), - addon_section("users", optional=True): minimal_addon_schema(), - addon_section("studies_access", optional=True): minimal_addon_schema(), - addon_section("tags", optional=True): minimal_addon_schema(), - addon_section("catalog", optional=True): catalog_config.schema, - }) + # pylint: disable=protected-access + schema = T.Dict( + { + "version": T.String(), + "main": T.Dict( + { + "host": T.IP, + "port": T.Int(), + "client_outdir": T.String(), + "log_level": T.Enum( + *logging._nameToLevel.keys() + ), + "testing": T.Bool(), + T.Key("studies_access_enabled", default=False): T.Or( + T.Bool(), T.Int + ), + T.Key("monitoring_enabled", default=False): T.Or( + T.Bool(), T.Int + ), # Int added to use environs + } + ), + addon_section(tracing.tracing_section_name, optional=True): tracing.schema, + db_config.CONFIG_SECTION_NAME: db_config.schema, + director_config.CONFIG_SECTION_NAME: director_config.schema, + rest_config.CONFIG_SECTION_NAME: rest_config.schema, + projects_config.CONFIG_SECTION_NAME: projects_config.schema, + email_config.CONFIG_SECTION_NAME: email_config.schema, + computation_config.CONFIG_SECTION_NAME: computation_config.schema, + storage_config.CONFIG_SECTION_NAME: storage_config.schema, + addon_section( + login_config.CONFIG_SECTION_NAME, optional=True + ): 
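[Editor's note] create_schema aggregates per-subsystem trafaret schemas into one dict validator. A toy schema showing how trafaret checks a section and fills declared defaults:

```python
import trafaret as T

main_schema = T.Dict(
    {
        "host": T.String(),
        "port": T.Int(),
        T.Key("testing", default=False, optional=True): T.Bool(),
    }
)

config = main_schema.check({"host": "localhost", "port": 8080})
assert config == {"host": "localhost", "port": 8080, "testing": False}
```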
login_config.schema, + addon_section( + socketio_config.CONFIG_SECTION_NAME, optional=True + ): socketio_config.schema, + session_config.CONFIG_SECTION_NAME: session_config.schema, + activity_config.CONFIG_SECTION_NAME: activity_config.schema, + resource_manager_config.CONFIG_SECTION_NAME: resource_manager_config.schema, + # BELOW HERE minimal sections until more options are needed + addon_section("reverse_proxy", optional=True): minimal_addon_schema(), + addon_section("application_proxy", optional=True): minimal_addon_schema(), + addon_section("users", optional=True): minimal_addon_schema(), + addon_section("studies_access", optional=True): minimal_addon_schema(), + addon_section("tags", optional=True): minimal_addon_schema(), + addon_section("catalog", optional=True): catalog_config.schema, + } + ) section_names = [k.name for k in schema.keys] - assert len(section_names) == len(set(section_names)), "Found repeated section names in %s" % section_names # nosec + assert len(section_names) == len(set(section_names)), ( + "Found repeated section names in %s" % section_names + ) # nosec return schema def load_default_config(environs=None) -> Dict: - filepath: Path = resources.get_path(f'config/{CLI_DEFAULT_CONFIGFILE}') + filepath: Path = resources.get_path(f"config/{CLI_DEFAULT_CONFIGFILE}") return read_and_validate(filepath, trafaret=app_schema, vars=environs) -app_schema = create_schema() # TODO: rename as schema +app_schema = create_schema() # TODO: rename as schema diff --git a/services/web/server/src/simcore_service_webserver/application_proxy.py b/services/web/server/src/simcore_service_webserver/application_proxy.py index b9731acea85..ab90e9122dc 100644 --- a/services/web/server/src/simcore_service_webserver/application_proxy.py +++ b/services/web/server/src/simcore_service_webserver/application_proxy.py @@ -26,6 +26,7 @@ logger = logging.getLogger(__name__) + @attr.s(auto_attribs=True) class ServiceMonitor(ServiceResolutionPolicy): app: web.Application @@ -51,8 +52,7 @@ async def _request_info(self, service_identifier: str): # override async def get_image_name(self, service_identifier: str) -> str: data = await self._request_info(service_identifier) - return data.get('service_key') - + return data.get("service_key") # override async def find_url(self, service_identifier: str) -> URL: @@ -60,30 +60,32 @@ async def find_url(self, service_identifier: str) -> URL: """ data = await self._request_info(service_identifier) - base_url = URL.build(scheme="http", - host=data.get('service_host'), - port=data.get('service_port'), - path=data.get('service_basepath')) - - if not os.environ.get('IS_CONTAINER_CONTEXT'): + base_url = URL.build( + scheme="http", + host=data.get("service_host"), + port=data.get("service_port"), + path=data.get("service_basepath"), + ) + + if not os.environ.get("IS_CONTAINER_CONTEXT"): # If server is not in swarm (e.g. 
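[Editor's note] find_url above assembles the backend URL with yarl's URL.build and, outside the swarm, swaps in localhost plus the published port. The same logic in isolation, with field names as in the director's service-info payload:

```python
from yarl import URL

def service_url(info: dict, in_container: bool) -> URL:
    url = URL.build(
        scheme="http",
        host=info.get("service_host"),
        port=info.get("service_port"),
        path=info.get("service_basepath"),
    )
    if not in_container:
        # outside the swarm only the published port is reachable
        url = url.with_host("127.0.0.1").with_port(info["published_port"])
    return url

info = {"service_host": "svc", "service_port": 8080, "service_basepath": "/x", "published_port": 30123}
assert str(service_url(info, in_container=True)) == "http://svc:8080/x"
assert str(service_url(info, in_container=False)) == "http://127.0.0.1:30123/x"
```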
during testing) then host:port = localhost:data['published_port'] - base_url = base_url.with_host('127.0.0.1') \ - .with_port(data['published_port']) + base_url = base_url.with_host("127.0.0.1").with_port(data["published_port"]) return base_url - - -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=["simcore_service_webserver.director", ], - logger=logger) +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.director",], + logger=logger, +) def setup(app: web.Application): monitor = ServiceMonitor(app, base_url=app[APP_DIRECTOR_API_KEY]) setup_reverse_proxy(app, monitor) - assert "reverse_proxy" in app.router # nosec + assert "reverse_proxy" in app.router # nosec app["reverse_proxy.basemount"] = monitor.base_mountpoint @@ -92,6 +94,4 @@ def setup(app: web.Application): setup_app_proxy = setup -__all__ = ( - 'setup_app_proxy' -) +__all__ = "setup_app_proxy" diff --git a/services/web/server/src/simcore_service_webserver/cli_config.py b/services/web/server/src/simcore_service_webserver/cli_config.py index 672ad008ed2..08e2f8cee03 100644 --- a/services/web/server/src/simcore_service_webserver/cli_config.py +++ b/services/web/server/src/simcore_service_webserver/cli_config.py @@ -1,4 +1,3 @@ - import argparse import os import logging @@ -25,13 +24,15 @@ def add_cli_options(argument_parser, default_config): argument_parser = argparse.ArgumentParser() commandline.standard_argparse_options( - argument_parser.add_argument_group('settings'), - default_config=default_config) + argument_parser.add_argument_group("settings"), default_config=default_config + ) return argument_parser -def config_from_options(options, schema, vars=None): # pylint: disable=redefined-builtin +def config_from_options( + options, schema, vars=None +): # pylint: disable=redefined-builtin if vars is None: vars = os.environ @@ -40,7 +41,7 @@ def config_from_options(options, schema, vars=None): # pylint: disable=redefined if resources.exists(resource_name): options.config = resources.get_path(resource_name) else: - resource_name = resources.config_folder + '/' + resource_name + resource_name = resources.config_folder + "/" + resource_name if resources.exists(resource_name): options.config = resources.get_path(resource_name) @@ -49,16 +50,12 @@ def config_from_options(options, schema, vars=None): # pylint: disable=redefined return commandline.config_from_options(options, trafaret=schema, vars=vars) - - - - - - # FIXME: should replace these functions and remove dependency -def read_and_validate(filepath, vars=None): # pylint: disable=W0622 + +def read_and_validate(filepath, vars=None): # pylint: disable=W0622 from .application_config import app_schema + if vars is None: vars = os.environ # NOTE: vars=os.environ in signature freezes default to os.environ before it gets @@ -74,5 +71,6 @@ def config_from_file(filepath) -> dict: Raises trafaret_config.ConfigError """ from .application_config import app_schema + config = trafaret_config.read_and_validate(filepath, app_schema, vars=os.environ) return config diff --git a/services/web/server/src/simcore_service_webserver/computation.py b/services/web/server/src/simcore_service_webserver/computation.py index c63d3fa45e3..b8aa185e35a 100644 --- a/services/web/server/src/simcore_service_webserver/computation.py +++ b/services/web/server/src/simcore_service_webserver/computation.py @@ -11,8 +11,7 @@ from aiohttp import web from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import 
(iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import iter_path_operations, map_handlers_with_operations from . import computation_handlers from .computation_comp_tasks_listening_task import setup as setup_comp_tasks_listener @@ -23,9 +22,9 @@ log = logging.getLogger(__file__) -@app_module_setup(__name__, ModuleCategory.ADDON, - config_section=CONFIG_SECTION_NAME, - logger=log) +@app_module_setup( + __name__, ModuleCategory.ADDON, config_section=CONFIG_SECTION_NAME, logger=log +) def setup(app: web.Application): # subscribe to rabbit upon startup # TODO: Define connection policies (e.g. {on-startup}, lazy). Could be defined in config-file @@ -35,23 +34,25 @@ def setup(app: web.Application): # app.on_cleanup.append(unsubscribe) if not APP_OPENAPI_SPECS_KEY in app: - log.warning("rest submodule not initialised? computation routes will not be defined!") + log.warning( + "rest submodule not initialised? computation routes will not be defined!" + ) return specs = app[APP_OPENAPI_SPECS_KEY] - routes = map_handlers_with_operations({ - 'start_pipeline': computation_handlers.start_pipeline, - 'update_pipeline': computation_handlers.update_pipeline + routes = map_handlers_with_operations( + { + "start_pipeline": computation_handlers.start_pipeline, + "update_pipeline": computation_handlers.update_pipeline, }, filter(lambda o: "/computation" in o[1], iter_path_operations(specs)), - strict=True + strict=True, ) app.router.add_routes(routes) setup_comp_tasks_listener(app) + # alias setup_computation = setup -__all__ = ( - "setup_computation" -) +__all__ = "setup_computation" diff --git a/services/web/server/src/simcore_service_webserver/computation_api.py b/services/web/server/src/simcore_service_webserver/computation_api.py index 9d64efc16d5..9dcaefca60b 100644 --- a/services/web/server/src/simcore_service_webserver/computation_api.py +++ b/services/web/server/src/simcore_service_webserver/computation_api.py @@ -171,7 +171,7 @@ async def _parse_project_data(pipeline_data: Dict, app: web.Application): "inputs": node_inputs, "outputs": node_outputs, "image": {"name": node_key, "tag": node_version}, - "node_class": to_node_class(node_key) + "node_class": to_node_class(node_key), } log.debug("storing task for node %s: %s", node_uuid, task) @@ -256,7 +256,7 @@ async def _set_tasks_in_tasks_db( await conn.execute(query) internal_id = internal_id + 1 - except psycopg2.errors.UniqueViolation: # pylint: disable=no-member + except psycopg2.errors.UniqueViolation: # pylint: disable=no-member if replace_pipeline: # replace task query = ( diff --git a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py index f5a894d58f6..bc312f4fbd5 100644 --- a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py +++ b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py @@ -59,12 +59,12 @@ async def register_trigger_function(app: web.Application): EXECUTE PROCEDURE {DB_PROCEDURE_NAME}(); """ - async with db_engine.acquire() as conn: async with conn.begin(): await conn.execute(notification_fct_query) await conn.execute(trigger_registration_query) + async def listen(app: web.Application): listen_query = f"LISTEN {DB_CHANNEL_NAME};" db_engine: Engine = app[APP_DB_ENGINE_KEY] @@ -80,10 +80,16 @@ async def listen(app: web.Application): project_id = node_data["project_id"] # find the user(s) 
linked to that project joint_table = user_to_projects.join(projects) - query = select([user_to_projects]).select_from(joint_table).where(projects.c.uuid == project_id) + query = ( + select([user_to_projects]) + .select_from(joint_table) + .where(projects.c.uuid == project_id) + ) async for row in conn.execute(query): user_id = row["user_id"] - node_data = await projects_api.update_project_node_outputs(app, user_id, project_id, node_id, data=task_output) + node_data = await projects_api.update_project_node_outputs( + app, user_id, project_id, node_id, data=task_output + ) messages = {"nodeUpdated": {"Node": node_id, "Data": node_data}} await post_messages(app, user_id, messages) @@ -99,6 +105,7 @@ async def comp_tasks_listening_task(app: web.Application) -> None: finally: pass + async def setup_comp_tasks_listening_task(app: web.Application): task = asyncio.get_event_loop().create_task(comp_tasks_listening_task(app)) yield diff --git a/services/web/server/src/simcore_service_webserver/computation_config.py b/services/web/server/src/simcore_service_webserver/computation_config.py index 4f5d39a666f..70fbfd82dc7 100644 --- a/services/web/server/src/simcore_service_webserver/computation_config.py +++ b/services/web/server/src/simcore_service_webserver/computation_config.py @@ -7,7 +7,7 @@ # import trafaret as T -SERVICE_NAME = 'rabbit' +SERVICE_NAME = "rabbit" CONFIG_SECTION_NAME = SERVICE_NAME APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY = __name__ + ".rabbit_handlers" APP_COMP_TASKS_LISTENING_KEY: str = __name__ + ".comp_tasks_listening_key" diff --git a/services/web/server/src/simcore_service_webserver/computation_handlers.py b/services/web/server/src/simcore_service_webserver/computation_handlers.py index 644faf51220..8ce9360f63c 100644 --- a/services/web/server/src/simcore_service_webserver/computation_handlers.py +++ b/services/web/server/src/simcore_service_webserver/computation_handlers.py @@ -21,12 +21,14 @@ computation_routes = web.RouteTableDef() + def get_celery(_app: web.Application): config = _app[APP_CONFIG_KEY][CONFIG_RABBIT_SECTION] rabbit = rabbit_config(config=config) celery = Celery(rabbit.name, broker=rabbit.broker, backend=rabbit.backend) return celery + async def _process_request(request): # TODO: PC->SAN why validation is commented??? # params, query, body = await extract_and_validate(request) @@ -41,6 +43,7 @@ async def _process_request(request): # HANDLERS ------------------------------------------ + @login_required async def update_pipeline(request: web.Request) -> web.Response: await check_permission(request, "services.pipeline.*") @@ -68,14 +71,18 @@ async def start_pipeline(request: web.Request) -> web.Response: await update_pipeline_db(request.app, project_id, project["workbench"]) # commit the tasks to celery - _ = get_celery(request.app).send_task("comp.task", args=(user_id, project_id,), kwargs={}) + _ = get_celery(request.app).send_task( + "comp.task", args=(user_id, project_id,), kwargs={} + ) - log.debug("Task (user_id=%s, project_id=%s) submitted for execution.", user_id, project_id) + log.debug( + "Task (user_id=%s, project_id=%s) submitted for execution.", user_id, project_id + ) # answer the client while task has been spawned data = { # TODO: PC->SAN: some name with task id. e.g. to distinguish two projects with identical pipeline? 
- "pipeline_name":"request_data", - "project_id": project_id + "pipeline_name": "request_data", + "project_id": project_id, } return data diff --git a/services/web/server/src/simcore_service_webserver/computation_models.py b/services/web/server/src/simcore_service_webserver/computation_models.py index 1c21daa9f11..bd01ddfbf33 100644 --- a/services/web/server/src/simcore_service_webserver/computation_models.py +++ b/services/web/server/src/simcore_service_webserver/computation_models.py @@ -9,14 +9,14 @@ # TODO: test agains all names in registry/fake projects?? node_key_re = re.compile(r"^simcore/services/(comp|dynamic|frontend)(/[^\s/]+)+$") str_to_nodeclass = { - 'comp': NodeClass.COMPUTATIONAL, - 'dynamic': NodeClass.INTERACTIVE, - 'frontend': NodeClass.FRONTEND, + "comp": NodeClass.COMPUTATIONAL, + "dynamic": NodeClass.INTERACTIVE, + "frontend": NodeClass.FRONTEND, } + def to_node_class(node_key: str) -> NodeClass: match = node_key_re.match(node_key) if match: return str_to_nodeclass.get(match.group(1)) return None - diff --git a/services/web/server/src/simcore_service_webserver/computation_subscribe.py b/services/web/server/src/simcore_service_webserver/computation_subscribe.py index 676c4f19a83..1d30855d164 100644 --- a/services/web/server/src/simcore_service_webserver/computation_subscribe.py +++ b/services/web/server/src/simcore_service_webserver/computation_subscribe.py @@ -12,29 +12,32 @@ from servicelib.application_keys import APP_CONFIG_KEY from simcore_sdk.config.rabbit import eval_broker -from .computation_config import (APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY, - CONFIG_SECTION_NAME) +from .computation_config import ( + APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY, + CONFIG_SECTION_NAME, +) from .projects import projects_api -from .projects.projects_exceptions import (NodeNotFoundError, - ProjectNotFoundError) +from .projects.projects_exceptions import NodeNotFoundError, ProjectNotFoundError from .socketio.events import post_messages log = logging.getLogger(__file__) + class RabbitMQRetryPolicyUponInitialization: """ Retry policy upon service initialization """ + WAIT_SECS = 2 ATTEMPTS_COUNT = 20 - def __init__(self, logger: Optional[logging.Logger]=None): + def __init__(self, logger: Optional[logging.Logger] = None): logger = logger or log self.kwargs = dict( wait=wait_fixed(self.WAIT_SECS), stop=stop_after_attempt(self.ATTEMPTS_COUNT), before_sleep=before_sleep_log(logger, logging.INFO), - reraise=True + reraise=True, ) @@ -43,13 +46,17 @@ def rabbit_adapter(app: web.Application) -> Callable: I.e. 
aiopika handler expect functions of type `async def function(message)` This allows to create a function of type `async def function(message, app: web.Application) """ + def decorator(func) -> Coroutine: @wraps(func) async def wrapped(*args, **kwargs) -> Coroutine: return await func(*args, **kwargs, app=app) + return wrapped + return decorator + async def parse_rabbit_message_data(app: web.Application, data: Dict) -> None: log.debug("parsing message data:\n%s", pformat(data, depth=3)) # get common data @@ -61,7 +68,9 @@ async def parse_rabbit_message_data(app: web.Application, data: Dict) -> None: messages = {} if data["Channel"] == "Progress": # update corresponding project, node, progress value - node_data = await projects_api.update_project_node_progress(app, user_id, project_id, node_id, progress=data["Progress"]) + node_data = await projects_api.update_project_node_progress( + app, user_id, project_id, node_id, progress=data["Progress"] + ) messages["nodeUpdated"] = {"Node": node_id, "Data": node_data} elif data["Channel"] == "Log": messages["logger"] = data @@ -73,12 +82,15 @@ async def parse_rabbit_message_data(app: web.Application, data: Dict) -> None: log.exception("parsed rabbit message invalid") -async def rabbit_message_handler(message: aio_pika.IncomingMessage, app: web.Application) -> None: +async def rabbit_message_handler( + message: aio_pika.IncomingMessage, app: web.Application +) -> None: data = json.loads(message.body) await parse_rabbit_message_data(app, data) # NOTE: this allows the webserver to breath if a lot of messages are entering await asyncio.sleep(1) + async def subscribe(app: web.Application) -> None: # TODO: catch and deal with missing connections: # e.g. CRITICAL:pika.adapters.base_connection:Could not get addresses to use: [Errno -2] Name or service not known (rabbit) @@ -89,10 +101,10 @@ async def subscribe(app: web.Application) -> None: log.info("Creating pika connection for %s", rabbit_broker) await wait_till_rabbitmq_responsive(rabbit_broker) - connection = await aio_pika.connect_robust(rabbit_broker, - client_properties={ - "connection_name": "webserver read connection" - }) + connection = await aio_pika.connect_robust( + rabbit_broker, + client_properties={"connection_name": "webserver read connection"}, + ) channel = await connection.channel() await channel.set_qos(prefetch_count=1) @@ -119,6 +131,7 @@ async def subscribe(app: web.Application) -> None: app[APP_CLIENT_RABBIT_DECORATED_HANDLERS_KEY] = [partial_rabbit_message_handler] await queue.consume(partial_rabbit_message_handler, exclusive=True, no_ack=True) + @retry(**RabbitMQRetryPolicyUponInitialization().kwargs) async def wait_till_rabbitmq_responsive(url: str) -> bool: """Check if something responds to ``url`` """ diff --git a/services/web/server/src/simcore_service_webserver/data/s4l_converter.py b/services/web/server/src/simcore_service_webserver/data/s4l_converter.py index fcb1ae20310..006d1816e4b 100644 --- a/services/web/server/src/simcore_service_webserver/data/s4l_converter.py +++ b/services/web/server/src/simcore_service_webserver/data/s4l_converter.py @@ -7,51 +7,54 @@ import s4l_v1.document as document from s4l_v1.simulation.emlf import MaterialSettings -here = os.path.dirname( sys.argv[0] ) +here = os.path.dirname(sys.argv[0]) + + +PATTERN = re.compile(r"\W") -PATTERN = re.compile(r'\W') def make_key(m): return PATTERN.sub("_", m.Name) + "-UUID" + sims = document.AllSimulations sim = sims[0] materials = [s for s in sim.AllSettings if isinstance(s, MaterialSettings)] - def 
create_material_getitemlist(materials): - path = os.path.join(here, r'fake-materialDB-LF-getItemList.json') - with open(path, 'wt') as f: + path = os.path.join(here, r"fake-materialDB-LF-getItemList.json") + with open(path, "wt") as f: data = [dict(key=make_key(m), label=m.Name) for m in materials] json.dump(data, f, indent=2) + def create_item(m): props = [ - m.MassDensityProp, - m.ElectricProps.ConductivityProp, - m.ElectricProps.RelativePermittivityProp, - m.MagneticProps.ConductivityProp, - m.MagneticProps.RelativePermeabilityProp, + m.MassDensityProp, + m.ElectricProps.ConductivityProp, + m.ElectricProps.RelativePermittivityProp, + m.MagneticProps.ConductivityProp, + m.MagneticProps.RelativePermeabilityProp, ] - + result = {} for index, prop in enumerate(props): result[prop.Name.replace(" ", "")] = { - 'displayOrder': index, - 'label': prop.Name, - 'unit': str(prop.Unit or ""), - 'type': "number", - 'defaultValue': prop.Value + "displayOrder": index, + "label": prop.Name, + "unit": str(prop.Unit or ""), + "type": "number", + "defaultValue": prop.Value, } return result - + def create_material_getitem(materials): - path = os.path.join(here, r'fake-materialDB-LF-getItem.json') - with open(path, 'wt') as f: - data = { make_key(m): create_item(m) for m in materials } + path = os.path.join(here, r"fake-materialDB-LF-getItem.json") + with open(path, "wt") as f: + data = {make_key(m): create_item(m) for m in materials} json.dump(data, f, indent=2) @@ -64,8 +67,8 @@ def create_map(sim): result[make_key(m)].append(make_key(c)) return result - path = os.path.join(here, r'fake-materialDB-LF-Material2Entities.json') - with open(path, 'wt') as f: + path = os.path.join(here, r"fake-materialDB-LF-Material2Entities.json") + with open(path, "wt") as f: data = create_map(sim) json.dump(data, f, indent=2) @@ -79,7 +82,12 @@ def get_name(ent): name = name.replace("Model/", "") return name -path = os.path.join(here, r'fake-modeler-LF-getItemList.json') -with open(path, 'wt') as f: - data = [ dict(key=make_key(c), label=get_name(c.Geometry)) for c in sim.AllComponents if c.Geometry ] + +path = os.path.join(here, r"fake-modeler-LF-getItemList.json") +with open(path, "wt") as f: + data = [ + dict(key=make_key(c), label=get_name(c.Geometry)) + for c in sim.AllComponents + if c.Geometry + ] json.dump(data, f, indent=2) diff --git a/services/web/server/src/simcore_service_webserver/db.py b/services/web/server/src/simcore_service_webserver/db.py index 2c55afaf065..c9e5bd289ba 100644 --- a/services/web/server/src/simcore_service_webserver/db.py +++ b/services/web/server/src/simcore_service_webserver/db.py @@ -5,10 +5,14 @@ import logging from aiohttp import web -from servicelib.aiopg_utils import (DataSourceName, - PostgresRetryPolicyUponInitialization, - create_pg_engine, init_pg_tables, - is_pg_responsive, raise_if_not_responsive) +from servicelib.aiopg_utils import ( + DataSourceName, + PostgresRetryPolicyUponInitialization, + create_pg_engine, + init_pg_tables, + is_pg_responsive, + raise_if_not_responsive, +) from servicelib.application_keys import APP_CONFIG_KEY, APP_DB_ENGINE_KEY from servicelib.application_setup import ModuleCategory, app_module_setup from tenacity import Retrying @@ -16,57 +20,60 @@ from .db_config import CONFIG_SECTION_NAME from .db_models import metadata -THIS_MODULE_NAME = __name__.split(".")[-1] -THIS_SERVICE_NAME = 'postgres' +THIS_MODULE_NAME = __name__.split(".")[-1] +THIS_SERVICE_NAME = "postgres" log = logging.getLogger(__name__) async def pg_engine(app: web.Application): cfg = 
app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] - pg_cfg = cfg['postgres'] - - app[f"{__name__}.dsn"]= dsn = \ - DataSourceName( - application_name=f'{__name__}_{id(app)}', - database=pg_cfg['database'], - user=pg_cfg['user'], - password=pg_cfg['password'], - host=pg_cfg['host'], - port=pg_cfg['port'] - ) + pg_cfg = cfg["postgres"] + + app[f"{__name__}.dsn"] = dsn = DataSourceName( + application_name=f"{__name__}_{id(app)}", + database=pg_cfg["database"], + user=pg_cfg["user"], + password=pg_cfg["password"], + host=pg_cfg["host"], + port=pg_cfg["port"], + ) log.info("Creating pg engine for %s", dsn) for attempt in Retrying(**PostgresRetryPolicyUponInitialization(log).kwargs): with attempt: - engine = await create_pg_engine(dsn, - minsize=pg_cfg['minsize'], - maxsize=pg_cfg['maxsize'] + engine = await create_pg_engine( + dsn, minsize=pg_cfg["minsize"], maxsize=pg_cfg["maxsize"] ) await raise_if_not_responsive(engine) - assert engine # nosec + assert engine # nosec app[APP_DB_ENGINE_KEY] = engine - if cfg['init_tables']: + if cfg["init_tables"]: log.info("Initializing tables for %s", dsn) init_pg_tables(dsn, schema=metadata) - yield #------------------- + yield # ------------------- if engine is not app.get(APP_DB_ENGINE_KEY): log.critical("app does not hold right db engine. Somebody has changed it??") engine.close() await engine.wait_closed() - log.debug("engine '%s' after shutdown: closed=%s, size=%d", engine.dsn, engine.closed, engine.size) + log.debug( + "engine '%s' after shutdown: closed=%s, size=%d", + engine.dsn, + engine.closed, + engine.size, + ) def is_service_enabled(app: web.Application): return app.get(APP_DB_ENGINE_KEY) is not None -async def is_service_responsive(app:web.Application): +async def is_service_responsive(app: web.Application): """ Returns true if the app can connect to db service """ @@ -89,7 +96,4 @@ def setup(app: web.Application): # alias --- setup_db = setup -__all__ = ( - 'setup_db', - 'is_service_enabled' -) +__all__ = ("setup_db", "is_service_enabled") diff --git a/services/web/server/src/simcore_service_webserver/db_config.py b/services/web/server/src/simcore_service_webserver/db_config.py index bbe78fe022f..8563b218a38 100644 --- a/services/web/server/src/simcore_service_webserver/db_config.py +++ b/services/web/server/src/simcore_service_webserver/db_config.py @@ -7,11 +7,11 @@ from simcore_sdk.config.db import CONFIG_SCHEMA as _PG_SCHEMA -CONFIG_SECTION_NAME = 'db' +CONFIG_SECTION_NAME = "db" # FIXME: database user password host port minsize maxsize -#CONFIG_SCHEMA = T.Dict({ +# CONFIG_SCHEMA = T.Dict({ # "database": T.String(), # "user": T.String(), # "password": T.String(), @@ -19,11 +19,13 @@ # "port": T.Or( T.Int, T.Null), # T.Key("minsize", default=1 ,optional=True): T.Int(), # T.Key("maxsize", default=4, optional=True): T.Int(), -#}) +# }) -schema = T.Dict({ - T.Key("postgres"): _PG_SCHEMA, - T.Key("init_tables", default=False, optional=True): T.Or(T.Bool, T.Int), - T.Key("enabled", default=True, optional=True): T.Bool() -}) +schema = T.Dict( + { + T.Key("postgres"): _PG_SCHEMA, + T.Key("init_tables", default=False, optional=True): T.Or(T.Bool, T.Int), + T.Key("enabled", default=True, optional=True): T.Bool(), + } +) diff --git a/services/web/server/src/simcore_service_webserver/db_models.py b/services/web/server/src/simcore_service_webserver/db_models.py index b6ff68d4301..8113263eaf2 100644 --- a/services/web/server/src/simcore_service_webserver/db_models.py +++ b/services/web/server/src/simcore_service_webserver/db_models.py @@ -2,15 +2,27 @@ Facade 
to keep API LEGACY """ from simcore_postgres_database.models.base import metadata -from simcore_postgres_database.webserver_models import (ConfirmationAction, - UserRole, UserStatus, - confirmations, tokens, - users, tags, study_tags) +from simcore_postgres_database.webserver_models import ( + ConfirmationAction, + UserRole, + UserStatus, + confirmations, + tokens, + users, + tags, + study_tags, +) # TODO: roles table that maps every role with allowed tasks e.g. read/write,...?? __all__ = ( - "UserStatus", "UserRole", "ConfirmationAction", - "users", "confirmations", "tokens", - "metadata", "tags", "study_tags" + "UserStatus", + "UserRole", + "ConfirmationAction", + "users", + "confirmations", + "tokens", + "metadata", + "tags", + "study_tags", ) diff --git a/services/web/server/src/simcore_service_webserver/director/__init__.py b/services/web/server/src/simcore_service_webserver/director/__init__.py index ed2f9b6d88f..b9e4561af07 100644 --- a/services/web/server/src/simcore_service_webserver/director/__init__.py +++ b/services/web/server/src/simcore_service_webserver/director/__init__.py @@ -10,9 +10,11 @@ from servicelib.application_keys import APP_CONFIG_KEY from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from ..rest_config import APP_OPENAPI_SPECS_KEY from . import handlers @@ -22,10 +24,9 @@ module_name = __name__.replace(".__init__", "") -@app_module_setup(module_name, ModuleCategory.ADDON, - depends=[], - logger=logger) -def setup(app: web.Application,* , disable_login=False): + +@app_module_setup(module_name, ModuleCategory.ADDON, depends=[], logger=logger) +def setup(app: web.Application, *, disable_login=False): """ Sets up director's subsystem :param app: main application @@ -43,28 +44,25 @@ def setup(app: web.Application,* , disable_login=False): def include_path(tup_object): _method, path, _operation_id, _tags = tup_object - return any( tail in path for tail in ['/running_interactive_services', '/services'] ) + return any( + tail in path for tail in ["/running_interactive_services", "/services"] + ) - handlers_dict = { - 'services_get': handlers.services_get - } + handlers_dict = {"services_get": handlers.services_get} # Disables login_required decorator for testing purposes if disable_login: for name, hnds in handlers_dict.items(): - if hasattr(hnds, '__wrapped__'): + if hasattr(hnds, "__wrapped__"): handlers_dict[name] = hnds.__wrapped__ routes = map_handlers_with_operations( - handlers_dict, - filter(include_path, iter_path_operations(specs)), - strict=True + handlers_dict, filter(include_path, iter_path_operations(specs)), strict=True ) app.router.add_routes(routes) + # alias setup_director = setup -__all__ = ( - 'setup_director' -) +__all__ = "setup_director" diff --git a/services/web/server/src/simcore_service_webserver/director/config.py b/services/web/server/src/simcore_service_webserver/director/config.py index b039987eb21..5b7b46af26d 100644 --- a/services/web/server/src/simcore_service_webserver/director/config.py +++ b/services/web/server/src/simcore_service_webserver/director/config.py @@ -12,23 +12,30 @@ APP_DIRECTOR_API_KEY = __name__ + ".director_api" -CONFIG_SECTION_NAME = 'director' +CONFIG_SECTION_NAME = "director" + +schema = T.Dict( + { + T.Key("enabled", default=True, 
optional=True): T.Bool(), + T.Key("host", default="director",): T.String(), + T.Key("port", default=8001): T.Int(), + T.Key("version", default="v0"): T.Regexp( + regexp=r"^v\d+" + ), # storage API version basepath + } +) -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("host", default="director", ): T.String(), - T.Key("port", default=8001): T.Int(), - T.Key("version", default="v0"): T.Regexp(regexp=r'^v\d+') # storage API version basepath -}) def build_api_url(config: Dict) -> URL: - api_baseurl = URL.build(scheme='http', - host=config['host'], - port=config['port']).with_path(config["version"]) + api_baseurl = URL.build( + scheme="http", host=config["host"], port=config["port"] + ).with_path(config["version"]) return api_baseurl + def get_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] + def get_client_session(app: web.Application) -> ClientSession: return app[APP_CLIENT_SESSION_KEY] diff --git a/services/web/server/src/simcore_service_webserver/director/director_api.py b/services/web/server/src/simcore_service_webserver/director/director_api.py index 01401a48309..e1c4035fe1b 100644 --- a/services/web/server/src/simcore_service_webserver/director/director_api.py +++ b/services/web/server/src/simcore_service_webserver/director/director_api.py @@ -1,3 +1,5 @@ + # pylint: disable=too-many-arguments + import asyncio import logging import urllib @@ -20,15 +22,18 @@ def _get_director_client(app: web.Application) -> URL: # Use director. # TODO: this is also in app[APP_DIRECTOR_API_KEY] upon startup api_endpoint = URL.build( - scheme='http', - host=cfg['host'], - port=cfg['port']).with_path(cfg["version"]) + scheme="http", host=cfg["host"], port=cfg["port"] + ).with_path(cfg["version"]) session = get_client_session(app) return session, api_endpoint -async def get_running_interactive_services(app: web.Application, user_id: Optional[str] = None, project_id: Optional[str] = None) -> List[Dict]: +async def get_running_interactive_services( + app: web.Application, + user_id: Optional[str] = None, + project_id: Optional[str] = None, +) -> List[Dict]: session, api_endpoint = _get_director_client(app) params = {} @@ -45,7 +50,14 @@ async def get_running_interactive_services(app: web.Application, user_id: Option return [] -async def start_service(app: web.Application, user_id: str, project_id: str, service_key: str, service_version: str, service_uuid: str) -> Optional[Dict]: # pylint: disable=too-many-arguments +async def start_service( + app: web.Application, + user_id: str, + project_id: str, + service_key: str, + service_version: str, + service_uuid: str, +) -> Optional[Dict]: session, api_endpoint = _get_director_client(app) params = { @@ -54,7 +66,7 @@ async def start_service(app: web.Application, user_id: str, project_id: str, ser "service_key": service_key, "service_tag": service_version, "service_uuid": service_uuid, - "service_basepath": f"/x/{service_uuid}" + "service_basepath": f"/x/{service_uuid}", } url = (api_endpoint / "running_interactive_services").with_query(params) @@ -66,7 +78,7 @@ async def start_service(app: web.Application, user_id: str, project_id: str, ser async def stop_service(app: web.Application, service_uuid: str) -> None: session, api_endpoint = _get_director_client(app) - url = (api_endpoint / "running_interactive_services" / service_uuid) + url = api_endpoint / "running_interactive_services" / service_uuid async with session.delete(url, ssl=False) as resp: if resp.status == 404: raise 
director_exceptions.ServiceNotFoundError(service_uuid) @@ -74,19 +86,33 @@ async def stop_service(app: web.Application, service_uuid: str) -> None: payload = await resp.json() raise director_exceptions.DirectorException(payload) -async def stop_services(app: web.Application, user_id: Optional[str] = None, project_id: Optional[str] = None) -> None: + +async def stop_services( + app: web.Application, + user_id: Optional[str] = None, + project_id: Optional[str] = None, +) -> None: if not user_id and not project_id: raise ValueError("Expected either user or project") - services = await get_running_interactive_services(app, user_id=user_id, project_id=project_id) + services = await get_running_interactive_services( + app, user_id=user_id, project_id=project_id + ) stop_tasks = [stop_service(app, service_uuid) for service_uuid in services] await asyncio.gather(*stop_tasks) -async def get_service_by_key_version(app: web.Application, service_key: str, service_version: str) -> Optional[Dict]: + +async def get_service_by_key_version( + app: web.Application, service_key: str, service_version: str +) -> Optional[Dict]: session, api_endpoint = _get_director_client(app) - url = (api_endpoint / "services" / - urllib.parse.quote(service_key, safe='') / service_version) + url = ( + api_endpoint + / "services" + / urllib.parse.quote(service_key, safe="") + / service_version + ) async with session.get(url) as resp: if resp.status != 200: return diff --git a/services/web/server/src/simcore_service_webserver/director/director_exceptions.py b/services/web/server/src/simcore_service_webserver/director/director_exceptions.py index 9e1fe3a570c..d90e6839b09 100644 --- a/services/web/server/src/simcore_service_webserver/director/director_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/director/director_exceptions.py @@ -1,12 +1,15 @@ class DirectorException(Exception): """Basic exception for errors raised with director""" + def __init__(self, msg=None): if msg is None: msg = "Unexpected error occured in director subpackage" super(DirectorException, self).__init__(msg) + class ServiceNotFoundError(DirectorException): """Service was not found in swarm""" + def __init__(self, service_uuid): msg = "Service with uuid {} not found".format(service_uuid) super(ServiceNotFoundError, self).__init__(msg) diff --git a/services/web/server/src/simcore_service_webserver/director/handlers.py b/services/web/server/src/simcore_service_webserver/director/handlers.py index 2fcf56a371d..0eb2263f35e 100644 --- a/services/web/server/src/simcore_service_webserver/director/handlers.py +++ b/services/web/server/src/simcore_service_webserver/director/handlers.py @@ -23,20 +23,22 @@ def _forward_url(app: web.Application, url: URL) -> URL: # director service API endpoint # TODO: service API endpoint could be deduced and checked upon setup (e.g. 
health check on startup) - endpoint = URL.build( - scheme='http', - host=cfg['host'], - port=cfg['port']).with_path(cfg["version"]) + endpoint = URL.build(scheme="http", host=cfg["host"], port=cfg["port"]).with_path( + cfg["version"] + ) tail = "/".join(url.raw_parts[2:]) - url = (endpoint / tail) + url = endpoint / tail return url + def _resolve_url(request: web.Request) -> URL: return _forward_url(request.app, request.url) + # HANDLERS ------------------------------------------------------------------- + @login_required async def services_get(request: web.Request) -> web.Response: await check_permission(request, "services.catalog.*") diff --git a/services/web/server/src/simcore_service_webserver/email.py b/services/web/server/src/simcore_service_webserver/email.py index b05495e32d2..b87f05b8d06 100644 --- a/services/web/server/src/simcore_service_webserver/email.py +++ b/services/web/server/src/simcore_service_webserver/email.py @@ -5,7 +5,8 @@ import logging import aiohttp_jinja2 -#import jinja2 TODO: check + +# import jinja2 TODO: check import jinja_app_loader from aiohttp import web @@ -15,32 +16,32 @@ from .resources import resources # TODO: move login/utils.py email functionality here! -#from email.mime.text import MIMEText -#import aiosmtplib +# from email.mime.text import MIMEText +# import aiosmtplib log = logging.getLogger(__name__) -@app_module_setup(__name__, ModuleCategory.ADDON, - config_section=CONFIG_SECTION_NAME, - logger=log) -def setup(app: web.Application, debug: bool=False): - tmpl_dir = resources.get_path('templates') + +@app_module_setup( + __name__, ModuleCategory.ADDON, config_section=CONFIG_SECTION_NAME, logger=log +) +def setup(app: web.Application, debug: bool = False): + tmpl_dir = resources.get_path("templates") if not tmpl_dir.exists(): log.error("Cannot find email templates in '%s'", tmpl_dir) return False env = aiohttp_jinja2.setup( app, - loader=jinja_app_loader.Loader(), #jinja2.FileSystemLoader(tmpl_dir) - auto_reload=debug + loader=jinja_app_loader.Loader(), # jinja2.FileSystemLoader(tmpl_dir) + auto_reload=debug, ) return env + # alias setup_email = setup -__all__ = ( - 'setup_email' -) +__all__ = "setup_email" diff --git a/services/web/server/src/simcore_service_webserver/email_config.py b/services/web/server/src/simcore_service_webserver/email_config.py index c7e83d6ec1c..d8253554461 100644 --- a/services/web/server/src/simcore_service_webserver/email_config.py +++ b/services/web/server/src/simcore_service_webserver/email_config.py @@ -6,15 +6,18 @@ import trafaret as T -CONFIG_SECTION_NAME = 'smtp' +CONFIG_SECTION_NAME = "smtp" -schema = T.Dict({ - T.Key('sender', default='OSPARC support '): T.String(), # FIXME: email format - 'host': T.String(), - 'port': T.Int(), - T.Key('tls', default=False): T.Or(T.Bool(), T.Int), - T.Key('username', default=None): T.Or(T.String, T.Null), - T.Key('password', default=None): T.Or(T.String, T.Null) +schema = T.Dict( + { + T.Key( + "sender", default="OSPARC support " + ): T.String(), # FIXME: email format + "host": T.String(), + "port": T.Int(), + T.Key("tls", default=False): T.Or(T.Bool(), T.Int), + T.Key("username", default=None): T.Or(T.String, T.Null), + T.Key("password", default=None): T.Or(T.String, T.Null), } ) diff --git a/services/web/server/src/simcore_service_webserver/login/__init__.py b/services/web/server/src/simcore_service_webserver/login/__init__.py index 2f6ea1cfd4c..12ee926619c 100644 --- a/services/web/server/src/simcore_service_webserver/login/__init__.py +++ 
b/services/web/server/src/simcore_service_webserver/login/__init__.py @@ -33,17 +33,14 @@ def _create_login_config(app: web.Application, storage: AsyncpgStorage) -> Dict: """ Creates compatible config to update login.cfg.cfg object """ - login_cfg = app[APP_CONFIG_KEY].get(CONFIG_SECTION_NAME, {}) # optional! + login_cfg = app[APP_CONFIG_KEY].get(CONFIG_SECTION_NAME, {}) # optional! smtp_cfg = app[APP_CONFIG_KEY][SMTP_SECTION] - config = { - "APP": app, - "STORAGE": storage - } + config = {"APP": app, "STORAGE": storage} def _fmt(val): if isinstance(val, str): - if val.strip().lower() in ['null', 'none', '']: + if val.strip().lower() in ["null", "none", ""]: return None return val @@ -55,6 +52,7 @@ def _fmt(val): return config + async def _setup_config_and_pgpool(app: web.Application): """ - gets input configs from different subsystems and initializes cfg (internal configuration) @@ -63,30 +61,33 @@ async def _setup_config_and_pgpool(app: web.Application): :param app: fully setup application on startup :type app: web.Application """ - db_cfg = app[APP_CONFIG_KEY][DB_SECTION]['postgres'] + db_cfg = app[APP_CONFIG_KEY][DB_SECTION]["postgres"] # db pool = await asyncpg.create_pool( dsn=DSN.format(**db_cfg) + f"?application_name={__name__}_{id(app)}", - min_size=db_cfg['minsize'], - max_size=db_cfg['maxsize'], - loop=asyncio.get_event_loop()) + min_size=db_cfg["minsize"], + max_size=db_cfg["maxsize"], + loop=asyncio.get_event_loop(), + ) - storage = AsyncpgStorage(pool) #NOTE: this key belongs to cfg, not settings! + storage = AsyncpgStorage(pool) # NOTE: this key belongs to cfg, not settings! # config config = _create_login_config(app, storage) cfg.configure(config) if INDEX_RESOURCE_NAME in app.router: - cfg['LOGIN_REDIRECT'] = app.router[INDEX_RESOURCE_NAME].url_for() + cfg["LOGIN_REDIRECT"] = app.router[INDEX_RESOURCE_NAME].url_for() else: - log.warning("Unknown location for login page. Defaulting redirection to %s", - cfg['LOGIN_REDIRECT'] ) + log.warning( + "Unknown location for login page. Defaulting redirection to %s", + cfg["LOGIN_REDIRECT"], + ) app[APP_LOGIN_CONFIG] = cfg - yield # ---------------- + yield # ---------------- if config["STORAGE"].pool is not pool: log.error("Somebody has changed the db pool") @@ -96,10 +97,12 @@ async def _setup_config_and_pgpool(app: web.Application): log.exception("Failed to close login storage loop") - -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=[f'simcore_service_webserver.{mod}' for mod in ('rest', 'db') ], - logger=log) +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=[f"simcore_service_webserver.{mod}" for mod in ("rest", "db")], + logger=log, +) def setup_login(app: web.Application): """ Setting up login subsystem in application @@ -114,6 +117,4 @@ def setup_login(app: web.Application): return True -__all__ = ( - 'setup_login' -) +__all__ = "setup_login" diff --git a/services/web/server/src/simcore_service_webserver/login/cfg.py b/services/web/server/src/simcore_service_webserver/login/cfg.py index dc81409acee..675611011cd 100644 --- a/services/web/server/src/simcore_service_webserver/login/cfg.py +++ b/services/web/server/src/simcore_service_webserver/login/cfg.py @@ -2,65 +2,66 @@ APP_LOGIN_CONFIG = __name__ + ".config" -_MINUTES = 1./24./60. 
+_MINUTES = 1.0 / 24.0 / 60.0

 REQUIRED = object()

 DEFAULTS = {
-    'THEME': 'templates/osparc.io',
-    'COMMON_THEME': 'templates/common',
-    'PASSWORD_LEN': (6, 30),
-    'LOGIN_REDIRECT': '/',
-    'LOGOUT_REDIRECT': '/',
-    'REGISTRATION_CONFIRMATION_REQUIRED': True,
-
+    "THEME": "templates/osparc.io",
+    "COMMON_THEME": "templates/common",
+    "PASSWORD_LEN": (6, 30),
+    "LOGIN_REDIRECT": "/",
+    "LOGOUT_REDIRECT": "/",
+    "REGISTRATION_CONFIRMATION_REQUIRED": True,
     # TODO: add in configuration file as environ!
-    'SMTP_SENDER': None,
-    'SMTP_HOST': REQUIRED,
-    'SMTP_PORT': REQUIRED,
-    'SMTP_TLS_ENABLED': False,
-    'SMTP_USERNAME': None,
-    'SMTP_PASSWORD': None,
-
+    "SMTP_SENDER": None,
+    "SMTP_HOST": REQUIRED,
+    "SMTP_PORT": REQUIRED,
+    "SMTP_TLS_ENABLED": False,
+    "SMTP_USERNAME": None,
+    "SMTP_PASSWORD": None,
     # email confirmation links lifetime in days
-    'REGISTRATION_CONFIRMATION_LIFETIME': 5,
-    'INVITATION_CONFIRMATION_LIFETIME': 5,
-    'RESET_PASSWORD_CONFIRMATION_LIFETIME': 20 * _MINUTES,
-    'CHANGE_EMAIL_CONFIRMATION_LIFETIME': 5,
-
-    'MSG_LOGGED_IN': 'You are logged in',
-    'MSG_LOGGED_OUT': 'You are logged out',
-    'MSG_ACTIVATED': 'Your account is activated',
-    'MSG_UNKNOWN_EMAIL': 'This email is not registered',
-    'MSG_WRONG_PASSWORD': 'Wrong password',
-    'MSG_PASSWORD_MISMATCH': 'Password and confirmation do not match',
-    'MSG_USER_BANNED': 'This user is banned',
-    'MSG_ACTIVATION_REQUIRED': ('You have to activate your account via'
-                                ' email, before you can login'),
-    'MSG_EMAIL_EXISTS': 'This email is already registered',
-    'MSG_OFTEN_RESET_PASSWORD': (
-        'You can not request of restoring your password so often. Please, use'
-        ' the link we sent you recently'),
-    'MSG_CANT_SEND_MAIL': 'Can\'t send email, try a little later',
-    'MSG_PASSWORDS_NOT_MATCH': 'Passwords must match',
-    'MSG_PASSWORD_CHANGED': 'Your password is changed',
-    'MSG_CHANGE_EMAIL_REQUESTED': ('Please, click on the verification link'
-                                   ' we sent to your new email address'),
-    'MSG_EMAIL_CHANGED': 'Your email is changed',
-    'MSG_AUTH_FAILED': 'Authorization failed',
-    'MSG_EMAIL_SENT': 'An email has been sent to {email} with further instructions',
-
+    "REGISTRATION_CONFIRMATION_LIFETIME": 5,
+    "INVITATION_CONFIRMATION_LIFETIME": 5,
+    "RESET_PASSWORD_CONFIRMATION_LIFETIME": 20 * _MINUTES,
+    "CHANGE_EMAIL_CONFIRMATION_LIFETIME": 5,
+    "MSG_LOGGED_IN": "You are logged in",
+    "MSG_LOGGED_OUT": "You are logged out",
+    "MSG_ACTIVATED": "Your account is activated",
+    "MSG_UNKNOWN_EMAIL": "This email is not registered",
+    "MSG_WRONG_PASSWORD": "Wrong password",
+    "MSG_PASSWORD_MISMATCH": "Password and confirmation do not match",
+    "MSG_USER_BANNED": "This user is banned",
+    "MSG_ACTIVATION_REQUIRED": (
+        "You have to activate your account via" " email, before you can login"
+    ),
+    "MSG_EMAIL_EXISTS": "This email is already registered",
+    "MSG_OFTEN_RESET_PASSWORD": (
+        "You can not request of restoring your password so often. Please, use"
+        " the link we sent you recently"
+    ),
+    "MSG_CANT_SEND_MAIL": "Can't send email, try a little later",
+    "MSG_PASSWORDS_NOT_MATCH": "Passwords must match",
+    "MSG_PASSWORD_CHANGED": "Your password is changed",
+    "MSG_CHANGE_EMAIL_REQUESTED": (
+        "Please, click on the verification link" " we sent to your new email address"
+    ),
+    "MSG_EMAIL_CHANGED": "Your email is changed",
+    "MSG_AUTH_FAILED": "Authorization failed",
+    "MSG_EMAIL_SENT": "An email has been sent to {email} with further instructions",
     # next settings are initialized during `setup()`, do not set it manually
-    'APP': REQUIRED,
-    'STORAGE': REQUIRED,
+    "APP": REQUIRED,
+    "STORAGE": REQUIRED,
 }

-assert 'STORAGE' in DEFAULTS.keys() # nosec
+assert "STORAGE" in DEFAULTS.keys()  # nosec
+

 def get_storage(app: web.Application):
-    return app[APP_LOGIN_CONFIG]['STORAGE']
+    return app[APP_LOGIN_CONFIG]["STORAGE"]
+

 class Cfg(dict):
-    '''
+    """
     Settings storage which supports both dict and dot notations

     >>> cfg = Cfg({'foo': 1, 'bar': 2, 'baz': REQUIRED})
@@ -103,7 +104,8 @@ class Cfg(dict):
     Traceback (most recent call last):
         ...
     AttributeError
-    '''
+    """
+
     def __init__(self, defaults):
         super().__init__(self)
         self.defaults = defaults
@@ -112,12 +114,12 @@ def __init__(self, defaults):
     # pylint: disable=E0202
     def __getitem__(self, name):
         if not self.configured:
-            raise RuntimeError('Settings are not configured yet')
+            raise RuntimeError("Settings are not configured yet")
         self.__getitem__ = super().__getitem__
         return super().__getitem__(name)

     def __getattr__(self, name):
-        if name == '__wrapped__':
+        if name == "__wrapped__":
             raise AttributeError
         try:
             return self[name]
@@ -129,13 +131,14 @@ def configure(self, updates):
         for key in self.defaults:
             value = updates.get(key, self.defaults[key])
             if value == REQUIRED:
-                raise RuntimeError('You have to set `{}`'.format(key))
+                raise RuntimeError("You have to set `{}`".format(key))
             self[key] = value
         self.configured = True


-if __name__ == '__main__':
+if __name__ == "__main__":
     import doctest
+
     print(doctest.testmod())
 else:
     cfg = Cfg(DEFAULTS)
diff --git a/services/web/server/src/simcore_service_webserver/login/config.py b/services/web/server/src/simcore_service_webserver/login/config.py
index d3cfeb6a178..cb954d57357 100644
--- a/services/web/server/src/simcore_service_webserver/login/config.py
+++ b/services/web/server/src/simcore_service_webserver/login/config.py
@@ -7,17 +7,26 @@
 from .cfg import DEFAULTS

-CONFIG_SECTION_NAME = 'login'
+CONFIG_SECTION_NAME = "login"

 # TODO: merge with cfg.py
-schema = T.Dict({
-    T.Key("enabled", default=True, optional=True): T.Bool(),
-    T.Key("registration_confirmation_required", default=DEFAULTS["REGISTRATION_CONFIRMATION_REQUIRED"], optional=True): T.Or(T.Bool, T.Int),
-    T.Key("registration_invitation_required", default=False, optional=True): T.Or(T.Bool, T.Int),
-})
+schema = T.Dict(
+    {
+        T.Key("enabled", default=True, optional=True): T.Bool(),
+        T.Key(
+            "registration_confirmation_required",
+            default=DEFAULTS["REGISTRATION_CONFIRMATION_REQUIRED"],
+            optional=True,
+        ): T.Or(T.Bool, T.Int),
+        T.Key("registration_invitation_required", default=False, optional=True): T.Or(
+            T.Bool, T.Int
+        ),
+    }
+)


 def get_login_config(app):
     from servicelib.application_keys import APP_CONFIG_KEY
+
     cfg = app[APP_CONFIG_KEY].get(CONFIG_SECTION_NAME, dict())
     return cfg
diff --git a/services/web/server/src/simcore_service_webserver/login/confirmation.py b/services/web/server/src/simcore_service_webserver/login/confirmation.py
index 8b045a3654f..35dd9f4216c 100644
--- a/services/web/server/src/simcore_service_webserver/login/confirmation.py
+++ b/services/web/server/src/simcore_service_webserver/login/confirmation.py
@@ -13,30 +13,34 @@
 log = logging.getLogger(__name__)

+
 async def validate_confirmation_code(code, db):
-    confirmation = await db.get_confirmation({'code': code})
+    confirmation = await db.get_confirmation({"code": code})
     if confirmation and is_confirmation_expired(confirmation):
-        log.info("Confirmation code '%s' %s. Deleting ...", code,
-                 "consumed" if confirmation else "expired")
+        log.info(
+            "Confirmation code '%s' %s. Deleting ...",
+            code,
+            "consumed" if confirmation else "expired",
+        )
         await db.delete_confirmation(confirmation)
         confirmation = None
     return confirmation


 async def make_confirmation_link(request, confirmation):
-    link = request.app.router['auth_confirmation'].url_for(code=confirmation['code'])
-    return '{}://{}{}'.format(request.scheme, request.host, link)
+    link = request.app.router["auth_confirmation"].url_for(code=confirmation["code"])
+    return "{}://{}{}".format(request.scheme, request.host, link)


 def get_expiration_date(confirmation):
     lifetime = get_confirmation_lifetime(confirmation)
-    estimated_expiration = confirmation['created_at'] + lifetime
+    estimated_expiration = confirmation["created_at"] + lifetime
     return estimated_expiration


 async def is_confirmation_allowed(user, action):
     db = cfg.STORAGE
-    confirmation = await db.get_confirmation({'user': user, 'action': action})
+    confirmation = await db.get_confirmation({"user": user, "action": action})
     if not confirmation:
         return True
     if is_confirmation_expired(confirmation):
@@ -45,18 +49,17 @@ async def is_confirmation_allowed(user, action):

 def is_confirmation_expired(confirmation):
-    age = datetime.utcnow() - confirmation['created_at']
+    age = datetime.utcnow() - confirmation["created_at"]
     lifetime = get_confirmation_lifetime(confirmation)
     return age > lifetime


 def get_confirmation_lifetime(confirmation):
-    lifetime_days = cfg['{}_CONFIRMATION_LIFETIME'.format(
-        confirmation['action'].upper())]
+    lifetime_days = cfg[
+        "{}_CONFIRMATION_LIFETIME".format(confirmation["action"].upper())
+    ]
     lifetime = timedelta(days=lifetime_days)
     return lifetime


-__all__ = (
-    "ConfirmationAction",
-)
+__all__ = ("ConfirmationAction",)
diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py
index 6cf7aca1ac8..e6d580449a0 100644
--- a/services/web/server/src/simcore_service_webserver/login/decorators.py
+++ b/services/web/server/src/simcore_service_webserver/login/decorators.py
@@ -13,11 +13,13 @@
 def user_to_request(handler):
     """ Handler decorator that injects in request, current authorized user ID

     """
+
     @wraps(handler)
     async def wrapped(*args, **kwargs):
         request = get_request(*args, **kwargs)
         request[RQT_USERID_KEY] = await authorized_userid(request)
         return await handler(*args)
+
     return wrapped
@@ -29,6 +31,7 @@ def login_required(handler):

     Keeps userid in request[RQT_USERID_KEY]
     """
+
     @wraps(handler)
     async def wrapped(*args, **kwargs):
         request = get_request(*args, **kwargs)
@@ -39,9 +42,8 @@ async def wrapped(*args, **kwargs):
             request[RQT_USERID_KEY] = userid
         ret = await handler(*args, **kwargs)
         return ret
+
     return wrapped


-__all__ = (
-    "login_required"
-)
+__all__ = "login_required"
diff --git a/services/web/server/src/simcore_service_webserver/login/handlers.py b/services/web/server/src/simcore_service_webserver/login/handlers.py
index 7dde6370531..a152ec0343e 100644
--- a/services/web/server/src/simcore_service_webserver/login/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/login/handlers.py
@@ -10,14 +10,22 @@
 from ..security_api import check_password, encrypt_password, forget, remember
 from .cfg import APP_LOGIN_CONFIG, cfg, get_storage
 from .config import get_login_config
-from .confirmation import (is_confirmation_allowed, make_confirmation_link,
-                           validate_confirmation_code)
+from .confirmation import (
+    is_confirmation_allowed,
+    make_confirmation_link,
+    validate_confirmation_code,
+)
 from .decorators import RQT_USERID_KEY, login_required
 from .registration import check_invitation, check_registration
-from .utils import (common_themed, flash_response, get_client_ip,
-                    render_and_send_mail, themed)
+from .utils import (
+    common_themed,
+    flash_response,
+    get_client_ip,
+    render_and_send_mail,
+    themed,
+)

- # FIXME: do not use cfg singleton. use instead cfg = request.app[APP_LOGIN_CONFIG]
+# FIXME: do not use cfg singleton. use instead cfg = request.app[APP_LOGIN_CONFIG]
 log = logging.getLogger(__name__)
@@ -28,43 +36,49 @@ def to_names(enum_cls, names):
     return [getattr(enum_cls, att).name for att in names.split()]


-CONFIRMATION_PENDING, ACTIVE, BANNED = to_names(UserStatus, \
-    'CONFIRMATION_PENDING ACTIVE BANNED')
+CONFIRMATION_PENDING, ACTIVE, BANNED = to_names(
+    UserStatus, "CONFIRMATION_PENDING ACTIVE BANNED"
+)

-ANONYMOUS, GUEST, USER, TESTER= to_names(UserRole, \
-    'ANONYMOUS GUEST USER TESTER')
+ANONYMOUS, GUEST, USER, TESTER = to_names(UserRole, "ANONYMOUS GUEST USER TESTER")

-REGISTRATION, RESET_PASSWORD, CHANGE_EMAIL = to_names(ConfirmationAction, \
-    'REGISTRATION RESET_PASSWORD CHANGE_EMAIL')
+REGISTRATION, RESET_PASSWORD, CHANGE_EMAIL = to_names(
+    ConfirmationAction, "REGISTRATION RESET_PASSWORD CHANGE_EMAIL"
+)


 async def register(request: web.Request):
     _, _, body = await extract_and_validate(request)

     # see https://aiohttp.readthedocs.io/en/stable/web_advanced.html#data-sharing-aka-no-singletons-please
-    app_cfg = get_login_config(request.app) # TODO: replace cfg by app_cfg
+    app_cfg = get_login_config(request.app)  # TODO: replace cfg by app_cfg
     db = get_storage(request.app)

     email = body.email
-    username = email.split('@')[0] # FIXME: this has to be unique and add this in user registration!
+    username = email.split("@")[
+        0
+    ]  # FIXME: this has to be unique and add this in user registration!
     password = body.password
-    confirm = body.confirm if hasattr(body, 'confirm') else None
+    confirm = body.confirm if hasattr(body, "confirm") else None

     if app_cfg.get("registration_invitation_required"):
-        invitation = body.invitation if hasattr(body, 'invitation') else None
+        invitation = body.invitation if hasattr(body, "invitation") else None
         await check_invitation(invitation, db)
     await check_registration(email, password, confirm, db)

-    user = await db.create_user({
-        'name': username,
-        'email': email,
-        'password_hash': encrypt_password(password),
-        'status': CONFIRMATION_PENDING if bool(cfg.REGISTRATION_CONFIRMATION_REQUIRED)
-            else ACTIVE,
-        'role': USER,
-        'created_ip': get_client_ip(request), # FIXME: does not get right IP!
-    })
+    user = await db.create_user(
+        {
+            "name": username,
+            "email": email,
+            "password_hash": encrypt_password(password),
+            "status": CONFIRMATION_PENDING
+            if bool(cfg.REGISTRATION_CONFIRMATION_REQUIRED)
+            else ACTIVE,
+            "role": USER,
+            "created_ip": get_client_ip(request),  # FIXME: does not get right IP!
+        }
+    )

     if not bool(cfg.REGISTRATION_CONFIRMATION_REQUIRED):
         # user is logged in
@@ -77,24 +91,27 @@ async def register(request: web.Request):
     link = await make_confirmation_link(request, confirmation_)
     try:
         await render_and_send_mail(
-            request, email,
-            themed('registration_email.html'), {
-                'auth': {
-                    'cfg': cfg,
-                },
-                'host': request.host,
-                'link': link,
-                'name': email.split("@")[0],
-            })
-    except Exception: #pylint: disable=broad-except
-        log.exception('Can not send email')
+            request,
+            email,
+            themed("registration_email.html"),
+            {
+                "auth": {"cfg": cfg,},
+                "host": request.host,
+                "link": link,
+                "name": email.split("@")[0],
+            },
+        )
+    except Exception:  # pylint: disable=broad-except
+        log.exception("Can not send email")
         await db.delete_confirmation(confirmation_)
         await db.delete_user(user)
         raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL)

     response = flash_response(
         "You are registered successfully! To activate your account, please, "
-        "click on the verification link in the email we sent you.", "INFO")
+        "click on the verification link in the email we sent you.",
+        "INFO",
+    )
     return response
@@ -105,28 +122,32 @@ async def login(request: web.Request):
     email = body.email
     password = body.password

-    user = await db.get_user({'email': email})
+    user = await db.get_user({"email": email})
     if not user:
-        raise web.HTTPUnauthorized(reason=cfg.MSG_UNKNOWN_EMAIL,
-                                   content_type='application/json')
+        raise web.HTTPUnauthorized(
+            reason=cfg.MSG_UNKNOWN_EMAIL, content_type="application/json"
+        )

-    if user['status'] == BANNED or user['role'] == ANONYMOUS:
-        raise web.HTTPUnauthorized(reason=cfg.MSG_USER_BANNED,
-                                   content_type='application/json')
+    if user["status"] == BANNED or user["role"] == ANONYMOUS:
+        raise web.HTTPUnauthorized(
+            reason=cfg.MSG_USER_BANNED, content_type="application/json"
+        )

-    if not check_password(password, user['password_hash']):
-        raise web.HTTPUnauthorized(reason=cfg.MSG_WRONG_PASSWORD,
-                                   content_type='application/json')
+    if not check_password(password, user["password_hash"]):
+        raise web.HTTPUnauthorized(
+            reason=cfg.MSG_WRONG_PASSWORD, content_type="application/json"
+        )

-    if user['status'] == CONFIRMATION_PENDING:
-        raise web.HTTPUnauthorized(reason=cfg.MSG_ACTIVATION_REQUIRED,
-                                   content_type='application/json')
+    if user["status"] == CONFIRMATION_PENDING:
+        raise web.HTTPUnauthorized(
+            reason=cfg.MSG_ACTIVATION_REQUIRED, content_type="application/json"
+        )

-    assert user['status'] == ACTIVE, "db corrupted. Invalid status" # nosec
-    assert user['email'] == email, "db corrupted. Invalid email" # nosec
+    assert user["status"] == ACTIVE, "db corrupted. Invalid status"  # nosec
+    assert user["email"] == email, "db corrupted. Invalid email"  # nosec

     # user logs in
-    identity = user['email']
+    identity = user["email"]
     response = flash_response(cfg.MSG_LOGGED_IN, "INFO")
     await remember(request, response, identity)
     return response
@@ -163,39 +184,40 @@ async def reset_password(request: web.Request):
     db = get_storage(request.app)

     email = body.email

-    user = await db.get_user({'email': email})
+    user = await db.get_user({"email": email})
     try:
         if not user:
-            raise web.HTTPUnprocessableEntity(reason=cfg.MSG_UNKNOWN_EMAIL,
-                                              content_type='application/json') # 422
+            raise web.HTTPUnprocessableEntity(
+                reason=cfg.MSG_UNKNOWN_EMAIL, content_type="application/json"
+            )  # 422

-        if user['status'] == BANNED:
-            raise web.HTTPUnauthorized(reason=cfg.MSG_USER_BANNED,
-                                       content_type='application/json') # 401
+        if user["status"] == BANNED:
+            raise web.HTTPUnauthorized(
+                reason=cfg.MSG_USER_BANNED, content_type="application/json"
+            )  # 401

-        if user['status'] == CONFIRMATION_PENDING:
-            raise web.HTTPUnauthorized(reason=cfg.MSG_ACTIVATION_REQUIRED,
-                                       content_type='application/json') # 401
+        if user["status"] == CONFIRMATION_PENDING:
+            raise web.HTTPUnauthorized(
+                reason=cfg.MSG_ACTIVATION_REQUIRED, content_type="application/json"
+            )  # 401

-        assert user['status'] == ACTIVE # nosec
-        assert user['email'] == email # nosec
+        assert user["status"] == ACTIVE  # nosec
+        assert user["email"] == email  # nosec

         if not await is_confirmation_allowed(user, action=RESET_PASSWORD):
-            raise web.HTTPUnauthorized(reason=cfg.MSG_OFTEN_RESET_PASSWORD,
-                                       content_type='application/json') # 401
+            raise web.HTTPUnauthorized(
+                reason=cfg.MSG_OFTEN_RESET_PASSWORD, content_type="application/json"
+            )  # 401

     except web.HTTPError as err:
         # Email with an explanation; suggest alternative approaches or ways to contact support for help
         try:
             await render_and_send_mail(
-                request, email,
-                common_themed('reset_password_email_failed.html'), {
-                    'auth': {
-                        'cfg': cfg,
-                    },
-                    'host': request.host,
-                    'reason': err.reason,
-                })
-        except Exception: #pylint: disable=broad-except
+                request,
+                email,
+                common_themed("reset_password_email_failed.html"),
+                {"auth": {"cfg": cfg,}, "host": request.host, "reason": err.reason,},
+            )
+        except Exception:  # pylint: disable=broad-except
             log.exception("Cannot send email")
             raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL)
     else:
@@ -204,16 +226,13 @@ async def reset_password(request: web.Request):
         try:
             # primary reset email with a URL and the normal instructions.
         await render_and_send_mail(
-            request, email,
-            common_themed('reset_password_email.html'), {
-                'auth': {
-                    'cfg': cfg,
-                },
-                'host': request.host,
-                'link': link,
-            })
-    except Exception: #pylint: disable=broad-except
-        log.exception('Can not send email')
+            request,
+            email,
+            common_themed("reset_password_email.html"),
+            {"auth": {"cfg": cfg,}, "host": request.host, "link": link,},
+        )
+    except Exception:  # pylint: disable=broad-except
+        log.exception("Cannot send email")
         await db.delete_confirmation(confirmation)
         raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL)
@@ -228,21 +247,18 @@ async def change_email(request: web.Request):
     db = get_storage(request.app)
     email = body.email

-    user = await db.get_user({'id': request[RQT_USERID_KEY]})
-    assert user # nosec
+    user = await db.get_user({"id": request[RQT_USERID_KEY]})
+    assert user  # nosec

-    if user['email'] == email:
+    if user["email"] == email:
         return flash_response("Email changed")

-    other = await db.get_user({'email': email})
+    other = await db.get_user({"email": email})
     if other:
         raise web.HTTPUnprocessableEntity(reason="This email cannot be used")

     # Reset if previously requested
-    confirmation = await db.get_confirmation({
-        'user': user,
-        'action': CHANGE_EMAIL}
-    )
+    confirmation = await db.get_confirmation({"user": user, "action": CHANGE_EMAIL})

     if confirmation:
         await db.delete_confirmation(confirmation)
@@ -251,16 +267,13 @@ async def change_email(request: web.Request):
     link = await make_confirmation_link(request, confirmation)
     try:
         await render_and_send_mail(
-            request, email,
-            common_themed('change_email_email.html'), {
-                'auth': {
-                    'cfg': cfg,
-                },
-                'host': request.host,
-                'link': link,
-            })
-    except Exception: #pylint: disable=broad-except
-        log.error('Can not send email')
+            request,
+            email,
+            common_themed("change_email_email.html"),
+            {"auth": {"cfg": cfg,}, "host": request.host, "link": link,},
+        )
+    except Exception:  # pylint: disable=broad-except
+        log.error("Cannot send email")
         await db.delete_confirmation(confirmation)
         raise web.HTTPServiceUnavailable(reason=cfg.MSG_CANT_SEND_MAIL)
@@ -272,8 +285,8 @@ async def change_email(request: web.Request):
 async def change_password(request: web.Request):
     db = get_storage(request.app)

-    user = await db.get_user({'id': request[RQT_USERID_KEY]})
-    assert user # nosec
+    user = await db.get_user({"id": request[RQT_USERID_KEY]})
+    assert user  # nosec

     _, _, body = await extract_and_validate(request)
@@ -281,15 +294,17 @@
     new_password = body.new
     confirm = body.confirm

-    if not check_password(cur_password, user['password_hash']):
-        raise web.HTTPUnprocessableEntity(reason=cfg.MSG_WRONG_PASSWORD,
-                                          content_type='application/json') # 422
+    if not check_password(cur_password, user["password_hash"]):
+        raise web.HTTPUnprocessableEntity(
+            reason=cfg.MSG_WRONG_PASSWORD, content_type="application/json"
+        )  # 422

     if new_password != confirm:
-        raise web.HTTPConflict(reason=cfg.MSG_PASSWORD_MISMATCH,
-                               content_type='application/json') # 409
+        raise web.HTTPConflict(
+            reason=cfg.MSG_PASSWORD_MISMATCH, content_type="application/json"
+        )  # 409

-    await db.update_user(user, {'password_hash': encrypt_password(new_password)})
+    await db.update_user(user, {"password_hash": encrypt_password(new_password)})

     response = flash_response(cfg.MSG_PASSWORD_CHANGED)
     return response
@@ -313,30 +328,30 @@ async def email_confirmation(request: web.Request):
     params, _, _ = await extract_and_validate(request)

     db = get_storage(request.app)
-    code = params['code']
+    code = params["code"]

     confirmation = await validate_confirmation_code(code, db)

     if confirmation:
-        action = confirmation['action']
-        redirect_url = URL(request.app[APP_LOGIN_CONFIG]['LOGIN_REDIRECT'])
+        action = confirmation["action"]
+        redirect_url = URL(request.app[APP_LOGIN_CONFIG]["LOGIN_REDIRECT"])

         if action == REGISTRATION:
-            user = await db.get_user({'id': confirmation['user_id']})
-            await db.update_user(user, {'status': ACTIVE})
+            user = await db.get_user({"id": confirmation["user_id"]})
+            await db.update_user(user, {"status": ACTIVE})
             await db.delete_confirmation(confirmation)
             log.debug("User %s registered", user)
             redirect_url = redirect_url.with_fragment("?registered=true")

         elif action == CHANGE_EMAIL:
-            user = await db.get_user({'id': confirmation['user_id']})
-            await db.update_user(user, {'email': confirmation['data']})
+            user = await db.get_user({"id": confirmation["user_id"]})
+            await db.update_user(user, {"email": confirmation["data"]})
             await db.delete_confirmation(confirmation)
             log.debug("User %s changed email", user)

         elif action == RESET_PASSWORD:
             # NOTE: By using fragments (instead of queries or path parameters), the browser does NOT reloads page
-            redirect_url = redirect_url.with_fragment("reset-password?code=%s" % code )
+            redirect_url = redirect_url.with_fragment("reset-password?code=%s" % code)
             log.debug("Reset password requested %s", confirmation)

     raise web.HTTPFound(location=redirect_url)
@@ -349,27 +364,28 @@ async def reset_password_allowed(request: web.Request):
     params, _, body = await extract_and_validate(request)

     db = get_storage(request.app)
-    code = params['code']
+    code = params["code"]
     password = body.password
     confirm = body.confirm

     if password != confirm:
-        raise web.HTTPConflict(reason=cfg.MSG_PASSWORD_MISMATCH,
-                               content_type='application/json') # 409
+        raise web.HTTPConflict(
+            reason=cfg.MSG_PASSWORD_MISMATCH, content_type="application/json"
+        )  # 409

     confirmation = await validate_confirmation_code(code, db)

     if confirmation:
-        user = await db.get_user({'id': confirmation['user_id']})
-        assert user # nosec
+        user = await db.get_user({"id": confirmation["user_id"]})
+        assert user  # nosec

-        await db.update_user(user, {
-            'password_hash': encrypt_password(password)
-        })
+        await db.update_user(user, {"password_hash": encrypt_password(password)})
         await db.delete_confirmation(confirmation)

         response = flash_response(cfg.MSG_PASSWORD_CHANGED)
         return response

-    raise web.HTTPUnauthorized(reason="Cannot reset password. Invalid token or user",
-                               content_type='application/json') # 401
+    raise web.HTTPUnauthorized(
+        reason="Cannot reset password. Invalid token or user",
+        content_type="application/json",
+    )  # 401
diff --git a/services/web/server/src/simcore_service_webserver/login/registration.py b/services/web/server/src/simcore_service_webserver/login/registration.py
index 28be2601e31..3c0558e5c8c 100644
--- a/services/web/server/src/simcore_service_webserver/login/registration.py
+++ b/services/web/server/src/simcore_service_webserver/login/registration.py
@@ -13,38 +13,45 @@
 from ..db_models import UserStatus
 from .cfg import cfg
-from .confirmation import (ConfirmationAction, get_expiration_date,
-                           is_confirmation_expired, validate_confirmation_code)
+from .confirmation import (
+    ConfirmationAction,
+    get_expiration_date,
+    is_confirmation_expired,
+    validate_confirmation_code,
+)
 from .storage import AsyncpgStorage

 log = logging.getLogger(__name__)


-async def check_registration(email: str, password: str, confirm: str, db: AsyncpgStorage):
+async def check_registration(
+    email: str, password: str, confirm: str, db: AsyncpgStorage
+):
     # email : required & formats
     # password: required & secure[min length, ...]

     # If the email field is missing, return a 400 - HTTPBadRequest
     if email is None or password is None:
-        raise web.HTTPBadRequest(reason="Email and password required",
-                                 content_type='application/json')
+        raise web.HTTPBadRequest(
+            reason="Email and password required", content_type="application/json"
+        )

     if confirm and password != confirm:
-        raise web.HTTPConflict(reason=cfg.MSG_PASSWORD_MISMATCH,
-                               content_type='application/json')
+        raise web.HTTPConflict(
+            reason=cfg.MSG_PASSWORD_MISMATCH, content_type="application/json"
+        )

     # TODO: If the email field isn’t a valid email, return a 422 - HTTPUnprocessableEntity
     # TODO: If the password field is too short, return a 422 - HTTPUnprocessableEntity
     # TODO: use passwordmeter to enforce good passwords, but first create helper in front-end

-    user = await db.get_user({'email': email})
+    user = await db.get_user({"email": email})
     if user:
         # Resets pending confirmation if re-registers?
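The password handling above rests on passlib's sha256_crypt (wired up in login/utils.py further down in this changeset). A minimal round-trip sketch, assuming only passlib is installed:

    import passlib.hash

    # hash on registration / password change; rounds=1000 mirrors the commented-out
    # line in utils.py (passlib's minimum for sha256_crypt)
    password_hash = passlib.hash.sha256_crypt.using(rounds=1000).hash("s3cr3t")

    # verify on login / change_password; returns a bool
    assert passlib.hash.sha256_crypt.verify("s3cr3t", password_hash)
    assert not passlib.hash.sha256_crypt.verify("wrong", password_hash)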
-        if user['status'] == UserStatus.CONFIRMATION_PENDING.value:
-            _confirmation = await db.get_confirmation({
-                'user': user,
-                'action': ConfirmationAction.REGISTRATION.value
-            })
+        if user["status"] == UserStatus.CONFIRMATION_PENDING.value:
+            _confirmation = await db.get_confirmation(
+                {"user": user, "action": ConfirmationAction.REGISTRATION.value}
+            )

             if is_confirmation_expired(_confirmation):
                 await db.delete_confirmation(_confirmation)
@@ -52,12 +59,14 @@ async def check_registration(email: str, password: str, confirm: str, db: Asyncp
                 return

         # If the email is already taken, return a 409 - HTTPConflict
-        raise web.HTTPConflict(reason=cfg.MSG_EMAIL_EXISTS,
-                               content_type='application/json')
+        raise web.HTTPConflict(
+            reason=cfg.MSG_EMAIL_EXISTS, content_type="application/json"
+        )

     log.debug("Registration data validated")

-async def create_invitation(host:Dict, guest:str, db:AsyncpgStorage):
+
+async def create_invitation(host: Dict, guest: str, db: AsyncpgStorage):
     """ Creates an invitation token for a guest to register in the platform

     Creates and injects an invitation token in the confirmation table associated
@@ -70,46 +79,53 @@ async def create_invitation(host:Dict, guest:str, db:AsyncpgStorage):
     confirmation = await db.create_confirmation(
         user=host,
         action=ConfirmationAction.INVITATION.name,
-        data= json.dumps({
-            "created_by": host['email'],
-            "guest": guest
-        })
+        data=json.dumps({"created_by": host["email"], "guest": guest}),
     )
     return confirmation

-async def check_invitation(invitation:str, db:AsyncpgStorage):
+
+async def check_invitation(invitation: str, db: AsyncpgStorage):
     confirmation = None
     if invitation:
         confirmation = await validate_confirmation_code(invitation, db)
         if confirmation:
-            #FIXME: check if action=invitation??
-            log.info("Invitation code used. Deleting %s", pformat(get_confirmation_info(confirmation)))
+            # FIXME: check if action=invitation??
+            log.info(
+                "Invitation code used. Deleting %s",
+                pformat(get_confirmation_info(confirmation)),
+            )
             await db.delete_confirmation(confirmation)
         else:
-            raise web.HTTPForbidden(reason=("Invalid invitation code." "Your invitation was already used or might have expired." "Please contact our support team to get a new one.") )
+            raise web.HTTPForbidden(
+                reason=(
+                    "Invalid invitation code. "
+                    "Your invitation was already used or might have expired. "
+                    "Please contact our support team to get a new one."
+                )
+            )
+

 def get_confirmation_info(confirmation):
     info = dict(confirmation)

     # data column is a string
     try:
-        info['data'] = json.loads(confirmation['data'])
+        info["data"] = json.loads(confirmation["data"])
     except json.decoder.JSONDecodeError:
         log.warning("Failed to load data from confirmation. Skipping 'data' field.")

     # extra
     info["expires"] = get_expiration_date(confirmation)

-    if confirmation['action']==ConfirmationAction.INVITATION.name:
+    if confirmation["action"] == ConfirmationAction.INVITATION.name:
         info["url"] = get_invitation_url(confirmation)

     return info

-def get_invitation_url(confirmation, origin: URL=None) -> URL:
-    code = confirmation['code']
-    is_invitation = confirmation['action'] == ConfirmationAction.INVITATION.name
+
+def get_invitation_url(confirmation, origin: URL = None) -> URL:
+    code = confirmation["code"]
+    is_invitation = confirmation["action"] == ConfirmationAction.INVITATION.name

     if origin is None or not is_invitation:
         origin = URL()
diff --git a/services/web/server/src/simcore_service_webserver/login/routes.py b/services/web/server/src/simcore_service_webserver/login/routes.py
index 0e84d44d94e..61b78959bd7 100644
--- a/services/web/server/src/simcore_service_webserver/login/routes.py
+++ b/services/web/server/src/simcore_service_webserver/login/routes.py
@@ -10,15 +10,13 @@
 from aiohttp import web

 from servicelib import openapi
-from servicelib.rest_routing import (iter_path_operations,
-                                     map_handlers_with_operations)
+from servicelib.rest_routing import iter_path_operations, map_handlers_with_operations

 from . import handlers as login_handlers

 log = logging.getLogger(__name__)

-
 def create(specs: openapi.Spec) -> List[web.RouteDef]:
     """ Creates routes mapping operators_id with handler functions

@@ -36,23 +34,21 @@ def include_path(tuple_object):
         return path.startswith(base_path + "/auth/")

     handlers_map = {
-        'auth_register': login_handlers.register,
-        'auth_login': login_handlers.login,
-        'auth_logout': login_handlers.logout,
-        'auth_reset_password': login_handlers.reset_password,
-        'auth_reset_password_allowed': login_handlers.reset_password_allowed,
-        'auth_change_email': login_handlers.change_email,
-        'auth_change_password': login_handlers.change_password,
-        'auth_confirmation': login_handlers.email_confirmation,
+        "auth_register": login_handlers.register,
+        "auth_login": login_handlers.login,
+        "auth_logout": login_handlers.logout,
+        "auth_reset_password": login_handlers.reset_password,
+        "auth_reset_password_allowed": login_handlers.reset_password_allowed,
+        "auth_change_email": login_handlers.change_email,
+        "auth_change_password": login_handlers.change_password,
+        "auth_confirmation": login_handlers.email_confirmation,
     }

     routes = map_handlers_with_operations(
-        handlers_map,
-        filter(include_path, iter_path_operations(specs)),
-        strict=True
+        handlers_map, filter(include_path, iter_path_operations(specs)), strict=True
     )
-    log.debug("Mapped auth routes: %s", "\n".join( [pformat(r) for r in routes]) )
+    log.debug("Mapped auth routes: %s", "\n".join([pformat(r) for r in routes]))

     return routes

@@ -60,6 +56,4 @@ def include_path(tuple_object):
 # alias
 create_routes = create

-__all__ = (
-    'create_routes'
-)
+__all__ = ("create_routes",)
diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py
index 115a9dd7e0c..cd092905e45 100644
--- a/services/web/server/src/simcore_service_webserver/login/settings.py
+++ b/services/web/server/src/simcore_service_webserver/login/settings.py
@@ -2,7 +2,7 @@

 APP_LOGIN_CONFIG = __name__ + ".config"

-CFG_LOGIN_STORAGE = "STORAGE" # Needs to match login.cfg!!!
+CFG_LOGIN_STORAGE = "STORAGE"  # Needs to match login.cfg!!!
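Because the confirmations table stores `data` as a plain string, the invitation payload makes a full JSON round-trip between create_invitation and get_confirmation_info. A small sketch of that round-trip (values hypothetical):

    import json

    # what create_invitation writes into the "data" column
    data = json.dumps({"created_by": "host@example.com", "guest": "guest@example.com"})

    # what get_confirmation_info recovers (a JSONDecodeError is logged and skipped)
    info = {"action": "INVITATION", "data": data}
    info["data"] = json.loads(info["data"])
    assert info["data"]["guest"] == "guest@example.com"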
def get_storage(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/login/sql.py b/services/web/server/src/simcore_service_webserver/login/sql.py index 2cf03b1918f..5876bfa6dd4 100644 --- a/services/web/server/src/simcore_service_webserver/login/sql.py +++ b/services/web/server/src/simcore_service_webserver/login/sql.py @@ -2,7 +2,7 @@ log = getLogger(__name__) -LOG_TPL = '%s <--%s' +LOG_TPL = "%s <--%s" def find_one(conn, table, filter_, fields=None): @@ -12,27 +12,27 @@ def find_one(conn, table, filter_, fields=None): def find_one_sql(table, filter_, fields=None): - ''' + """ >>> find_one_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) >>> find_one_sql('tbl', {'id': 10}, fields=['foo', 'bar']) ('SELECT foo, bar FROM tbl WHERE id=$1', [10]) - ''' + """ keys, values = _split_dict(filter_) - fields = ', '.join(fields) if fields else '*' + fields = ", ".join(fields) if fields else "*" where = _pairs(keys) - sql = 'SELECT {} FROM {} WHERE {}'.format(fields, table, where) + sql = "SELECT {} FROM {} WHERE {}".format(fields, table, where) return sql, values -def insert(conn, table, data, returning='id'): +def insert(conn, table, data, returning="id"): sql, values = insert_sql(table, data, returning) log.debug(LOG_TPL, sql, values) return conn.fetchval(sql, *values) -def insert_sql(table, data, returning='id'): - ''' +def insert_sql(table, data, returning="id"): + """ >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}) ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING id', ['bar', 1]) @@ -41,13 +41,14 @@ def insert_sql(table, data, returning='id'): >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning='pk') ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1]) - ''' + """ keys, values = _split_dict(data) - sql = 'INSERT INTO {} ({}) VALUES ({}){}'.format( + sql = "INSERT INTO {} ({}) VALUES ({}){}".format( table, - ', '.join(keys), - ', '.join(_placeholders(data)), - ' RETURNING {}'.format(returning) if returning else '') + ", ".join(keys), + ", ".join(_placeholders(data)), + " RETURNING {}".format(returning) if returning else "", + ) return sql, values @@ -58,16 +59,15 @@ def update(conn, table, filter_, updates): def update_sql(table, filter_, updates): - ''' + """ >>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'}) ('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a']) - ''' + """ where_keys, where_vals = _split_dict(filter_) up_keys, up_vals = _split_dict(updates) - changes = _pairs(up_keys, sep=', ') + changes = _pairs(up_keys, sep=", ") where = _pairs(where_keys, start=len(up_keys) + 1) - sql = 'UPDATE {} SET {} WHERE {}'.format( - table, changes, where) + sql = "UPDATE {} SET {} WHERE {}".format(table, changes, where) return sql, up_vals + where_vals @@ -78,41 +78,41 @@ def delete(conn, table, filter_): def delete_sql(table, filter_): - ''' + """ >>> delete_sql('tbl', {'foo': 10, 'bar': 'baz'}) ('DELETE FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10]) - ''' + """ keys, values = _split_dict(filter_) where = _pairs(keys) - sql = 'DELETE FROM {} WHERE {}'.format(table, where) + sql = "DELETE FROM {} WHERE {}".format(table, where) return sql, values -def _pairs(keys, *, start=1, sep=' AND '): - ''' +def _pairs(keys, *, start=1, sep=" AND "): + """ >>> _pairs(['foo', 'bar', 'baz'], sep=', ') 'foo=$1, bar=$2, baz=$3' >>> _pairs(['foo', 'bar', 'baz'], start=2) 'foo=$2 AND bar=$3 AND baz=$4' - ''' - return sep.join('{}=${}'.format(k, i) for i, k in 
enumerate(keys, start)) + """ + return sep.join("{}=${}".format(k, i) for i, k in enumerate(keys, start)) def _placeholders(variables): - '''Returns placeholders by number of variables + """Returns placeholders by number of variables >>> _placeholders(['foo', 'bar', 1]) ['$1', '$2', '$3'] - ''' - return ['${}'.format(i) for i, _ in enumerate(variables, 1)] + """ + return ["${}".format(i) for i, _ in enumerate(variables, 1)] def _split_dict(dic): - '''Split dict into sorted keys and values + """Split dict into sorted keys and values >>> _split_dict({'b': 2, 'a': 1}) (['a', 'b'], [1, 2]) - ''' + """ keys = sorted(dic.keys()) return keys, [dic[k] for k in keys] @@ -120,6 +120,4 @@ def _split_dict(dic): if __name__ == "__main__": import doctest - print(doctest.testmod( - optionflags=doctest.REPORT_ONLY_FIRST_FAILURE - )) + print(doctest.testmod(optionflags=doctest.REPORT_ONLY_FIRST_FAILURE)) diff --git a/services/web/server/src/simcore_service_webserver/login/storage.py b/services/web/server/src/simcore_service_webserver/login/storage.py index 58a680dcd3e..ca908ae205b 100644 --- a/services/web/server/src/simcore_service_webserver/login/storage.py +++ b/services/web/server/src/simcore_service_webserver/login/storage.py @@ -11,10 +11,11 @@ log = getLogger(__name__) + class AsyncpgStorage: - def __init__(self, pool, *, - user_table_name='users', - confirmation_table_name='confirmations'): + def __init__( + self, pool, *, user_table_name="users", confirmation_table_name="confirmations" + ): self.pool = pool self.user_tbl = user_table_name self.confirm_tbl = confirmation_table_name @@ -22,54 +23,49 @@ def __init__(self, pool, *, async def get_user(self, with_data) -> asyncpg.Record: # FIXME: these can throw!!!! async with self.pool.acquire() as conn: - data = await sql.find_one(conn, self.user_tbl, with_data) + data = await sql.find_one(conn, self.user_tbl, with_data) return data async def create_user(self, data) -> asyncpg.Record: - data.setdefault('created_at', datetime.utcnow()) + data.setdefault("created_at", datetime.utcnow()) async with self.pool.acquire() as conn: - data['id'] = await sql.insert(conn, self.user_tbl, data) + data["id"] = await sql.insert(conn, self.user_tbl, data) return data async def update_user(self, user, updates) -> asyncpg.Record: async with self.pool.acquire() as conn: - await sql.update(conn, self.user_tbl, {'id': user['id']}, updates) + await sql.update(conn, self.user_tbl, {"id": user["id"]}, updates) async def delete_user(self, user): async with self.pool.acquire() as conn: - await sql.delete(conn, self.user_tbl, {'id': user['id']}) + await sql.delete(conn, self.user_tbl, {"id": user["id"]}) async def create_confirmation(self, user, action, data=None) -> asyncpg.Record: async with self.pool.acquire() as conn: while True: code = get_random_string(30) - if not await sql.find_one(conn, self.confirm_tbl, - {'code': code}): + if not await sql.find_one(conn, self.confirm_tbl, {"code": code}): break confirmation = { - 'code': code, - 'user_id': user['id'], - 'action': action, - 'data': data, - 'created_at': datetime.utcnow(), + "code": code, + "user_id": user["id"], + "action": action, + "data": data, + "created_at": datetime.utcnow(), } await sql.insert(conn, self.confirm_tbl, confirmation, None) return confirmation async def get_confirmation(self, filter_dict) -> asyncpg.Record: - if 'user' in filter_dict: - filter_dict['user_id'] = filter_dict.pop('user')['id'] + if "user" in filter_dict: + filter_dict["user_id"] = filter_dict.pop("user")["id"] async with 
self.pool.acquire() as conn: confirmation = await sql.find_one(conn, self.confirm_tbl, filter_dict) return confirmation async def delete_confirmation(self, confirmation): async with self.pool.acquire() as conn: - await sql.delete(conn, self.confirm_tbl, - {'code': confirmation['code']}) - - - + await sql.delete(conn, self.confirm_tbl, {"code": confirmation["code"]}) # helpers ---------------------------- @@ -79,16 +75,19 @@ def _to_enum(data): # TODO: ensure columns names and types! User tables for that # See https://docs.sqlalchemy.org/en/latest/core/metadata.html if data: - for key, enumtype in ( ('status', UserStatus), - ('role', UserRole), - ('action', ConfirmationAction) ): + for key, enumtype in ( + ("status", UserStatus), + ("role", UserRole), + ("action", ConfirmationAction), + ): if key in data: data[key] = getattr(enumtype, data[key]) return data + def _to_name(data): if data: - for key in ('status', 'role', 'action'): + for key in ("status", "role", "action"): if key in data: if isinstance(data[key], enum.Enum): data[key] = data[key].name diff --git a/services/web/server/src/simcore_service_webserver/login/utils.py b/services/web/server/src/simcore_service_webserver/login/utils.py index 577f30198cb..cd6437f0c4d 100644 --- a/services/web/server/src/simcore_service_webserver/login/utils.py +++ b/services/web/server/src/simcore_service_webserver/login/utils.py @@ -1,51 +1,51 @@ import random -import string from email.mime.text import MIMEText from logging import getLogger from os.path import join from pprint import pformat +from typing import Mapping, Optional import aiosmtplib import attr import passlib.hash +from aiohttp import web from aiohttp_jinja2 import render_string +from passlib import pwd -from aiohttp import web from servicelib.rest_models import LogMessageType from ..resources import resources from .cfg import cfg # TODO: remove this singleton!!! 
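AsyncpgStorage delegates all SQL building to the login/sql.py helpers shown earlier; each builder returns a `(query, values)` pair whose values line up with asyncpg's `$1, $2, ...` placeholders. A usage sketch matching the module's own doctests:

    query, values = find_one_sql("confirmations", {"code": "deadbeef"})
    # -> ('SELECT * FROM confirmations WHERE code=$1', ['deadbeef'])

    # inside AsyncpgStorage this becomes, roughly:
    #     row = await conn.fetchrow(query, *values)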
-CHARS = string.ascii_uppercase + string.ascii_lowercase + string.digits log = getLogger(__name__) -def encrypt_password(password): - #TODO: add settings sha256_crypt.using(**settings).hash(secret) +def encrypt_password(password: str) -> str: + # TODO: add settings sha256_crypt.using(**settings).hash(secret) # see https://passlib.readthedocs.io/en/stable/lib/passlib.hash.sha256_crypt.html # return passlib.hash.sha256_crypt.using(rounds=1000).hash(password) -def check_password(password, password_hash): +def check_password(password: str, password_hash: str) -> bool: return passlib.hash.sha256_crypt.verify(password, password_hash) -def get_random_string(min_len, max_len=None): +def get_random_string(min_len: int, max_len: Optional[int] = None) -> str: max_len = max_len or min_len size = random.randint(min_len, max_len) - return ''.join(random.choice(CHARS) for x in range(size)) + return pwd.genword(entropy=52, length=size) -def get_client_ip(request): +def get_client_ip(request: web.Request) -> str: try: - ips = request.headers['X-Forwarded-For'] + ips = request.headers["X-Forwarded-For"] except KeyError: - ips = request.transport.get_extra_info('peername')[0] - return ips.split(',')[0] + ips = request.transport.get_extra_info("peername")[0] + return ips.split(",")[0] -async def send_mail(recipient, subject, body): +async def send_mail(recipient: str, subject: str, body: str) -> None: # TODO: move to email submodule smtp_args = dict( loop=cfg.APP.loop, @@ -55,10 +55,10 @@ async def send_mail(recipient, subject, body): ) log.debug("Sending email with smtp configuration: %s", pformat(smtp_args)) - msg = MIMEText(body, 'html') - msg['Subject'] = subject - msg['From'] = cfg.SMTP_SENDER - msg['To'] = recipient + msg = MIMEText(body, "html") + msg["Subject"] = subject + msg["From"] = cfg.SMTP_SENDER + msg["To"] = recipient if cfg.SMTP_PORT == 587: # NOTE: aiosmtplib does not handle port 587 correctly @@ -81,21 +81,25 @@ async def send_mail(recipient, subject, body): await smtp.login(cfg.SMTP_USERNAME, cfg.SMTP_PASSWORD) await smtp.send_message(msg) -async def render_and_send_mail(request, to, template, context=None): + +async def render_and_send_mail( + request: web.Request, to: str, template: str, context: Mapping +): page = render_string(str(template), request, context) - subject, body = page.split('\n', 1) + subject, body = page.split("\n", 1) await send_mail(to, subject.strip(), body) def themed(template): return resources.get_path(join(cfg.THEME, template)) + def common_themed(template): return resources.get_path(join(cfg.COMMON_THEME, template)) -def flash_response(msg: str, level: str="INFO"): - response = web.json_response(data={ - 'data': attr.asdict(LogMessageType(msg, level)), - 'error': None - }) + +def flash_response(msg: str, level: str = "INFO") -> web.Response: + response = web.json_response( + data={"data": attr.asdict(LogMessageType(msg, level)), "error": None} + ) return response diff --git a/services/web/server/src/simcore_service_webserver/projects/__init__.py b/services/web/server/src/simcore_service_webserver/projects/__init__.py index 9e8535add87..66cd4423026 100644 --- a/services/web/server/src/simcore_service_webserver/projects/__init__.py +++ b/services/web/server/src/simcore_service_webserver/projects/__init__.py @@ -12,14 +12,15 @@ from aiohttp import ClientSession, web from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed -from servicelib.application_keys import (APP_CONFIG_KEY, - APP_JSONSCHEMA_SPECS_KEY) +from servicelib.application_keys import 
APP_CONFIG_KEY, APP_JSONSCHEMA_SPECS_KEY from servicelib.application_setup import ModuleCategory, app_module_setup from servicelib.client_session import get_client_session from servicelib.jsonschema_specs import create_jsonschema_specs -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from ..resources import resources from ..rest_config import APP_OPENAPI_SPECS_KEY @@ -41,12 +42,12 @@ def _create_routes(tag, handlers_module, specs, *, disable_login=False): # TODO: Remove 'disable_login' and use instead a mock.patch on the decorator! handlers = get_handlers_from_namespace(handlers_module) if disable_login: - handlers = { name: hnds.__wrapped__ for name, hnds in handlers.items() } + handlers = {name: hnds.__wrapped__ for name, hnds in handlers.items()} routes = map_handlers_with_operations( - handlers, - filter(lambda o: tag in o[3], iter_path_operations(specs)), - strict=True + handlers, + filter(lambda o: tag in o[3], iter_path_operations(specs)), + strict=True, ) if disable_login: @@ -55,10 +56,12 @@ def _create_routes(tag, handlers_module, specs, *, disable_login=False): return routes - -@app_module_setup(module_name, ModuleCategory.ADDON, - depends=[f'simcore_service_webserver.{mod}' for mod in ('rest', 'db') ], - logger=logger) +@app_module_setup( + module_name, + ModuleCategory.ADDON, + depends=[f"simcore_service_webserver.{mod}" for mod in ("rest", "db")], + logger=logger, +) def setup(app: web.Application, *, enable_fake_data=False) -> bool: """ @@ -105,6 +108,4 @@ def setup(app: web.Application, *, enable_fake_data=False) -> bool: # alias setup_projects = setup -__all__ = ( - 'setup_projects' -) +__all__ = "setup_projects" diff --git a/services/web/server/src/simcore_service_webserver/projects/config.py b/services/web/server/src/simcore_service_webserver/projects/config.py index c1f2a2bfd29..eb3e7d6cbad 100644 --- a/services/web/server/src/simcore_service_webserver/projects/config.py +++ b/services/web/server/src/simcore_service_webserver/projects/config.py @@ -7,6 +7,4 @@ CONFIG_SECTION_NAME = "projects" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool() -}) +schema = T.Dict({T.Key("enabled", default=True, optional=True): T.Bool()}) diff --git a/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py index 194408536b8..32409184359 100644 --- a/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py @@ -13,6 +13,7 @@ log = logging.getLogger(__name__) + @login_required async def get_node_output_ui(request: web.Request): """ Returns a json description of the ui for presenting the output within the mainUi @@ -20,9 +21,7 @@ async def get_node_output_ui(request: web.Request): json payloads and responses for the api calls available at this endpoint """ - log.debug(request.match_info["nodeInstanceUUID"], - request.match_info["outputKey"] - ) + log.debug(request.match_info["nodeInstanceUUID"], request.match_info["outputKey"]) raise NotImplementedError() @@ -33,14 +32,16 @@ async def send_to_node_output_api(request: web.Request): protocol depends on the definition """ body = await request.body - log.debug(request.match_info["nodeInstanceUUID"], - 
request.match_info["outputKey"], - request.match_info["apiCall"], - body + log.debug( + request.match_info["nodeInstanceUUID"], + request.match_info["outputKey"], + request.match_info["apiCall"], + body, ) raise NotImplementedError() + @login_required async def get_node_output_iframe(request: web.Request): """ entry point for iframe interaction with the node. diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_access.py b/services/web/server/src/simcore_service_webserver/projects/projects_access.py index d9fc5ca09a1..0a71e902d8a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_access.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_access.py @@ -1,4 +1,3 @@ - import jsondiff from aiohttp import web @@ -10,10 +9,10 @@ async def can_update_node_inputs(context): Returns True if user has permission to update inputs """ - db = context['dbapi'] - project_uuid = context['project_id'] - user_id = context['user_id'] - updated_project = context['new_data'] + db = context["dbapi"] + project_uuid = context["project_id"] + user_id = context["user_id"] + updated_project = context["new_data"] if project_uuid is None or user_id is None: return False @@ -28,8 +27,8 @@ async def can_update_node_inputs(context): try: for node in diffs["workbench"]: # can ONLY modify `inputs` fields set as ReadAndWrite - access = current_project['workbench'][node]["inputAccess"] - inputs = diffs["workbench"][node]['inputs'] + access = current_project["workbench"][node]["inputAccess"] + inputs = diffs["workbench"][node]["inputs"] for key in inputs: if access.get(key) != "ReadAndWrite": return False @@ -38,8 +37,7 @@ async def can_update_node_inputs(context): pass return False - return len(diffs)==0 # no changes - + return len(diffs) == 0 # no changes def setup_projects_access(app: web.Application): @@ -49,4 +47,6 @@ def setup_projects_access(app: web.Application): hrba = get_access_model(app) # TODO: add here also named permissions, i.e. 
all project.* operations - hrba.roles[UserRole.GUEST].check["project.workbench.node.inputs.update"] = can_update_node_inputs + hrba.roles[UserRole.GUEST].check[ + "project.workbench.node.inputs.update" + ] = can_update_node_inputs diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index dd1e3c5ecf1..de0d29f2e59 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -6,9 +6,11 @@ - return data and successful HTTP responses (or raise them) - upon failure raise errors that can be also HTTP reponses """ +# pylint: disable=too-many-arguments + import logging from asyncio import ensure_future, gather -from pprint import pprint +from pprint import pformat from typing import Dict, Optional from uuid import uuid4 @@ -20,26 +22,36 @@ from ..computation_api import delete_pipeline_db from ..director import director_api -from ..storage_api import \ - copy_data_folders_from_project # mocked in unit-tests -from ..storage_api import (delete_data_folders_of_project, - delete_data_folders_of_project_node) +from ..storage_api import copy_data_folders_from_project # mocked in unit-tests +from ..storage_api import ( + delete_data_folders_of_project, + delete_data_folders_of_project_node, +) from .config import CONFIG_SECTION_NAME from .projects_db import APP_PROJECT_DBAPI from .projects_exceptions import NodeNotFoundError, ProjectNotFoundError from .projects_utils import clone_project_document + log = logging.getLogger(__name__) + def _is_node_dynamic(node_key: str) -> bool: return "/dynamic/" in node_key + def validate_project(app: web.Application, project: Dict): project_schema = app[APP_JSONSCHEMA_SPECS_KEY][CONFIG_SECTION_NAME] - validate_instance(project, project_schema) # TODO: handl + validate_instance(project, project_schema) # TODO: handl -async def get_project_for_user(app: web.Application, project_uuid, user_id, *, include_templates=False) -> Dict: +async def get_project_for_user( + app: web.Application, + project_uuid: str, + user_id: int, + *, + include_templates: bool = False +) -> Dict: """ Returns a project accessible to user :raises web.HTTPNotFound: if no match found @@ -66,7 +78,9 @@ async def get_project_for_user(app: web.Application, project_uuid, user_id, *, i raise web.HTTPNotFound(reason="Project not found") -async def clone_project(request: web.Request, project: Dict, user_id, forced_copy_project_id: str ="") -> Dict: +async def clone_project( + request: web.Request, project: Dict, user_id: int, forced_copy_project_id: str = "" +) -> Dict: """Clones both document and data folders of a project - document @@ -87,54 +101,86 @@ async def clone_project(request: web.Request, project: Dict, user_id, forced_cop """ cloned_project, nodes_map = clone_project_document(project, forced_copy_project_id) - updated_project = await copy_data_folders_from_project(request.app, - project, cloned_project, nodes_map, user_id) + updated_project = await copy_data_folders_from_project( + request.app, project, cloned_project, nodes_map, user_id + ) return updated_project -async def start_project_interactive_services(request: web.Request, project: Dict, user_id: str) -> None: + +async def start_project_interactive_services( + request: web.Request, project: Dict, user_id: str +) -> None: # first get the services if they already exist - log.debug("getting running interactive services of 
project %s for user %s", project["uuid"], user_id) - running_services = await director_api.get_running_interactive_services(request.app, user_id, project["uuid"]) + log.debug( + "getting running interactive services of project %s for user %s", + project["uuid"], + user_id, + ) + running_services = await director_api.get_running_interactive_services( + request.app, user_id, project["uuid"] + ) running_service_uuids = [x["service_uuid"] for x in running_services] # now start them if needed - project_needed_services = {service_uuid:service for service_uuid, service in project["workbench"].items() \ - if _is_node_dynamic(service["key"]) and \ - service_uuid not in running_service_uuids} - - start_service_tasks = [director_api.start_service(request.app, - user_id=user_id, - project_id=project["uuid"], - service_key=service["key"], - service_version=service["version"], - service_uuid=service_uuid) for service_uuid, service in project_needed_services.items()] + project_needed_services = { + service_uuid: service + for service_uuid, service in project["workbench"].items() + if _is_node_dynamic(service["key"]) + and service_uuid not in running_service_uuids + } + + start_service_tasks = [ + director_api.start_service( + request.app, + user_id=user_id, + project_id=project["uuid"], + service_key=service["key"], + service_version=service["version"], + service_uuid=service_uuid, + ) + for service_uuid, service in project_needed_services.items() + ] await gather(*start_service_tasks) -async def delete_project(request: web.Request, project_uuid: str, user_id: str) -> None: +async def delete_project(request: web.Request, project_uuid: str, user_id: int) -> None: await delete_project_from_db(request, project_uuid, user_id) + async def remove_services_and_data(): await remove_project_interactive_services(user_id, project_uuid, request.app) await delete_project_data(request, project_uuid, user_id) + ensure_future(remove_services_and_data()) + @observe(event="SIGNAL_PROJECT_CLOSE") -async def remove_project_interactive_services(user_id: Optional[str], project_uuid: Optional[str], app: web.Application) -> None: +async def remove_project_interactive_services( + user_id: Optional[int], project_uuid: Optional[str], app: web.Application +) -> None: if not user_id and not project_uuid: raise ValueError("Expected either user or project") - list_of_services = await director_api.get_running_interactive_services(app, - project_id=project_uuid, - user_id=user_id) - stop_tasks = [director_api.stop_service(app, service["service_uuid"]) for service in list_of_services] + list_of_services = await director_api.get_running_interactive_services( + app, project_id=project_uuid, user_id=user_id + ) + stop_tasks = [ + director_api.stop_service(app, service["service_uuid"]) + for service in list_of_services + ] if stop_tasks: await gather(*stop_tasks) -async def delete_project_data(request: web.Request, project_uuid: str, user_id: str) -> None: + +async def delete_project_data( + request: web.Request, project_uuid: str, user_id: int +) -> None: # requests storage to delete all project's stored data await delete_data_folders_of_project(request.app, project_uuid, user_id) -async def delete_project_from_db(request: web.Request, project_uuid: str, user_id: str) -> None: + +async def delete_project_from_db( + request: web.Request, project_uuid: str, user_id: int +) -> None: db = request.config_dict[APP_PROJECT_DBAPI] try: await delete_pipeline_db(request.app, project_uuid) @@ -146,58 +192,104 @@ async def 
delete_project_from_db(request: web.Request, project_uuid: str, user_i # requests storage to delete all project's stored data await delete_data_folders_of_project(request.app, project_uuid, user_id) -async def add_project_node(request: web.Request, project_uuid: str, user_id: str, service_key: str, service_version: str, service_id: Optional[str]) -> str: # pylint: disable=too-many-arguments - log.debug("starting node %s:%s in project %s for user %s", service_key, service_version, project_uuid, user_id) + +async def add_project_node( + request: web.Request, + project_uuid: str, + user_id: int, + service_key: str, + service_version: str, + service_id: Optional[str], +) -> str: + log.debug( + "starting node %s:%s in project %s for user %s", + service_key, + service_version, + project_uuid, + user_id, + ) node_uuid = service_id if service_id else str(uuid4()) if _is_node_dynamic(service_key): - await director_api.start_service(request.app, user_id, project_uuid, service_key, service_version, node_uuid) + await director_api.start_service( + request.app, user_id, project_uuid, service_key, service_version, node_uuid + ) return node_uuid -async def get_project_node(request: web.Request, project_uuid: str, user_id:str, node_id: str): - log.debug("getting node %s in project %s for user %s", node_id, project_uuid, user_id) - list_of_interactive_services = await director_api.get_running_interactive_services(request.app, - project_id=project_uuid, - user_id=user_id) +async def get_project_node( + request: web.Request, project_uuid: str, user_id: int, node_id: str +): + log.debug( + "getting node %s in project %s for user %s", node_id, project_uuid, user_id + ) + + list_of_interactive_services = await director_api.get_running_interactive_services( + request.app, project_id=project_uuid, user_id=user_id + ) # get the project if it is running for service in list_of_interactive_services: if service["service_uuid"] == node_id: return service # the service is not running, it's a computational service maybe # TODO: find out if computational service is running if not throw a 404 since it's not around - return { - "service_uuid": node_id, - "service_state": "idle" - } + return {"service_uuid": node_id, "service_state": "idle"} -async def delete_project_node(request: web.Request, project_uuid: str, user_id: str, node_uuid: str) -> None: - log.debug("deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id) - list_of_services = await director_api.get_running_interactive_services(request.app, - project_id=project_uuid, - user_id=user_id) +async def delete_project_node( + request: web.Request, project_uuid: str, user_id: int, node_uuid: str +) -> None: + log.debug( + "deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id + ) + + list_of_services = await director_api.get_running_interactive_services( + request.app, project_id=project_uuid, user_id=user_id + ) # stop the service if it is running for service in list_of_services: if service["service_uuid"] == node_uuid: await director_api.stop_service(request.app, node_uuid) break # remove its data if any - await delete_data_folders_of_project_node(request.app, project_uuid, node_uuid, user_id) - - -async def update_project_node_progress(app: web.Application, user_id: str, project_id: str, node_id: str, progress: float) -> Optional[Dict]: - log.debug("updating node %s progress in project %s for user %s with %s", node_id, project_id, user_id, progress) + await delete_data_folders_of_project_node( + request.app, project_uuid, 
node_uuid, user_id + ) + + +async def update_project_node_progress( + app: web.Application, user_id: int, project_id: str, node_id: str, progress: float +) -> Optional[Dict]: + log.debug( + "updating node %s progress in project %s for user %s with %s", + node_id, + project_id, + user_id, + progress, + ) project = await get_project_for_user(app, project_id, user_id) if not node_id in project["workbench"]: raise NodeNotFoundError(project_id, node_id) - project["workbench"][node_id]["progress"] = int(100.0 * float(progress) + .5) + project["workbench"][node_id]["progress"] = int(100.0 * float(progress) + 0.5) db = app[APP_PROJECT_DBAPI] await db.update_user_project(project, user_id, project_id) return project["workbench"][node_id] -async def update_project_node_outputs(app: web.Application, user_id: str, project_id: str, node_id: str, data: Optional[Dict]) -> Optional[Dict]: - log.debug("updating node %s outputs in project %s for user %s with %s", node_id, project_id, user_id, pprint(data)) + +async def update_project_node_outputs( + app: web.Application, + user_id: int, + project_id: str, + node_id: str, + data: Optional[Dict], +) -> Optional[Dict]: + log.debug( + "updating node %s outputs in project %s for user %s with %s", + node_id, + project_id, + user_id, + pformat(data), + ) project = await get_project_for_user(app, project_id, user_id) if not node_id in project["workbench"]: raise NodeNotFoundError(project_id, node_id) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_db.py b/services/web/server/src/simcore_service_webserver/projects/projects_db.py index dc5baaf743a..a51a9aa745a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_db.py @@ -238,7 +238,7 @@ async def load_template_projects(self, *, only_published=False) -> List[Dict]: async def __load_projects(self, conn: SAConnection, query) -> List[Dict]: api_projects: List[Dict] = [] # API model-compatible projects - db_projects: List[Dict] = [] # DB model-compatible projects + db_projects: List[Dict] = [] # DB model-compatible projects async for row in conn.execute(query): prj = dict(row.items()) log.debug("found project: %s", prj) @@ -405,7 +405,6 @@ async def update_user_project( ) await conn.execute(query) - async def delete_user_project(self, user_id: int, project_uuid: str): log.info("Deleting project %s for user %s", project_uuid, user_id) async with self.engine.acquire() as conn: diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py index 3c6fb28718c..15a9eb40fa0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py @@ -1,29 +1,39 @@ """Defines the different exceptions that may arise in the projects subpackage""" + class ProjectsException(Exception): """Basic exception for errors raised in projects""" + def __init__(self, msg=None): if msg is None: msg = "Unexpected error occured in projects subpackage" super(ProjectsException, self).__init__(msg) + class ProjectInvalidRightsError(ProjectsException): """Invalid rights to access project""" + def __init__(self, user_id, project_uuid): - msg = "User {} has no rights to access project with uuid {}".format(user_id, project_uuid) + msg = "User {} has no rights to access project with uuid 
{}".format( + user_id, project_uuid + ) super(ProjectInvalidRightsError, self).__init__(msg) self.user_id = user_id self.project_uuid = project_uuid + class ProjectNotFoundError(ProjectsException): """Project was not found in DB""" + def __init__(self, project_uuid): msg = "Project with uuid {} not found".format(project_uuid) super(ProjectNotFoundError, self).__init__(msg) self.project_uuid = project_uuid + class NodeNotFoundError(ProjectsException): """Node was not found in project""" + def __init__(self, project_uuid: str, node_uuid: str): msg = f"Node {node_uuid} not found in project {project_uuid}" super(NodeNotFoundError, self).__init__(msg) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py b/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py index c3da640a63a..7583e0bb3be 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_fakes.py @@ -10,20 +10,21 @@ log = logging.getLogger(__name__) + class Fake: """ Holds fake database of projects and its association to users for testing purposes Keeps also generated data """ + # TODO: auto generate data from specs and faker tool. Use http://json-schema-faker.js.org ProjectItem = namedtuple("ProjectItem", "id template data".split()) # fake databases - projects = {} # project_id -> ProjectItem - user_to_projects_map = defaultdict(list) # user_id -> [project_id, ...] - + projects = {} # project_id -> ProjectItem + user_to_projects_map = defaultdict(list) # user_id -> [project_id, ...] @classmethod def add_projects(cls, projects, user_id=None): @@ -31,8 +32,10 @@ def add_projects(cls, projects, user_id=None): """ for prj in projects: - pid = prj['uuid'] - cls.projects[pid] = cls.ProjectItem(id=pid, template=user_id is None, data=deepcopy(prj)) + pid = prj["uuid"] + cls.projects[pid] = cls.ProjectItem( + id=pid, template=user_id is None, data=deepcopy(prj) + ) if user_id is not None: cls.user_to_projects_map[user_id].append(pid) @@ -49,8 +52,8 @@ def load_template_projects(cls): projects = projects + json.load(f) for prj in projects: - pid = prj['uuid'] - cls.projects[pid] = cls.ProjectItem(id=pid, template=True, data=prj) + pid = prj["uuid"] + cls.projects[pid] = cls.ProjectItem(id=pid, template=True, data=prj) @classmethod def reset(cls): diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 916bb99a050..cd01997fc5c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -1,4 +1,3 @@ - """ Handlers for CRUD operations on /projects/ """ @@ -15,10 +14,9 @@ from ..security_api import check_permission from . 
import projects_api from .projects_db import APP_PROJECT_DBAPI -from .projects_exceptions import (ProjectInvalidRightsError, - ProjectNotFoundError) +from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError -OVERRIDABLE_DOCUMENT_KEYS = ['name', 'description', 'thumbnail', 'prjOwner'] +OVERRIDABLE_DOCUMENT_KEYS = ["name", "description", "thumbnail", "prjOwner"] # TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json log = logging.getLogger(__name__) @@ -26,40 +24,45 @@ @login_required async def create_projects(request: web.Request): - from .projects_api import clone_project # TODO: keep here since is async and parser thinks it is a handler + from .projects_api import ( + clone_project, + ) # TODO: keep here since is async and parser thinks it is a handler # pylint: disable=too-many-branches await check_permission(request, "project.create") - await check_permission(request, "services.pipeline.*") # due to update_pipeline_db + await check_permission(request, "services.pipeline.*") # due to update_pipeline_db user_id = request[RQT_USERID_KEY] db = request.config_dict[APP_PROJECT_DBAPI] - template_uuid = request.query.get('from_template') - as_template = request.query.get('as_template') + template_uuid = request.query.get("from_template") + as_template = request.query.get("as_template") try: project = {} - if as_template: # create template from + if as_template: # create template from await check_permission(request, "project.template.create") # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! from .projects_api import get_project_for_user - source_project = await get_project_for_user(request.app, + source_project = await get_project_for_user( + request.app, project_uuid=as_template, user_id=user_id, - include_templates=False + include_templates=False, ) project = await clone_project(request, source_project, user_id) - elif template_uuid: # create from template + elif template_uuid: # create from template template_prj = await db.get_template_project(template_uuid) if not template_prj: - raise web.HTTPNotFound(reason="Invalid template uuid {}".format(template_uuid)) + raise web.HTTPNotFound( + reason="Invalid template uuid {}".format(template_uuid) + ) project = await clone_project(request, template_prj, user_id) - #FIXME: parameterized inputs should get defaults provided by service + # FIXME: parameterized inputs should get defaults provided by service # overrides with body if request.has_body: @@ -78,7 +81,9 @@ async def create_projects(request: web.Request): projects_api.validate_project(request.app, project) # update metadata (uuid, timestamps, ownership) and save - await db.add_project(project, user_id, force_as_template=as_template is not None) + await db.add_project( + project, user_id, force_as_template=as_template is not None + ) # This is a new project and every new graph needs to be reflected in the pipeline db await update_pipeline_db(request.app, project["uuid"], project["workbench"]) @@ -90,8 +95,7 @@ async def create_projects(request: web.Request): raise web.HTTPUnauthorized else: - raise web.HTTPCreated(text=json.dumps(project), - content_type='application/json') + raise web.HTTPCreated(text=json.dumps(project), content_type="application/json") @login_required @@ -101,7 +105,7 @@ async def list_projects(request: web.Request): # TODO: implement all query parameters as # in 
https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html user_id = request[RQT_USERID_KEY] - ptype = request.query.get('type', 'all') # TODO: get default for oaspecs + ptype = request.query.get("type", "all") # TODO: get default for oaspecs db = request.config_dict[APP_PROJECT_DBAPI] # TODO: improve dbapi to list project @@ -109,13 +113,15 @@ async def list_projects(request: web.Request): if ptype in ("template", "all"): projects_list += await db.load_template_projects() - if ptype in ("user", "all"): # standard only (notice that templates will only) - projects_list += await db.load_user_projects(user_id=user_id, exclude_templates=True) + if ptype in ("user", "all"): # standard only (notice that templates will only) + projects_list += await db.load_user_projects( + user_id=user_id, exclude_templates=True + ) - start = int(request.query.get('start', 0)) - count = int(request.query.get('count',len(projects_list))) + start = int(request.query.get("start", 0)) + count = int(request.query.get("count", len(projects_list))) - stop = min(start+count, len(projects_list)) + stop = min(start + count, len(projects_list)) projects_list = projects_list[start:stop] # validate response @@ -128,7 +134,7 @@ async def list_projects(request: web.Request): log.exception("Skipping invalid project from list") continue - return {'data': validated_projects} + return {"data": validated_projects} @login_required @@ -142,15 +148,14 @@ async def get_project(request: web.Request): project_uuid = request.match_info.get("project_id") - project = await get_project_for_user(request.app, + project = await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=request[RQT_USERID_KEY], - include_templates=True + include_templates=True, ) - return { - 'data': project - } + return {"data": project} @login_required @@ -169,30 +174,34 @@ async def replace_project(request: web.Request): :raises web.HTTPNotFound: cannot find project id in repository """ - await check_permission(request, "services.pipeline.*") # due to update_pipeline_db + await check_permission(request, "services.pipeline.*") # due to update_pipeline_db user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") - replace_pipeline = request.query.get("run", False) # FIXME: Actually was never called. CHECK if logic still applies (issue #1176) + replace_pipeline = request.query.get( + "run", False + ) # FIXME: Actually was never called. 
CHECK if logic still applies (issue #1176) new_project = await request.json() - db = request.config_dict[APP_PROJECT_DBAPI] - await check_permission(request, "project.update | project.workbench.node.inputs.update", - context={ - 'dbapi': db, - 'project_id': project_uuid, - 'user_id': user_id, - 'new_data': new_project - }) + await check_permission( + request, + "project.update | project.workbench.node.inputs.update", + context={ + "dbapi": db, + "project_id": project_uuid, + "user_id": user_id, + "new_data": new_project, + }, + ) try: projects_api.validate_project(request.app, new_project) await db.update_user_project(new_project, user_id, project_uuid) - await update_pipeline_db(request.app, - project_uuid, new_project["workbench"], - replace_pipeline) + await update_pipeline_db( + request.app, project_uuid, new_project["workbench"], replace_pipeline + ) except ValidationError: raise web.HTTPBadRequest @@ -200,7 +209,8 @@ async def replace_project(request: web.Request): except ProjectNotFoundError: raise web.HTTPNotFound - return {'data': new_project} + return {"data": new_project} + @login_required async def delete_project(request: web.Request): @@ -210,30 +220,30 @@ async def delete_project(request: web.Request): # first check if the project exists user_id = request[RQT_USERID_KEY] project_uuid = request.match_info.get("project_id") - project = await projects_api.get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True + project = await projects_api.get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True ) with managed_resource(user_id, None, request.app) as rt: other_users = await rt.find_users_of_resource("project_id", project_uuid) if other_users: message = "Project is opened by another user. It cannot be deleted." if user_id in other_users: - message = "Project is still open. It cannot be deleted until it is closed." + message = ( + "Project is still open. It cannot be deleted until it is closed." + ) # we cannot delete that project raise web.HTTPForbidden(reason=message) await projects_api.delete_project(request, project_uuid, user_id) - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") + @login_required async def open_project(request: web.Request) -> web.Response: # TODO: replace by decorator since it checks again authentication await check_permission(request, "project.open") - # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
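One caveat next to the FIXME above: aiohttp query values arrive as strings, so `request.query.get("run", False)` yields the truthy string `"false"` for `?run=false`. If the `run` flag is revived (issue #1176), an explicit parse avoids the trap; a sketch with a hypothetical helper:

    def _query_flag(request: web.Request, name: str, default: bool = False) -> bool:
        # hypothetical helper: query parameters are strings, never bools
        value = request.query.get(name)
        if value is None:
            return default
        return value.lower() in ("1", "true", "yes")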
     from .projects_api import get_project_for_user
@@ -242,19 +252,19 @@ async def open_project(request: web.Request) -> web.Response:
     client_session_id = await request.json()

     with managed_resource(user_id, client_session_id, request.app) as rt:
-        project = await get_project_for_user(request.app,
+        project = await get_project_for_user(
+            request.app,
             project_uuid=project_uuid,
             user_id=user_id,
-            include_templates=True
+            include_templates=True,
         )
         await rt.add("project_id", project_uuid)

     # user id opened project uuid
     await projects_api.start_project_interactive_services(request, project, user_id)

-    return {
-        'data': project
-    }
+    return {"data": project}
+

 @login_required
 async def close_project(request: web.Request) -> web.Response:
@@ -270,19 +280,24 @@ async def close_project(request: web.Request) -> web.Response:
     from .projects_api import get_project_for_user

     with managed_resource(user_id, client_session_id, request.app) as rt:
-        project = await get_project_for_user(request.app,
+        project = await get_project_for_user(
+            request.app,
             project_uuid=project_uuid,
             user_id=user_id,
-            include_templates=True
+            include_templates=True,
         )
         await rt.remove("project_id")
         other_users = await rt.find_users_of_resource("project_id", project_uuid)
         if not other_users:
             # only remove the services if no one else is using them now
-            asyncio.ensure_future(projects_api.remove_project_interactive_services(user_id, project_uuid, request.app))
+            asyncio.ensure_future(
+                projects_api.remove_project_interactive_services(
+                    user_id, project_uuid, request.app
+                )
+            )
+
+    raise web.HTTPNoContent(content_type="application/json")

-    raise web.HTTPNoContent(content_type='application/json')

 @login_required
 async def get_active_project(request: web.Request) -> web.Response:
@@ -296,15 +311,16 @@ async def get_active_project(request: web.Request) -> web.Response:
     if list_project_ids:
         # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead!
         from .projects_api import get_project_for_user
-        project = await get_project_for_user(request.app,
+
+        project = await get_project_for_user(
+            request.app,
             project_uuid=list_project_ids[0],
             user_id=user_id,
-            include_templates=True
+            include_templates=True,
         )

-    return {
-        'data': project
-    }
+    return {"data": project}
+

 @login_required
 async def create_node(request: web.Request) -> web.Response:
@@ -317,11 +333,10 @@ async def create_node(request: web.Request) -> web.Response:

     # ensure the project exists
     # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead!
from .projects_api import get_project_for_user - await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) + + await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True + ) data = { "node_id": await projects_api.add_project_node( request, @@ -329,10 +344,11 @@ async def create_node(request: web.Request) -> web.Response: user_id, body["service_key"], body["service_version"], - body["service_id"] if "service_id" in body else None + body["service_id"] if "service_id" in body else None, ) } - return web.json_response({'data': data}, status=web.HTTPCreated.status_code) + return web.json_response({"data": data}, status=web.HTTPCreated.status_code) + @login_required async def get_node(request: web.Request) -> web.Response: @@ -344,16 +360,16 @@ async def get_node(request: web.Request) -> web.Response: # ensure the project exists # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! from .projects_api import get_project_for_user - await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) - node_details = await projects_api.get_project_node(request, project_uuid, user_id, node_uuid) - return { - 'data': node_details - } + await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True + ) + + node_details = await projects_api.get_project_node( + request, project_uuid, user_id, node_uuid + ) + return {"data": node_details} + @login_required async def delete_node(request: web.Request) -> web.Response: @@ -365,36 +381,33 @@ async def delete_node(request: web.Request) -> web.Response: # ensure the project exists # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead! 
from .projects_api import get_project_for_user - await get_project_for_user(request.app, - project_uuid=project_uuid, - user_id=user_id, - include_templates=True - ) + + await get_project_for_user( + request.app, project_uuid=project_uuid, user_id=user_id, include_templates=True + ) await projects_api.delete_project_node(request, project_uuid, user_id, node_uuid) - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") @login_required async def add_tag(request: web.Request): - await check_permission(request, 'project.tag.*') + await check_permission(request, "project.tag.*") uid, db = request[RQT_USERID_KEY], request.config_dict[APP_PROJECT_DBAPI] - tag_id, study_uuid = request.match_info.get('tag_id'), request.match_info.get('study_uuid') - return await db.add_tag( - project_uuid=study_uuid, - user_id=uid, - tag_id=int(tag_id) + tag_id, study_uuid = ( + request.match_info.get("tag_id"), + request.match_info.get("study_uuid"), ) + return await db.add_tag(project_uuid=study_uuid, user_id=uid, tag_id=int(tag_id)) @login_required async def remove_tag(request: web.Request): - await check_permission(request, 'project.tag.*') + await check_permission(request, "project.tag.*") uid, db = request[RQT_USERID_KEY], request.config_dict[APP_PROJECT_DBAPI] - tag_id, study_uuid = request.match_info.get('tag_id'), request.match_info.get('study_uuid') - return await db.remove_tag( - project_uuid=study_uuid, - user_id=uid, - tag_id=int(tag_id) + tag_id, study_uuid = ( + request.match_info.get("tag_id"), + request.match_info.get("study_uuid"), ) + return await db.remove_tag(project_uuid=study_uuid, user_id=uid, tag_id=int(tag_id)) diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_models.py b/services/web/server/src/simcore_service_webserver/projects/projects_models.py index 7513b4fd5af..82d7e2e6191 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_models.py @@ -2,10 +2,14 @@ Facade """ -from simcore_postgres_database.webserver_models import (ProjectType, projects, - user_to_projects) +from simcore_postgres_database.webserver_models import ( + ProjectType, + projects, + user_to_projects, +) __all__ = [ - "projects", "ProjectType", + "projects", + "ProjectType", "user_to_projects", ] diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_utils.py b/services/web/server/src/simcore_service_webserver/projects/projects_utils.py index 6b709265563..c8a59316806 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_utils.py @@ -2,14 +2,17 @@ import re import uuid as uuidlib from copy import deepcopy -from typing import Dict, Tuple +from typing import AnyStr, Dict, Match, Optional, Tuple from servicelib.decorators import safe_return log = logging.getLogger(__name__) variable_pattern = re.compile(r"^{{\W*(\w+)\W*}}$") -def clone_project_document(project: Dict, forced_copy_project_id: str ="") -> Tuple[Dict, Dict]: + +def clone_project_document( + project: Dict, forced_copy_project_id: str = "" +) -> Tuple[Dict, Dict]: project_copy = deepcopy(project) # Update project id @@ -17,16 +20,16 @@ def clone_project_document(project: Dict, forced_copy_project_id: str ="") -> Tu if forced_copy_project_id: project_copy_uuid = uuidlib.UUID(forced_copy_project_id) else: - project_copy_uuid = 
uuidlib.uuid1() # random project id + project_copy_uuid = uuidlib.uuid1() # random project id - project_copy['uuid'] = str(project_copy_uuid) + project_copy["uuid"] = str(project_copy_uuid) # Workbench nodes shall be unique within the project context def _create_new_node_uuid(old_uuid): - return str( uuidlib.uuid5(project_copy_uuid, str(old_uuid)) ) + return str(uuidlib.uuid5(project_copy_uuid, str(old_uuid))) nodes_map = {} - for node_uuid in project.get('workbench', {}).keys(): + for node_uuid in project.get("workbench", {}).keys(): nodes_map[node_uuid] = _create_new_node_uuid(node_uuid) def _replace_uuids(node): @@ -44,12 +47,13 @@ def _replace_uuids(node): node[key] = _replace_uuids(value) return node - project_copy['workbench'] = _replace_uuids(project_copy.get('workbench', {})) + project_copy["workbench"] = _replace_uuids(project_copy.get("workbench", {})) return project_copy, nodes_map - @safe_return(if_fails_return=False, logger=log) -def substitute_parameterized_inputs(parameterized_project: Dict, parameters: Dict) -> Dict: +def substitute_parameterized_inputs( + parameterized_project: Dict, parameters: Dict +) -> Dict: """ Substitutes parameterized r/w inputs NOTE: project is is changed @@ -69,20 +73,30 @@ def _normalize_value(s): except ValueError: return s - for node in project['workbench'].values(): - inputs = node.get('inputs', {}) - access = node.get('inputAccess', {}) + def _get_param_input_match(name, value, access) -> Optional[Match[AnyStr]]: + if isinstance(value, str) and access.get(name, "ReadAndWrite") == "ReadAndWrite": + match = variable_pattern.match(value) + return match + return None + + for node in project["workbench"].values(): + inputs = node.get("inputs", {}) + access = node.get("inputAccess", {}) new_inputs = {} + for name, value in inputs.items(): - if isinstance(value, str) and access.get(name, "ReadAndWrite") == "ReadAndWrite": + match = _get_param_input_match(name, value, access) + if match: # TODO: use jinja2 to interpolate expressions? - m = variable_pattern.match(value) - if m: - value = m.group(1) - if value in parameters: - new_inputs[name] = _normalize_value(parameters[value]) - else: - log.warning("Could not resolve parameter %s. No value provided in %s", value, parameters) + value = match.group(1) + if value in parameters: + new_inputs[name] = _normalize_value(parameters[value]) + else: + log.warning( + "Could not resolve parameter %s. 
No value provided in %s", + value, + parameters, + ) inputs.update(new_inputs) return project @@ -100,11 +114,15 @@ def is_graph_equal(lhs_workbench: Dict, rhs_workbench: Dict) -> bool: for node_id, node in rhs_workbench.items(): # same nodes - if not all(node.get(k) == lhs_workbench[node_id].get(k) for k in ['key', 'version'] ): + if not all( + node.get(k) == lhs_workbench[node_id].get(k) for k in ["key", "version"] + ): raise ValueError() # same connectivity (edges) - if not set(node.get('inputNodes')) == set(lhs_workbench[node_id].get('inputNodes')): + if not set(node.get("inputNodes")) == set( + lhs_workbench[node_id].get("inputNodes") + ): raise ValueError() # same input values diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py b/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py index 0f7dc8cae1b..8f0c6fb8754 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/__init__.py @@ -13,8 +13,11 @@ from servicelib.application_keys import APP_CONFIG_KEY from servicelib.application_setup import ModuleCategory, app_module_setup -from .config import (APP_CLIENT_SOCKET_REGISTRY_KEY, - APP_RESOURCE_MANAGER_TASKS_KEY, CONFIG_SECTION_NAME) +from .config import ( + APP_CLIENT_SOCKET_REGISTRY_KEY, + APP_RESOURCE_MANAGER_TASKS_KEY, + CONFIG_SECTION_NAME, +) from .garbage_collector import setup as setup_garbage_collector from .redis import setup_redis_client from .registry import RedisResourceRegistry @@ -24,6 +27,7 @@ MODULE_NAME = __name__.split(".")[-1] module_name = module_name = __name__.replace(".__init__", "") + @app_module_setup(module_name, ModuleCategory.SYSTEM, logger=logger) def setup(app: web.Application) -> bool: """Sets up resource manager subsystem in the application @@ -32,8 +36,9 @@ def setup(app: web.Application) -> bool: cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] app[APP_RESOURCE_MANAGER_TASKS_KEY] = [] setup_redis_client(app) - app[APP_CLIENT_SOCKET_REGISTRY_KEY] = RedisResourceRegistry(app) if cfg["redis"]["enabled"] \ - else None + app[APP_CLIENT_SOCKET_REGISTRY_KEY] = ( + RedisResourceRegistry(app) if cfg["redis"]["enabled"] else None + ) setup_garbage_collector(app) return True @@ -41,6 +46,4 @@ def setup(app: web.Application) -> bool: # alias setup_resource_manager = setup -__all__ = ( - 'setup_resource_manager' -) +__all__ = "setup_resource_manager" diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/config.py b/services/web/server/src/simcore_service_webserver/resource_manager/config.py index a0f52e9a197..a086d96bfcd 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/config.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/config.py @@ -8,26 +8,35 @@ from servicelib.application_keys import APP_CONFIG_KEY -CONFIG_SECTION_NAME = 'resource_manager' +CONFIG_SECTION_NAME = "resource_manager" APP_CLIENT_REDIS_CLIENT_KEY = __name__ + ".resource_manager.redis_client" APP_CLIENT_SOCKET_REGISTRY_KEY = __name__ + ".resource_manager.registry" APP_RESOURCE_MANAGER_TASKS_KEY = __name__ + ".resource_manager.tasks.key" APP_GARBAGE_COLLECTOR_KEY = __name__ + ".resource_manager.garbage_collector_key" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()), - T.Key("resource_deletion_timeout_seconds", default=900, optional=True): T.Int(), - T.Key("garbage_collection_interval_seconds", 
default=30, optional=True): T.Int(), - T.Key("redis", optional=False): T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("host", default="redis", optional=True): T.String(), - T.Key("port", default=6793, optional=True): T.Int(), - }), -}) +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()), + T.Key("resource_deletion_timeout_seconds", default=900, optional=True): T.Int(), + T.Key( + "garbage_collection_interval_seconds", default=30, optional=True + ): T.Int(), + T.Key("redis", optional=False): T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Bool(), + T.Key("host", default="redis", optional=True): T.String(), + T.Key("port", default=6793, optional=True): T.Int(), + } + ), + } +) def get_service_deletion_timeout(app: web.Application) -> int: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]["resource_deletion_timeout_seconds"] + def get_garbage_collector_interval(app: web.Application) -> int: - return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]["garbage_collection_interval_seconds"] + return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME][ + "garbage_collection_interval_seconds" + ] diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py index 914177a4768..a7911f39af2 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) -async def collect_garbage(registry: RedisResourceRegistry, app: web.Application): +async def collect_garbage(registry: RedisResourceRegistry, app: web.Application): logger.info("collecting garbage...") alive_keys, dead_keys = await registry.get_all_resource_keys() logger.debug("potential dead keys: %s", dead_keys) @@ -34,24 +34,38 @@ async def collect_garbage(registry: RedisResourceRegistry, app: web.Application logger.debug("found the following resources: %s", resources) # find if there are alive entries using these resources for resource_name, resource_value in resources.items(): - other_keys = [x for x in await registry.find_keys((resource_name, resource_value)) if x != key] + other_keys = [ + x + for x in await registry.find_keys((resource_name, resource_value)) + if x != key + ] # the resource ref can be closed anyway - logger.debug( - "removing resource entry: %s: %s", key, resources) + logger.debug("removing resource entry: %s: %s", key, resources) await registry.remove_resource(key, resource_name) # check if the resource is still in use in the alive keys if not any(elem in alive_keys for elem in other_keys): # remove the resource from the other keys as well - remove_tasks = [registry.remove_resource( - x, resource_name) for x in other_keys] + remove_tasks = [ + registry.remove_resource(x, resource_name) for x in other_keys + ] if remove_tasks: logger.debug( - "removing resource entry: %s: %s", other_keys, resources) + "removing resource entry: %s: %s", other_keys, resources + ) await asyncio.gather(*remove_tasks) logger.debug( - "the resources %s:%s of %s may be now safely closed", resource_name, resource_value, key) - await emit(event="SIGNAL_PROJECT_CLOSE", user_id=None, project_uuid=resource_value, app=app) + "the resources %s:%s of %s may be now safely closed", + resource_name, + resource_value, + key, + ) + await emit( + event="SIGNAL_PROJECT_CLOSE", + user_id=None, 
+ project_uuid=resource_value, + app=app, + ) async def garbage_collector_task(app: web.Application): @@ -70,7 +84,9 @@ async def garbage_collector_task(app: web.Application): async def setup_garbage_collector_task(app: web.Application): - app[APP_GARBAGE_COLLECTOR_KEY] = asyncio.get_event_loop().create_task(garbage_collector_task(app)) + app[APP_GARBAGE_COLLECTOR_KEY] = asyncio.get_event_loop().create_task( + garbage_collector_task(app) + ) yield task = app[APP_GARBAGE_COLLECTOR_KEY] task.cancel() diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/redis.py b/services/web/server/src/simcore_service_webserver/resource_manager/redis.py index 3a06a499d62..fef9636e658 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/redis.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/redis.py @@ -9,15 +9,16 @@ log = logging.getLogger(__name__) -THIS_SERVICE_NAME = 'redis' +THIS_SERVICE_NAME = "redis" DSN = "redis://{host}:{port}" retry_upon_init_policy = dict( stop=stop_after_attempt(3), wait=wait_random(min=1, max=2), - before=before_log(log, logging.WARNING) + before=before_log(log, logging.WARNING), ) + async def redis_client(app: web.Application): cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] url = DSN.format(**cfg["redis"]) @@ -26,7 +27,7 @@ async def redis_client(app: web.Application): with attempt: client = await aioredis.create_redis_pool(url, encoding="utf-8") - assert client # nosec + assert client # nosec app[APP_CLIENT_REDIS_CLIENT_KEY] = client yield @@ -37,6 +38,7 @@ async def redis_client(app: web.Application): client.close() await client.wait_closed() + def setup_redis_client(app: web.Application): app[APP_CLIENT_REDIS_CLIENT_KEY] = None @@ -50,5 +52,6 @@ def setup_redis_client(app: web.Application): app.cleanup_ctx.append(redis_client) + def get_redis_client(app: web.Application) -> aioredis.Redis: return app[APP_CLIENT_REDIS_CLIENT_KEY] diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py index 82dfe2c1ff3..4127d68206e 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py @@ -27,6 +27,7 @@ RESOURCE_SUFFIX = "resources" ALIVE_SUFFIX = "alive" + @attr.s(auto_attribs=True) class RedisResourceRegistry: """ Keeps a record of connected sockets per user @@ -34,6 +35,7 @@ class RedisResourceRegistry: redis structure is following Redis Hash: key=user_id:client_session_id values={server_id socket_id project_id} """ + app: web.Application @classmethod @@ -43,11 +45,17 @@ def _hash_key(cls, key: Dict[str, str]) -> str: @classmethod def _decode_hash_key(cls, hash_key: str) -> Dict[str, str]: - tmp_key = hash_key[:-len(f":{RESOURCE_SUFFIX}")] if hash_key.endswith(f":{RESOURCE_SUFFIX}") else hash_key[:-len(f":{ALIVE_SUFFIX}")] + tmp_key = ( + hash_key[: -len(f":{RESOURCE_SUFFIX}")] + if hash_key.endswith(f":{RESOURCE_SUFFIX}") + else hash_key[: -len(f":{ALIVE_SUFFIX}")] + ) key = dict(x.split("=") for x in tmp_key.split(":")) return key - async def set_resource(self, key: Dict[str, str], resource: Tuple[str, str]) -> None: + async def set_resource( + self, key: Dict[str, str], resource: Tuple[str, str] + ) -> None: client = get_redis_client(self.app) hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" await client.hmset_dict(hash_key, **{resource[0]: resource[1]}) @@ -62,7 
+70,9 @@ async def remove_resource(self, key: Dict[str, str], resource_name: str) -> None hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}" await client.hdel(hash_key, resource_name) - async def find_resources(self, key: Dict[str, str], resource_name: str) -> List[str]: + async def find_resources( + self, key: Dict[str, str], resource_name: str + ) -> List[str]: client = get_redis_client(self.app) resources = [] # the key might only be partialy complete @@ -82,13 +92,13 @@ async def find_keys(self, resource: Tuple[str, str]) -> List[Dict[str, str]]: keys.append(self._decode_hash_key(hash_key)) return keys - async def set_key_alive(self, key: Dict[str, str], alive: bool, timeout: int =0) -> None: + async def set_key_alive( + self, key: Dict[str, str], alive: bool, timeout: int = 0 + ) -> None: client = get_redis_client(self.app) hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" - await client.set(hash_key, - 1, - expire=0 if alive else timeout - ) + await client.set(hash_key, 1, expire=0 if alive else timeout) + async def is_key_alive(self, key: Dict[str, str]) -> bool: client = get_redis_client(self.app) hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}" @@ -96,13 +106,24 @@ async def is_key_alive(self, key: Dict[str, str]) -> bool: async def remove_key(self, key: Dict[str, str]) -> None: client = get_redis_client(self.app) - await client.delete(f"{self._hash_key(key)}:{RESOURCE_SUFFIX}", - f"{self._hash_key(key)}:{ALIVE_SUFFIX}") - - async def get_all_resource_keys(self) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + await client.delete( + f"{self._hash_key(key)}:{RESOURCE_SUFFIX}", + f"{self._hash_key(key)}:{ALIVE_SUFFIX}", + ) + + async def get_all_resource_keys( + self, + ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: client = get_redis_client(self.app) - alive_keys = [self._decode_hash_key(hash_key) async for hash_key in client.iscan(match=f"*:{ALIVE_SUFFIX}")] - dead_keys = [self._decode_hash_key(hash_key) async for hash_key in client.iscan(match=f"*:{RESOURCE_SUFFIX}") if self._decode_hash_key(hash_key) not in alive_keys] + alive_keys = [ + self._decode_hash_key(hash_key) + async for hash_key in client.iscan(match=f"*:{ALIVE_SUFFIX}") + ] + dead_keys = [ + self._decode_hash_key(hash_key) + async for hash_key in client.iscan(match=f"*:{RESOURCE_SUFFIX}") + if self._decode_hash_key(hash_key) not in alive_keys + ] return (alive_keys, dead_keys) diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py b/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py index 7551bc28380..1b83047d4d2 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py @@ -28,66 +28,124 @@ SOCKET_ID_KEY = "socket_id" + @attr.s(auto_attribs=True) class WebsocketRegistry: user_id: str client_session_id: str app: web.Application - def _resource_key(self) -> Dict[str,str]: + def _resource_key(self) -> Dict[str, str]: return { "user_id": self.user_id, - "client_session_id": self.client_session_id if self.client_session_id else "*" - } + "client_session_id": self.client_session_id + if self.client_session_id + else "*", + } async def set_socket_id(self, socket_id: str) -> None: - log.debug("user %s/tab %s adding socket %s in registry...", self.user_id, self.client_session_id, socket_id) + log.debug( + "user %s/tab %s adding socket %s in registry...", + self.user_id, + 
self.client_session_id, + socket_id, + ) registry = get_registry(self.app) await registry.set_resource(self._resource_key(), (SOCKET_ID_KEY, socket_id)) await registry.set_key_alive(self._resource_key(), True) async def get_socket_id(self) -> str: - log.debug("user %s/tab %s removing socket from registry...", self.user_id, self.client_session_id) + log.debug( + "user %s/tab %s removing socket from registry...", + self.user_id, + self.client_session_id, + ) registry = get_registry(self.app) resources = await registry.get_resources(self._resource_key()) return resources.get(SOCKET_ID_KEY, None) async def remove_socket_id(self) -> None: - log.debug("user %s/tab %s removing socket from registry...", self.user_id, self.client_session_id) + log.debug( + "user %s/tab %s removing socket from registry...", + self.user_id, + self.client_session_id, + ) registry = get_registry(self.app) await registry.remove_resource(self._resource_key(), SOCKET_ID_KEY) - await registry.set_key_alive(self._resource_key(), False, get_service_deletion_timeout(self.app)) + await registry.set_key_alive( + self._resource_key(), False, get_service_deletion_timeout(self.app) + ) async def find_socket_ids(self) -> List[str]: - log.debug("user %s/tab %s finding %s from registry...", self.user_id, self.client_session_id, SOCKET_ID_KEY) + log.debug( + "user %s/tab %s finding %s from registry...", + self.user_id, + self.client_session_id, + SOCKET_ID_KEY, + ) registry = get_registry(self.app) - user_sockets = await registry.find_resources({"user_id": self.user_id, "client_session_id": "*"}, SOCKET_ID_KEY) + user_sockets = await registry.find_resources( + {"user_id": self.user_id, "client_session_id": "*"}, SOCKET_ID_KEY + ) return user_sockets async def find(self, key: str) -> List[str]: - log.debug("user %s/tab %s finding %s from registry...", self.user_id, self.client_session_id, key) + log.debug( + "user %s/tab %s finding %s from registry...", + self.user_id, + self.client_session_id, + key, + ) registry = get_registry(self.app) user_resources = await registry.find_resources(self._resource_key(), key) return user_resources async def add(self, key: str, value: str) -> None: - log.debug("user %s/tab %s adding %s:%s in registry...", self.user_id, self.client_session_id, key, value) + log.debug( + "user %s/tab %s adding %s:%s in registry...", + self.user_id, + self.client_session_id, + key, + value, + ) registry = get_registry(self.app) - await registry.set_resource(self._resource_key(), (key,value)) + await registry.set_resource(self._resource_key(), (key, value)) async def remove(self, key: str) -> None: - log.debug("user %s/tab %s removing %s from registry...", self.user_id, self.client_session_id, key) + log.debug( + "user %s/tab %s removing %s from registry...", + self.user_id, + self.client_session_id, + key, + ) registry = get_registry(self.app) await registry.remove_resource(self._resource_key(), key) async def find_users_of_resource(self, key: str, value: str) -> List[str]: - log.debug("user %s/tab %s finding %s:%s in registry..." 
,self.user_id, self.client_session_id, key, value) + log.debug( + "user %s/tab %s finding %s:%s in registry...", + self.user_id, + self.client_session_id, + key, + value, + ) registry = get_registry(self.app) registry_keys = await registry.find_keys((key, value)) users = list({x["user_id"] for x in registry_keys}) return users + @contextmanager -def managed_resource(user_id: str, client_session_id: str, app: web.Application) -> WebsocketRegistry: +def managed_resource( + user_id: str, client_session_id: str, app: web.Application +) -> WebsocketRegistry: registry = WebsocketRegistry(user_id, client_session_id, app) - yield registry + try: + yield registry + except Exception: + log.exception( + "Error in web-socket for user:%s, session:%s", user_id, client_session_id + ) + raise + + # TODO: PC->SAN?? exception handling? e.g. remove resource from registry? diff --git a/services/web/server/src/simcore_service_webserver/resources.py b/services/web/server/src/simcore_service_webserver/resources.py index d4b9255a3ee..f009ea9add7 100644 --- a/services/web/server/src/simcore_service_webserver/resources.py +++ b/services/web/server/src/simcore_service_webserver/resources.py @@ -7,10 +7,8 @@ resources = ResourcesFacade( package_name=__name__, distribution_name="simcore-service-webserver", - config_folder='config', + config_folder="config", ) -__all__ = ( - 'resources', -) +__all__ = ("resources",) diff --git a/services/web/server/src/simcore_service_webserver/rest.py b/services/web/server/src/simcore_service_webserver/rest.py index 67d4faea436..9b40c565a1b 100644 --- a/services/web/server/src/simcore_service_webserver/rest.py +++ b/services/web/server/src/simcore_service_webserver/rest.py @@ -27,14 +27,14 @@ log = logging.getLogger(__name__) -def get_openapi_specs_path(api_version_dir: Optional[str]=None) -> Path: +def get_openapi_specs_path(api_version_dir: Optional[str] = None) -> Path: if api_version_dir is None: api_version_dir = api_version_prefix - return resources.get_path(f'api/{api_version_dir}/openapi.yaml') + return resources.get_path(f"api/{api_version_dir}/openapi.yaml") -def load_openapi_specs(spec_path: Optional[Path]=None) -> OpenApiSpecs: +def load_openapi_specs(spec_path: Optional[Path] = None) -> OpenApiSpecs: if spec_path is None: spec_path = get_openapi_specs_path() @@ -45,9 +45,12 @@ def load_openapi_specs(spec_path: Optional[Path]=None) -> OpenApiSpecs: return specs -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=['simcore_service_webserver.security'], - logger=log) +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.security"], + logger=log, +) def setup(app: web.Application): cfg = get_rest_config(app) api_version_dir = cfg["version"] @@ -61,7 +64,9 @@ def setup(app: web.Application): major, *_ = specs.info.version if f"/v{major}" != base_path: - raise ValueError(f"Basepath naming {base_path} does not fit API version {specs.info.version}") + raise ValueError( + f"Basepath naming {base_path} does not fit API version {specs.info.version}" + ) # diagnostics routes routes = rest_routes.create(specs) @@ -72,15 +77,10 @@ def setup(app: web.Application): # rest API doc at /api/doc log.debug("OAS loaded from %s ", spec_path) - setup_swagger(app, - swagger_from_file=str(spec_path), - ui_version=3) - + setup_swagger(app, swagger_from_file=str(spec_path), ui_version=3) # alias setup_rest = setup -__all__ = ( - 'setup_rest' -) +__all__ = "setup_rest" diff --git a/services/web/server/src/simcore_service_webserver/rest_config.py 
b/services/web/server/src/simcore_service_webserver/rest_config.py index 9a58db47e3a..21117c9142f 100644 --- a/services/web/server/src/simcore_service_webserver/rest_config.py +++ b/services/web/server/src/simcore_service_webserver/rest_config.py @@ -10,17 +10,15 @@ from servicelib.application_keys import APP_CONFIG_KEY, APP_OPENAPI_SPECS_KEY -CONFIG_SECTION_NAME = 'rest' +CONFIG_SECTION_NAME = "rest" -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - "version": T.Enum("v0"), -}) +schema = T.Dict( + {T.Key("enabled", default=True, optional=True): T.Bool(), "version": T.Enum("v0"),} +) def get_rest_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] -__all__ =[ - 'APP_OPENAPI_SPECS_KEY' -] + +__all__ = ["APP_OPENAPI_SPECS_KEY"] diff --git a/services/web/server/src/simcore_service_webserver/rest_models.py b/services/web/server/src/simcore_service_webserver/rest_models.py index e3dab197066..ae28204555e 100644 --- a/services/web/server/src/simcore_service_webserver/rest_models.py +++ b/services/web/server/src/simcore_service_webserver/rest_models.py @@ -9,6 +9,7 @@ # NOTE: using these, optional and required fields are always transmitted! # NOTE: make some attrs nullable by default!? + @attr.s(auto_attribs=True) class RegistrationType: email: str @@ -16,7 +17,7 @@ class RegistrationType: confirm: str @classmethod - def from_body(cls, data): # struct-like unmarshalled data produced by + def from_body(cls, data): # struct-like unmarshalled data produced by # TODO: simplify return cls(email=data.email, password=data.password, confirm=data.confirm) @@ -24,8 +25,8 @@ def from_body(cls, data): # struct-like unmarshalled data produced by @attr.s(auto_attribs=True) class LogMessageType: message: str - level: str = 'INFO' - logger: str = 'user' + level: str = "INFO" + logger: str = "user" @attr.s(auto_attribs=True) @@ -37,10 +38,8 @@ class ErrorItemType: @classmethod def from_error(cls, err: BaseException): - item = cls( code = err.__class__.__name__, - message=str(err), - resource=None, - field=None + item = cls( + code=err.__class__.__name__, message=str(err), resource=None, field=None ) return item diff --git a/services/web/server/src/simcore_service_webserver/rest_routes.py b/services/web/server/src/simcore_service_webserver/rest_routes.py index f5ce8c35d12..8b6146f33ff 100644 --- a/services/web/server/src/simcore_service_webserver/rest_routes.py +++ b/services/web/server/src/simcore_service_webserver/rest_routes.py @@ -25,21 +25,21 @@ def create(specs: openapi.Spec) -> List[web.RouteDef]: # TODO: routing will be done automatically using operation_id/tags, etc... 
     # diagnostics --
-    path, handle = '/', rest_handlers.check_health
-    operation_id = specs.paths[path].operations['get'].operation_id
-    routes.append( web.get(base_path+path, handle, name=operation_id) )
+    path, handle = "/", rest_handlers.check_health
+    operation_id = specs.paths[path].operations["get"].operation_id
+    routes.append(web.get(base_path + path, handle, name=operation_id))

-    path, handle = '/check/{action}', rest_handlers.check_action
-    operation_id = specs.paths[path].operations['post'].operation_id
-    routes.append( web.post(base_path+path, handle, name=operation_id) )
+    path, handle = "/check/{action}", rest_handlers.check_action
+    operation_id = specs.paths[path].operations["post"].operation_id
+    routes.append(web.post(base_path + path, handle, name=operation_id))

-    path, handle = '/config', rest_handlers.get_config
-    operation_id = specs.paths[path].operations['get'].operation_id
-    routes.append( web.get(base_path+path, handle, name=operation_id) )
+    path, handle = "/config", rest_handlers.get_config
+    operation_id = specs.paths[path].operations["get"].operation_id
+    routes.append(web.get(base_path + path, handle, name=operation_id))

     # NOTE: Internal. Not shown in api/docs
-    path, handle = '/diagnostics', rest_handlers.get_diagnostics
-    operation_id = 'get_diagnotics' # specs.paths[path].operations['get'].operation_id
-    routes.append( web.get(base_path+path, handle, name=operation_id) )
+    path, handle = "/diagnostics", rest_handlers.get_diagnostics
+    operation_id = "get_diagnotics"  # specs.paths[path].operations['get'].operation_id
+    routes.append(web.get(base_path + path, handle, name=operation_id))

     return routes
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py
index eb9c6351771..da4eb1fa916 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/__init__.py
@@ -26,12 +26,13 @@
 ROUTE_NAME = MODULE_NAME
 module_name = module_name = __name__.replace(".__init__", "")

+
 async def _on_shutdown(app: web.Application):
     for ws in app[APP_SOCKETS_KEY]:
         await ws.close()

-@app_module_setup(module_name, ModuleCategory.ADDON,
-                  logger=logger)
+
+@app_module_setup(module_name, ModuleCategory.ADDON, logger=logger)
 def setup(app: web.Application, service_resolver: ServiceResolutionPolicy):
     """Sets up reverse-proxy subsystem in the application (a la aiohttp)

@@ -40,21 +41,19 @@ def setup(app: web.Application, service_resolver: ServiceResolutionPolicy):

     # Registers reverse proxy handlers customized for specific service types
     for name in jupyter.SUPPORTED_IMAGE_NAME:
-        chooser.register_handler(jupyter.handler,
-                                 image_name=name)
-
+        chooser.register_handler(jupyter.handler, image_name=name)

     for name in paraview.SUPPORTED_IMAGE_NAME:
         chooser.register_handler(paraview.handler, image_name=name)

     # /x/{serviceId}/{proxyPath:.*}
-    app.router.add_route(method='*', path=URL_PATH,
-                         handler=chooser.do_route, name=ROUTE_NAME)
+    app.router.add_route(
+        method="*", path=URL_PATH, handler=chooser.do_route, name=ROUTE_NAME
+    )

     # chooser has same lifetime as the application
     app[__name__] = {"chooser": chooser}

-
     # cleans up all sockets created by the proxy
     app[APP_SOCKETS_KEY] = list()
     app.on_shutdown.append(_on_shutdown)
@@ -63,6 +62,4 @@ def setup(app: web.Application, service_resolver: ServiceResolutionPolicy):

 # alias
 setup_reverse_proxy = setup

-__all__ = (
-    'setup_reverse_proxy'
-)
+__all__ = "setup_reverse_proxy"
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py
index b365532ea5e..7e62bc94966 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/abc.py
@@ -1,4 +1,3 @@
-
 import abc

 from yarl import URL
@@ -10,6 +9,7 @@ class ServiceResolutionPolicy(metaclass=abc.ABCMeta):
     """ Implements an interface to identify and resolve
     the location of a dynamic backend service
     """
+
     base_mountpoint = PROXY_MOUNTPOINT

     @abc.abstractmethod
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py
index 4539bf91b52..87f6fe328ff 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py
@@ -1,7 +1,6 @@
-
-#pylint: disable=unused-wildcard-import
-#pylint: disable=wildcard-import
-#pylint: disable=unused-import, redefined-outer-name, protected-access
+# pylint: disable=unused-wildcard-import
+# pylint: disable=wildcard-import
+# pylint: disable=unused-import, redefined-outer-name, protected-access

 # TODO: should be fixed in #710. TEMPORARY SOLUTION
@@ -14,39 +13,76 @@
 import traceback
 import warnings
 from types import SimpleNamespace, TracebackType
-from typing import (Any, Coroutine, Generator, Generic, Iterable, List,  # noqa
-                    Mapping, Optional, Set, Tuple, Type, TypeVar, Union)
+from typing import (
+    Any,
+    Coroutine,
+    Generator,
+    Generic,
+    Iterable,
+    List,  # noqa
+    Mapping,
+    Optional,
+    Set,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)

 import attr
 from aiohttp import hdrs, http, payload
 from aiohttp.abc import AbstractCookieJar
 from aiohttp.client import *
 from aiohttp.client import _SessionRequestContextManager
-from aiohttp.client_exceptions import (ClientConnectionError,
-                                       ClientConnectorCertificateError,
-                                       ClientConnectorError,
-                                       ClientConnectorSSLError, ClientError,
-                                       ClientHttpProxyError, ClientOSError,
-                                       ClientPayloadError,
-                                       ClientProxyConnectionError,
-                                       ClientResponseError, ClientSSLError,
-                                       ContentTypeError, InvalidURL,
-                                       ServerConnectionError,
-                                       ServerDisconnectedError,
-                                       ServerFingerprintMismatch,
-                                       ServerTimeoutError, TooManyRedirects,
-                                       WSServerHandshakeError)
-from aiohttp.client_reqrep import (ClientRequest, ClientResponse, Fingerprint,
-                                   RequestInfo, _merge_ssl_params)
+from aiohttp.client_exceptions import (
+    ClientConnectionError,
+    ClientConnectorCertificateError,
+    ClientConnectorError,
+    ClientConnectorSSLError,
+    ClientError,
+    ClientHttpProxyError,
+    ClientOSError,
+    ClientPayloadError,
+    ClientProxyConnectionError,
+    ClientResponseError,
+    ClientSSLError,
+    ContentTypeError,
+    InvalidURL,
+    ServerConnectionError,
+    ServerDisconnectedError,
+    ServerFingerprintMismatch,
+    ServerTimeoutError,
+    TooManyRedirects,
+    WSServerHandshakeError,
+)
+from aiohttp.client_reqrep import (
+    ClientRequest,
+    ClientResponse,
+    Fingerprint,
+    RequestInfo,
+    _merge_ssl_params,
+)
 from aiohttp.client_ws import ClientWebSocketResponse
 from aiohttp.connector import BaseConnector, TCPConnector, UnixConnector
 from aiohttp.cookiejar import CookieJar
-from aiohttp.helpers import (DEBUG, PY_36, BasicAuth, CeilTimeout,
-                             TimeoutHandle, get_running_loop, proxies_from_env,
-                             sentinel, strip_auth_from_url)
+from aiohttp.helpers import (
+    DEBUG,
+    PY_36,
+    BasicAuth,
+    CeilTimeout,
+    TimeoutHandle,
+    get_running_loop,
+    proxies_from_env,
+    sentinel,
+    strip_auth_from_url,
+)
 from aiohttp.http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
-from aiohttp.http_websocket import (WSHandshakeError, WSMessage,  # noqa
-                                    ws_ext_gen, ws_ext_parse)
+from aiohttp.http_websocket import (
+    WSHandshakeError,
+    WSMessage,  # noqa
+    ws_ext_gen,
+    ws_ext_parse,
+)
 from aiohttp.streams import FlowControlDataQueue
 from aiohttp.tracing import Trace, TraceConfig
 from aiohttp.typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
@@ -55,28 +91,29 @@


 def client_request(
-        method: str,
-        url: StrOrURL, *,
-        params: Optional[Mapping[str, str]]=None,
-        data: Any=None,
-        json: Any=None,
-        headers: LooseHeaders=None,
-        skip_auto_headers: Optional[Iterable[str]]=None,
-        auth: Optional[BasicAuth]=None,
-        allow_redirects: bool=True,
-        max_redirects: int=10,
-        compress: Optional[str]=None,
-        chunked: Optional[bool]=None,
-        expect100: bool=False,
-        raise_for_status: Optional[bool]=None,
-        read_until_eof: bool=True,
-        proxy: Optional[StrOrURL]=None,
-        proxy_auth: Optional[BasicAuth]=None,
-        timeout: Union[ClientTimeout, object]=sentinel,
-        cookies: Optional[LooseCookies]=None,
-        version: HttpVersion=http.HttpVersion11,
-        connector: Optional[BaseConnector]=None,
-        loop: Optional[asyncio.AbstractEventLoop]=None
+    method: str,
+    url: StrOrURL,
+    *,
+    params: Optional[Mapping[str, str]] = None,
+    data: Any = None,
+    json: Any = None,
+    headers: LooseHeaders = None,
+    skip_auto_headers: Optional[Iterable[str]] = None,
+    auth: Optional[BasicAuth] = None,
+    allow_redirects: bool = True,
+    max_redirects: int = 10,
+    compress: Optional[str] = None,
+    chunked: Optional[bool] = None,
+    expect100: bool = False,
+    raise_for_status: Optional[bool] = None,
+    read_until_eof: bool = True,
+    proxy: Optional[StrOrURL] = None,
+    proxy_auth: Optional[BasicAuth] = None,
+    timeout: Union[ClientTimeout, object] = sentinel,
+    cookies: Optional[LooseCookies] = None,
+    version: HttpVersion = http.HttpVersion11,
+    connector: Optional[BaseConnector] = None,
+    loop: Optional[asyncio.AbstractEventLoop] = None
 ) -> _SessionRequestContextManager:
     """ as aiohttp.client.request using a client session that does not decompress,
         i.e auto_decompress=False
@@ -87,25 +124,34 @@ def client_request(
         connector = TCPConnector(loop=loop, force_close=True)

     session = ClientSession(
-        loop=loop, cookies=cookies, version=version, timeout=timeout,
-        connector=connector, connector_owner=connector_owner,
-        auto_decompress=False)
+        loop=loop,
+        cookies=cookies,
+        version=version,
+        timeout=timeout,
+        connector=connector,
+        connector_owner=connector_owner,
+        auto_decompress=False,
+    )

     return _SessionRequestContextManager(
-        session._request(method, url,
-                         params=params,
-                         data=data,
-                         json=json,
-                         headers=headers,
-                         skip_auto_headers=skip_auto_headers,
-                         auth=auth,
-                         allow_redirects=allow_redirects,
-                         max_redirects=max_redirects,
-                         compress=compress,
-                         chunked=chunked,
-                         expect100=expect100,
-                         raise_for_status=raise_for_status,
-                         read_until_eof=read_until_eof,
-                         proxy=proxy,
-                         proxy_auth=proxy_auth,),
-        session)
+        session._request(
+            method,
+            url,
+            params=params,
+            data=data,
+            json=json,
+            headers=headers,
+            skip_auto_headers=skip_auto_headers,
+            auth=auth,
+            allow_redirects=allow_redirects,
+            max_redirects=max_redirects,
+            compress=compress,
+            chunked=chunked,
+            expect100=expect100,
+            raise_for_status=raise_for_status,
+            read_until_eof=read_until_eof,
+            proxy=proxy,
+            proxy_auth=proxy_auth,
+        ),
+        session,
+    )
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py
index 9446435854a..a8f435d6c4d 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/generic.py
@@ -24,9 +24,12 @@


 def check_ws_in_headers(request):
-    return request.headers.get('connection', '').lower() == 'upgrade' and \
-        request.headers.get('upgrade', '').lower() == 'websocket' and \
-        request.method == 'GET'
+    return (
+        request.headers.get("connection", "").lower() == "upgrade"
+        and request.headers.get("upgrade", "").lower() == "websocket"
+        and request.method == "GET"
+    )
+

 async def handle_websocket_requests(ws_server, request, target_url):
     client_session = aiohttp.ClientSession(cookies=request.cookies)
@@ -46,34 +49,36 @@ async def _ws_forward(ws_from, ws_to):
             elif ws_to.closed:
                 await ws_to.close(code=ws_to.close_code, message=msg.extra)
             else:
-                raise ValueError(
-                    'unexpected message type: %s' % pprint.pformat(msg))
+                raise ValueError("unexpected message type: %s" % pprint.pformat(msg))

     async with client_session.ws_connect(target_url) as ws_client:
-        await asyncio.wait([_ws_forward(ws_server, ws_client),
-                            _ws_forward(ws_client, ws_server)],
-                           return_when=asyncio.FIRST_COMPLETED)
+        await asyncio.wait(
+            [_ws_forward(ws_server, ws_client), _ws_forward(ws_client, ws_server)],
+            return_when=asyncio.FIRST_COMPLETED,
+        )

     return ws_server

+
 async def handle_web_request(request, target_url):
     async with client.request(
-        request.method, target_url,
+        request.method,
+        target_url,
         headers=request.headers.copy(),
         allow_redirects=False,
-        data=await request.read()
+        data=await request.read(),
     ) as res:
         body = await res.read()
         response = web.Response(
-            headers=res.headers.copy(),
-            status=res.status,
-            body=body
+            headers=res.headers.copy(), status=res.status, body=body
         )
         return response


 async def handler(request: web.Request, service_url: str, **_kargs):
-    target_url = URL(service_url).origin().with_path(request.path).with_query(request.query)
+    target_url = (
+        URL(service_url).origin().with_path(request.path).with_query(request.query)
+    )
     ws_available = False
     if check_ws_in_headers(request):
         ws = web.WebSocketResponse()
@@ -89,26 +94,26 @@ async def handler(request: web.Request, service_url: str, **_kargs):
             request.app[APP_SOCKETS_KEY].remove(ws)
     if not ws_available:
-        return ( await handle_web_request(request, target_url) )
-
+        return await handle_web_request(request, target_url)


 # OTHER IMPLEMENTATIONS ------------------------------------------------------

+
 async def handler_impl_2(request: web.Request, target_url: str):
     # FIXME: Taken tmp from https://github.com/weargoggles/aioproxy/blob/master/aioproxy.py
     start = time.time()
     async with aiohttp.client.request(
-        request.method, target_url,
+        request.method,
+        target_url,
         headers=request.headers,
         chunked=CHUNK,
         # response_class=ReverseProxyResponse,
     ) as r:
-        logger.debug('opened backend request in %d ms', ((time.time() - start) * 1000))
+        logger.debug("opened backend request in %d ms", ((time.time() - start) * 1000))

-        response = aiohttp.web.StreamResponse(status=r.status,
-                                              headers=r.headers)
+        response = aiohttp.web.StreamResponse(status=r.status, headers=r.headers)
         await response.prepare(request)
         content = r.content
         while True:
@@ -117,7 +122,7 @@ async def handler_impl_2(request: web.Request, target_url: str):
             break
         await response.write(chunk)

-    logger.debug('finished sending content in %d ms', ((time.time() - start) * 1000,))
+    logger.debug("finished sending content in %d ms", ((time.time() - start) * 1000,))
     await response.write_eof()
     return response
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py
index 196b43c9670..b0fa8673d5f 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/paraview.py
@@ -19,16 +19,22 @@
 from ..settings import APP_SOCKETS_KEY

-SUPPORTED_IMAGE_NAME = ["simcore/services/dynamic/3d-viewer",
-                        "simcore/services/dynamic/3d-viewer-gpu"]
+SUPPORTED_IMAGE_NAME = [
+    "simcore/services/dynamic/3d-viewer",
+    "simcore/services/dynamic/3d-viewer-gpu",
+]
 SUPPORTED_IMAGE_TAG = "==1.0.5"

 logger = logging.getLogger(__name__)

+
 def check_ws_in_headers(request):
-    return request.headers.get('connection', '').lower() == 'upgrade' and \
-        request.headers.get('upgrade', '').lower() == 'websocket' and \
-        request.method == 'GET'
+    return (
+        request.headers.get("connection", "").lower() == "upgrade"
+        and request.headers.get("upgrade", "").lower() == "websocket"
+        and request.method == "GET"
+    )
+

 async def handle_websocket_requests(ws_server, request: web.Request, target_url: URL):
     async def _ws_forward(ws_from, ws_to):
@@ -46,53 +52,61 @@ async def _ws_forward(ws_from, ws_to):
             elif ws_to.closed:
                 await ws_to.close(code=ws_to.close_code, message=msg.extra)
             else:
-                raise ValueError(
-                    'unexpected message type: %s' % pprint.pformat(msg))
+                raise ValueError("unexpected message type: %s" % pprint.pformat(msg))

     async with aiohttp.ClientSession(cookies=request.cookies) as session:
         # websocket connection with backend services
         async with session.ws_connect(target_url) as ws_client:
-            await asyncio.wait([_ws_forward(ws_server, ws_client),
-                                _ws_forward(ws_client, ws_server)],
-                               return_when=asyncio.FIRST_COMPLETED)
+            await asyncio.wait(
+                [_ws_forward(ws_server, ws_client), _ws_forward(ws_client, ws_server)],
+                return_when=asyncio.FIRST_COMPLETED,
+            )

             return ws_server

-async def handle_web_request(request: web.Request, target_url: URL, mount_point:str, proxy_path: str):
+
+async def handle_web_request(
+    request: web.Request, target_url: URL, mount_point: str, proxy_path: str
+):
     async with client.request(
-        request.method, target_url,
+        request.method,
+        target_url,
         headers=request.headers.copy(),
         allow_redirects=False,
-        data=await request.read()
+        data=await request.read(),
     ) as res:
         # special handling for paraview
         headers = res.headers.copy()
-        del headers['content-length']
+        del headers["content-length"]
         body = await res.read()
-        if proxy_path == 'Visualizer.js':
-            body = body.replace(b'"https"===window.location.protocol', b'window.location.protocol.startsWith("https")')
-            body = body.replace(b'"/ws"', b'"%s/ws"' %
-                                mount_point.encode(), 1)
+        if proxy_path == "Visualizer.js":
             body = body.replace(
-                b'"/paraview/"', b'"%s/paraview/"' % mount_point.encode(), 1)
+                b'"https"===window.location.protocol',
+                b'window.location.protocol.startsWith("https")',
+            )
+            body = body.replace(b'"/ws"', b'"%s/ws"' % mount_point.encode(), 1)
+            body = body.replace(
+                b'"/paraview/"', b'"%s/paraview/"' % mount_point.encode(), 1
+            )
             logger.info("fixed Visualizer.js paths on the fly")
-        response = web.Response(
-            headers=headers,
-            status=res.status,
-            body=body
-        )
+        response = web.Response(headers=headers, status=res.status, body=body)
         return response

-async def handler(request: web.Request, service_url: str, mount_point: str, proxy_path: str, **_kargs):
+
+async def handler(
+    request: web.Request, service_url: str, mount_point: str, proxy_path: str, **_kargs
+):
     logger.debug("handling request %s, using service url %s", request, service_url)
-    target_url = URL(service_url).origin().with_path(request.path).with_query(request.query)
+    target_url = (
+        URL(service_url).origin().with_path(request.path).with_query(request.query)
+    )
     ws_available = False
     if check_ws_in_headers(request):
         ws = web.WebSocketResponse()
         ws_available = ws.can_prepare(request)
         if ws_available:
             await ws.prepare(request)
-            logger.info('##### WS_SERVER %s', pprint.pformat(ws))
+            logger.info("##### WS_SERVER %s", pprint.pformat(ws))
             try:
                 request.app[APP_SOCKETS_KEY].append(ws)
                 # paraview special handling, it is somehow fixed at the root endpoint
@@ -102,18 +116,18 @@ async def handler(request: web.Request, service_url: str, mount_point: str, prox
             finally:
                 request.app[APP_SOCKETS_KEY].remove(ws)
     if not ws_available:
-        return (await handle_web_request(request, target_url, mount_point, proxy_path))
+        return await handle_web_request(request, target_url, mount_point, proxy_path)
+

 if __name__ == "__main__":
     # dummies for manual testing
-    BASE_URL = 'http://0.0.0.0:8080'
-    MOUNT_POINT = '/x/fakeUuid'
+    BASE_URL = "http://0.0.0.0:8080"
+    MOUNT_POINT = "/x/fakeUuid"

     def adapter(req: web.Request):
-        proxy_path = req.match_info.get('proxyPath',
-                                        'no proxyPath placeholder defined')
+        proxy_path = req.match_info.get("proxyPath", "no proxyPath placeholder defined")
         return handler(req, BASE_URL, MOUNT_POINT, proxy_path)

     app = web.Application()
-    app.router.add_route('*', MOUNT_POINT + '{proxyPath:.*}', adapter)
+    app.router.add_route("*", MOUNT_POINT + "{proxyPath:.*}", adapter)
     web.run_app(app, port=3985)
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py
index 92eb7abf9c9..15c27838091 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/routing.py
@@ -15,7 +15,9 @@
 from aiohttp import web

 from .abc import ServiceResolutionPolicy
-from .handlers.jupyter import handler as default_handler #TODO: jupyter shall be the generic one
+from .handlers.jupyter import (
+    handler as default_handler,
+)  # TODO: jupyter shall be the generic one
 from .settings import PROXY_PATH_KEY, SERVICE_ID_KEY

 logger = logging.getLogger(__name__)
@@ -29,6 +31,7 @@ class Wrapper:

     NOTE: wrapper.cache.clear()
     """
+
     encapsulated: ServiceResolutionPolicy
     cache: Dict[str, Tuple[str, str]] = OrderedDict()
     MAXSIZE = 128
@@ -53,8 +56,7 @@ async def resolve(self, service_identifier: str) -> Tuple[str, str]:
         except Exception:
             logger.debug("Failed to resolve service", exc_info=True)
             # TODO: translate exception into HTTPStatus
-            raise web.HTTPServiceUnavailable(
-                reason="Cannot resolve service")
+            raise web.HTTPServiceUnavailable(reason="Cannot resolve service")


 @attr.s(auto_attribs=True)
@@ -62,9 +64,9 @@ class ReverseChooser:
     resolver: Wrapper = attr.ib(converter=Wrapper)
     handlers: Dict = dict()

-    def register_handler(self,
-                         handler: Callable[..., web.StreamResponse], *,
-                         image_name: str):
+    def register_handler(
+        self, handler: Callable[..., web.StreamResponse], *, image_name: str
+    ):
         self.handlers[image_name] = handler

     async def do_route(self, request: web.Request) -> web.Response:
@@ -75,9 +77,11 @@ async def do_route(self, request: web.Request) -> web.Response:
         service_identifier = request.match_info.get(SERVICE_ID_KEY)
         proxy_path = request.match_info.get(PROXY_PATH_KEY)
-        mountpoint = request.path[:-len(proxy_path)].rstrip("/")
+        mountpoint = request.path[: -len(proxy_path)].rstrip("/")

-        image_name, service_url = await cli.resolve(service_identifier)  # pylint: disable=E1101
+        image_name, service_url = await cli.resolve(
+            service_identifier
+        )  # pylint: disable=E1101

         # TODO: reset cache for given service_identifier when it is shutdown or reused
         # To clear cache, use cli.cache.clear()
@@ -89,8 +93,7 @@ async def do_route(self, request: web.Request) -> web.Response:

         # FIXME: add version as well
         handler = self.handlers.get(image_name, default_handler)
-
-        response = await handler(request, service_url,
-                                 mount_point=mountpoint,
-                                 proxy_path=proxy_path)
+        response = await handler(
+            request, service_url, mount_point=mountpoint, proxy_path=proxy_path
+        )
         return response
diff --git a/services/web/server/src/simcore_service_webserver/security.py b/services/web/server/src/simcore_service_webserver/security.py
index 85e9223ab9a..9c0e8c0e05e 100644
--- a/services/web/server/src/simcore_service_webserver/security.py
+++ b/services/web/server/src/simcore_service_webserver/security.py
@@ -35,8 +35,7 @@ def setup(app: web.Application):
     authorization_policy = AuthorizationPolicy(app, access_model)
     aiohttp_security.setup(app, identity_policy, authorization_policy)

+
 setup_security = setup

-__all__ = (
-    'setup_security'
-)
+__all__ = "setup_security"
diff --git a/services/web/server/src/simcore_service_webserver/security_access_model.py b/services/web/server/src/simcore_service_webserver/security_access_model.py
index aaaeacd271b..07bac80cc07 100644
--- a/services/web/server/src/simcore_service_webserver/security_access_model.py
+++ b/services/web/server/src/simcore_service_webserver/security_access_model.py
@@ -20,24 +20,28 @@
 @attr.s(auto_attribs=True)
 class RolePermissions:
     role: UserRole
-    allowed: List[str]=attr.Factory(list) # named permissions allowed
-    check: Dict[str, Callable[[],bool]]=attr.Factory(dict) # checked permissions: permissions with conditions
-    inherits: List[str]=attr.Factory(list)
+    allowed: List[str] = attr.Factory(list)  # named permissions allowed
+    check: Dict[str, Callable[[], bool]] = attr.Factory(
+        dict
+    )  # checked permissions: permissions with conditions
+    inherits: List[str] = attr.Factory(list)

     @classmethod
-    def from_rawdata(cls, role, value:Dict):
+    def from_rawdata(cls, role, value: Dict):

         if isinstance(role, str):
             name = role
             role = UserRole[name]

-        role = RolePermissions(role=role, allowed=[], check=[], inherits=value.get('inherits', []))
+        role = RolePermissions(
+            role=role, allowed=[], check=[], inherits=value.get("inherits", [])
+        )

         allowed = set()
         check = dict()
-        for item in value.get('can', list()):
+        for item in value.get("can", list()):
             if isinstance(item, Dict):
-                check[item['name']] = item['check']
+                check[item["name"]] = item["check"]
             elif isinstance(item, str):
                 allowed.add(item)
             else:
@@ -57,14 +61,15 @@ class RoleBasedAccessModel:
     - For checks with operation expressions (e.g. can operation A & operation B?) see check_access free function below
     """
+
     def __init__(self, roles: List[RolePermissions]):
-        self.roles = {r.role:r for r in roles}
+        self.roles = {r.role: r for r in roles}
         # TODO: all operations allowed for a given role
         # TODO: build a tree out of the list of allowed operations
         # TODO: operations to ADD/REMOVE/EDIT permissions in a role

-    async def can(self, role: UserRole, operation: str, context: Dict=None) -> bool:
+    async def can(self, role: UserRole, operation: str, context: Dict = None) -> bool:
         # pylint: disable=too-many-return-statements

         # undefined operation TODO: check if such a name is defined??
@@ -88,10 +93,12 @@ async def can(self, role: UserRole, operation: str, context: Dict=None) -> bool:
             check = role_access.check[operation]
             try:
                 if inspect.iscoroutinefunction(check):
-                    return (await check(context))
+                    return await check(context)
                 return check(context)
-            except Exception: #pylint: disable=broad-except
-                log.exception("Check operation '%s', shall not raise [%s]", operation, check)
+            except Exception:  # pylint: disable=broad-except
+                log.exception(
+                    "Check operation '%s', shall not raise [%s]", operation, check
+                )
                 return False

         # check if any parents
@@ -103,37 +110,40 @@ async def can(self, role: UserRole, operation: str, context: Dict=None) -> bool:
                 return True
         return False

-    async def who_can(self, operation: str, context: Dict=None):
+    async def who_can(self, operation: str, context: Dict = None):
         allowed = []
         for role in self.roles:
             if await self.can(role, operation, context):
                 allowed.append(role)
         return allowed

-
     @classmethod
     def from_rawdata(cls, raw: Dict):
-        roles = [RolePermissions.from_rawdata(role, value) for role, value in raw.items()]
+        roles = [
+            RolePermissions.from_rawdata(role, value) for role, value in raw.items()
+        ]
         return RoleBasedAccessModel(roles)

     # TODO: print table??


 # TODO: implement expression parser: reg = re.compile(r'(&|\||\bAND\b|\bOR\b|\(|\))')
-operators_pattern = re.compile(r'(&|\||\bAND\b|\bOR\b)')
+operators_pattern = re.compile(r"(&|\||\bAND\b|\bOR\b)")

-async def check_access(model: RoleBasedAccessModel, role:UserRole, operations: str, context: Dict=None) -> bool:
+
+async def check_access(
+    model: RoleBasedAccessModel, role: UserRole, operations: str, context: Dict = None
+) -> bool:
     """ Extends `RoleBasedAccessModel.can` to check access to boolean expressions of operations

     Returns True if a user with a role has permission on a given context
     """
     tokens = operators_pattern.split(operations)
-    if len(tokens)==1:
+    if len(tokens) == 1:
         return await model.can(role, tokens[0], context)

-    if len(tokens)==3:
-        tokens = [t.strip() for t in tokens if t.strip() != '']
+    if len(tokens) == 3:
+        tokens = [t.strip() for t in tokens if t.strip() != ""]
         lhs, op, rhs = tokens
         can_lhs = await model.can(role, lhs, context)
         if op in ["AND", "&"]:
diff --git a/services/web/server/src/simcore_service_webserver/security_api.py b/services/web/server/src/simcore_service_webserver/security_api.py
index 55d428b24d2..ac76c16af1c 100644
--- a/services/web/server/src/simcore_service_webserver/security_api.py
+++ b/services/web/server/src/simcore_service_webserver/security_api.py
@@ -7,9 +7,14 @@
 import passlib.hash
 import sqlalchemy as sa
 from aiohttp import web
-from aiohttp_security.api import (AUTZ_KEY, authorized_userid,
-                                  check_permission, forget, is_anonymous,
-                                  remember)
+from aiohttp_security.api import (
+    AUTZ_KEY,
+    authorized_userid,
+    check_permission,
+    forget,
+    is_anonymous,
+    remember,
+)
 from aiopg.sa import Engine

 from .db_models import UserStatus, users
@@ -21,27 +26,36 @@
 async def check_credentials(engine: Engine, email: str, password: str) -> bool:
     async with engine.acquire() as conn:
         query = users.select().where(
-            sa.and_(users.c.email == email,
-                    users.c.status != UserStatus.BANNED)
+            sa.and_(users.c.email == email, users.c.status != UserStatus.BANNED)
         )
         ret = await conn.execute(query)
         user = await ret.fetchone()
         if user is not None:
-            return check_password(password, user['password_hash'] )
+            return check_password(password, user["password_hash"])
     return False

+
 def encrypt_password(password):
     return passlib.hash.sha256_crypt.encrypt(password, rounds=1000)

+
 def check_password(password, password_hash):
     return passlib.hash.sha256_crypt.verify(password, password_hash)

+
 def get_access_model(app: web.Application):
     autz_policy = app[AUTZ_KEY]
     return autz_policy.access_model

+
 __all__ = (
-    'encrypt_password', 'check_credentials',
-    'authorized_userid', 'forget', 'remember', 'is_anonymous', 'check_permission',
-    'get_access_model', 'UserRole'
+    "encrypt_password",
+    "check_credentials",
+    "authorized_userid",
+    "forget",
+    "remember",
+    "is_anonymous",
+    "check_permission",
+    "get_access_model",
+    "UserRole",
 )
diff --git a/services/web/server/src/simcore_service_webserver/security_authorization.py b/services/web/server/src/simcore_service_webserver/security_authorization.py
index bc13812e03a..c02c4d61b9d 100644
--- a/services/web/server/src/simcore_service_webserver/security_authorization.py
+++ b/services/web/server/src/simcore_service_webserver/security_authorization.py
@@ -23,7 +23,9 @@ class AuthorizationPolicy(AbstractAuthorizationPolicy):
     app: web.Application
     access_model: RoleBasedAccessModel
-    timed_cache: ExpiringDict = attr.ib(init=False, default=ExpiringDict(max_len=100, max_age_seconds=10))
+    timed_cache: ExpiringDict = attr.ib(
init=False, default=ExpiringDict(max_len=100, max_age_seconds=10)
+    )
 
     @property
     def engine(self) -> Engine:
@@ -31,8 +33,8 @@ def engine(self) -> Engine:
 
         :return: database's engine
         """
-        # TODO: what if db is not available?
-        #return self.app.config_dict[APP_DB_ENGINE_KEY]
+        # TODO: what if db is not available?
+        # return self.app.config_dict[APP_DB_ENGINE_KEY]
         return self.app[APP_DB_ENGINE_KEY]
 
     @retry(**PostgresRetryPolicyUponOperation(log).kwargs)
@@ -41,8 +43,7 @@ async def _pg_query_user(self, identity: str) -> RowProxy:
         row = self.timed_cache.get(identity)
         if not row:
             query = users.select().where(
-                sa.and_(users.c.email == identity,
-                        users.c.status != UserStatus.BANNED)
+                sa.and_(users.c.email == identity, users.c.status != UserStatus.BANNED)
             )
             async with self.engine.acquire() as conn:
                 # NOTE: sometimes it raises psycopg2.DatabaseError in #880 and #1160
@@ -61,7 +62,12 @@ async def authorized_userid(self, identity: str) -> Optional[str]:
         user = await self._pg_query_user(identity)
         return user["id"] if user else None
 
-    async def permits(self, identity: str, permission: Union[str,Tuple], context: Optional[Dict]=None) -> bool:
+    async def permits(
+        self,
+        identity: str,
+        permission: Union[str, Tuple],
+        context: Optional[Dict] = None,
+    ) -> bool:
         """ Determines whether an identified user has permission
 
        :param identity: session identifier; corresponds to the user's email as defined in login.handlers.registration
@@ -70,12 +76,16 @@ async def permits(self, identity: str, permission: Union[str,Tuple], context: Op
        :return: True if user has permission to execute this operation within the given context
        """
        if identity is None or permission is None:
-            log.debug("Invalid indentity [%s] of permission [%s]. Denying access.", identity, permission)
+            log.debug(
+                "Invalid identity [%s] or permission [%s]. Denying access.",
+                identity,
+                permission,
+            )
             return False
 
         user = await self._pg_query_user(identity)
         if user:
-            role = user.get('role')
+            role = user.get("role")
             return await check_access(self.access_model, role, permission, context)
 
         return False
diff --git a/services/web/server/src/simcore_service_webserver/security_permissions.py b/services/web/server/src/simcore_service_webserver/security_permissions.py
index 9411025ca4c..c9ca981aaa3 100644
--- a/services/web/server/src/simcore_service_webserver/security_permissions.py
+++ b/services/web/server/src/simcore_service_webserver/security_permissions.py
@@ -19,7 +19,7 @@ def named_permissions() -> List[str]:
     return permissions
 
 
-def split_permission_name(permission:str) -> Tuple[str, str]:
+def split_permission_name(permission: str) -> Tuple[str, str]:
     parts = permission.split(".")
     resource, action = ".".join(parts[:-1]), parts[-1]
     return (resource, action)
diff --git a/services/web/server/src/simcore_service_webserver/security_roles.py b/services/web/server/src/simcore_service_webserver/security_roles.py
index b2b388f1570..dcd69710af2 100644
--- a/services/web/server/src/simcore_service_webserver/security_roles.py
+++ b/services/web/server/src/simcore_service_webserver/security_roles.py
@@ -18,61 +18,54 @@
 # If only needed to discriminate a resource use `resource.sub_resource.*`
 #
 ROLES_PERMISSIONS = {
-    UserRole.ANONYMOUS: {
-        "can": []  # Add only permissions here to handles that do not require login.
-        # Anonymous user can only access
-    },
-    UserRole.GUEST: {
-        "can": [
-            # Anonymous users need access to the filesystem because files are being transferred
-            "project.update",
-            "storage.locations.*", # "storage.datcore.read"
-            "storage.files.*",
-
-            "project.open",
-            "project.read", # "studies.user.read",
-            # "studies.templates.read"
-            "project.node.read",
-            # NOTE: All services* are not necessary since it only requires login
-            # and there is no distinction among logged in users.
-            # TODO: kept temporarily as a way to denote resources
-            "services.pipeline.*", # "study.update",
-            # "study.start",
-            # "study.stop",
-            "services.interactive.*",# "study.node.start"
-            "services.catalog.*",
-        ]
-    },
-    UserRole.USER: {
-        "can": [
-            "project.create", # "studies.user.create",
-            "project.close",
-            "project.delete", # "study.node.create",
-            # "study.node.delete",
-            # "study.node.rename",
-            # "study.edge.create",
-            # "study.edge.delete"
-            "project.node.create",
-            "project.node.delete",
-            "project.tag.*", # "study.tag"
-            "user.profile.update", # "preferences.user.update",
-            # "preferences.role.update"
-            "user.tokens.*", # "preferences.token.create",
-            # "preferences.token.delete"
-            "tag.crud.*" # "preferences.tag"
-
-            # NOTE: All services* are not necessary since it only requires login
-            # and there is no distinction among logged in users.
-            # TODO: kept temporarily as a way to denote resources
-        ],
-        "inherits": [UserRole.GUEST, UserRole.ANONYMOUS]
-    },
-    UserRole.TESTER: {
-        "can": [
-            "project.template.create",
-        ],
-        "inherits": [UserRole.USER]
-    }
+    UserRole.ANONYMOUS: {
+        "can": []  # Add only permissions here to handlers that do not require login.
+        # Anonymous user can only access
+    },
+    UserRole.GUEST: {
+        "can": [
+            # Anonymous users need access to the filesystem because files are being transferred
+            "project.update",
+            "storage.locations.*",  # "storage.datcore.read"
+            "storage.files.*",
+            "project.open",
+            "project.read",  # "studies.user.read",
+            # "studies.templates.read"
+            "project.node.read",
+            # NOTE: All services* are not necessary since it only requires login
+            # and there is no distinction among logged in users.
+            # TODO: kept temporarily as a way to denote resources
+            "services.pipeline.*",  # "study.update",
+            # "study.start",
+            # "study.stop",
+            "services.interactive.*",  # "study.node.start"
+            "services.catalog.*",
+        ]
+    },
+    UserRole.USER: {
+        "can": [
+            "project.create",  # "studies.user.create",
+            "project.close",
+            "project.delete",  # "study.node.create",
+            # "study.node.delete",
+            # "study.node.rename",
+            # "study.edge.create",
+            # "study.edge.delete"
+            "project.node.create",
+            "project.node.delete",
+            "project.tag.*",  # "study.tag"
+            "user.profile.update",  # "preferences.user.update",
+            # "preferences.role.update"
+            "user.tokens.*",  # "preferences.token.create",
+            # "preferences.token.delete"
+            "tag.crud.*"  # "preferences.tag"
+            # NOTE: All services* are not necessary since it only requires login
+            # and there is no distinction among logged in users.
+            # TODO: kept temporarily as a way to denote resources
+        ],
+        "inherits": [UserRole.GUEST, UserRole.ANONYMOUS],
+    },
+    UserRole.TESTER: {"can": ["project.template.create",], "inherits": [UserRole.USER]},
 }
 
 #
@@ -100,8 +93,8 @@
 ###        "study.node.start",
 #          "study.node.data.push",   <----------???
 #          "study.node.data.delete", <----------???
-#XX        "study.edge.create",
-#XX        "study.edge.delete"
+# XX "study.edge.create",
+# XX "study.edge.delete"
 #      ],
 #      "tester": [
 #          "services.all.read",   <----------???
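The expression support in `check_access` (see security_access_model.py above) composes these named permissions with `&`/`|` or `AND`/`OR`. A self-contained sketch of that tokenize-and-evaluate step, with a plain set standing in for the role lookup that `RoleBasedAccessModel.can` performs:

import re

operators_pattern = re.compile(r"(&|\||\bAND\b|\bOR\b)")  # same pattern as above

def check(expression: str, granted: set) -> bool:
    # `granted` is a toy stand-in for model.can(role, operation, context)
    def can(op: str) -> bool:
        return op in granted

    tokens = [t.strip() for t in operators_pattern.split(expression) if t.strip()]
    if len(tokens) == 1:
        return can(tokens[0])
    if len(tokens) == 3:
        lhs, op, rhs = tokens
        return can(lhs) and can(rhs) if op in ("AND", "&") else can(lhs) or can(rhs)
    raise ValueError(f"unsupported expression: {expression!r}")

assert check("project.read & project.open", {"project.read", "project.open"})
assert not check("project.delete | project.create", {"project.read"})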
diff --git a/services/web/server/src/simcore_service_webserver/session.py b/services/web/server/src/simcore_service_webserver/session.py index 13d366c014f..f5d42ecf222 100644 --- a/services/web/server/src/simcore_service_webserver/session.py +++ b/services/web/server/src/simcore_service_webserver/session.py @@ -31,6 +31,7 @@ async def my_handler(request) logger = logging.getLogger(__file__) + def generate_key(): # secret_key must be 32 url-safe base64-encoded bytes fernet_key = fernet.Fernet.generate_key() @@ -46,17 +47,17 @@ def setup_session(app: web.Application): cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] # secret key needed by EncryptedCookieStorage: is *bytes* key with length of *32* - secret_key_bytes = cfg["secret_key"].encode('utf-8') - if len(secret_key_bytes)==0: + secret_key_bytes = cfg["secret_key"].encode("utf-8") + if len(secret_key_bytes) == 0: raise ValueError("Empty %s.secret_key in config. Expected at least length 32") - while len(secret_key_bytes)<32: + while len(secret_key_bytes) < 32: secret_key_bytes += secret_key_bytes # EncryptedCookieStorage urlsafe_b64decode inside if passes bytes storage = EncryptedCookieStorage( - secret_key=secret_key_bytes[:32], - cookie_name="API_SESSION") + secret_key=secret_key_bytes[:32], cookie_name="API_SESSION" + ) aiohttp_session.setup(app, storage) @@ -65,7 +66,4 @@ def setup_session(app: web.Application): get_session = aiohttp_session.get_session -__all__ = ( - 'setup_session', - 'get_session' -) +__all__ = ("setup_session", "get_session") diff --git a/services/web/server/src/simcore_service_webserver/session_config.py b/services/web/server/src/simcore_service_webserver/session_config.py index b5128d9ca2b..620ed3f99ee 100644 --- a/services/web/server/src/simcore_service_webserver/session_config.py +++ b/services/web/server/src/simcore_service_webserver/session_config.py @@ -5,8 +5,6 @@ """ import trafaret as T -CONFIG_SECTION_NAME = 'session' +CONFIG_SECTION_NAME = "session" -schema = T.Dict({ - "secret_key": T.String -}) +schema = T.Dict({"secret_key": T.String}) diff --git a/services/web/server/src/simcore_service_webserver/socketio/__init__.py b/services/web/server/src/simcore_service_webserver/socketio/__init__.py index e3e8622b94e..0b526bc75c6 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/__init__.py +++ b/services/web/server/src/simcore_service_webserver/socketio/__init__.py @@ -15,16 +15,16 @@ log = logging.getLogger(__name__) + @app_module_setup(__name__, ModuleCategory.SYSTEM, logger=log) def setup(app: web.Application): mgr = None sio = AsyncServer(async_mode="aiohttp", client_manager=mgr, logging=log) sio.attach(app) - app[APP_CLIENT_SOCKET_SERVER_KEY] = sio + app[APP_CLIENT_SOCKET_SERVER_KEY] = sio handlers_utils.register_handlers(app, handlers) + # alias setup_sockets = setup -__all__ = ( - "setup_sockets" -) +__all__ = "setup_sockets" diff --git a/services/web/server/src/simcore_service_webserver/socketio/config.py b/services/web/server/src/simcore_service_webserver/socketio/config.py index ac73cd57777..4bfa8b308e1 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/config.py +++ b/services/web/server/src/simcore_service_webserver/socketio/config.py @@ -11,16 +11,18 @@ from servicelib.application_keys import APP_CONFIG_KEY from socketio import AsyncServer -CONFIG_SECTION_NAME = 'socketio' +CONFIG_SECTION_NAME = "socketio" APP_CLIENT_SOCKET_SERVER_KEY = __name__ + ".socketio_socketio" APP_CLIENT_SOCKET_DECORATED_HANDLERS_KEY = __name__ + ".socketio_handlers" -schema = 
T.Dict({
-    T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()),
-})
+schema = T.Dict(
+    {T.Key("enabled", default=True, optional=True): T.Or(T.Bool(), T.Int()),}
+)
+
 
 def get_config(app: web.Application) -> Dict:
     return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME]
 
+
 def get_socket_server(app: web.Application) -> AsyncServer:
     return app[APP_CLIENT_SOCKET_SERVER_KEY]
diff --git a/services/web/server/src/simcore_service_webserver/socketio/events.py b/services/web/server/src/simcore_service_webserver/socketio/events.py
index f37f07eb6e5..a07e00d5111 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/events.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/events.py
@@ -1,22 +1,51 @@
 """
-This module takes care of sending events to the connected webclient through the socket.io interface.
+This module takes care of sending events to the connected webclient through the socket.io interface
 """
 import asyncio
 import json
-from typing import Any, Dict
+from typing import Any, Dict, List
 
-from aiohttp import web
+from aiohttp.web import Application
 
 from ..resource_manager.websocket_manager import managed_resource
-from .config import get_socket_server
+from .config import AsyncServer, get_socket_server
 
 
-async def post_messages(app: web.Application, user_id: str, messages: Dict[str, Any]) -> None:
-    sio = get_socket_server(app)
-    with managed_resource(user_id, None, app) as rt:
-        socket_ids = await rt.find_socket_ids()
+async def post_messages(
+    app: Application, user_id: str, messages: Dict[str, Any]
+) -> None:
+    sio: AsyncServer = get_socket_server(app)
+
+    with managed_resource(user_id, None, app) as registry:
+        socket_ids: List[str] = await registry.find_socket_ids()
         for sid in socket_ids:
-            # we only send the data to the right sockets (there might be several tabs open)
-            tasks = [sio.emit(event, json.dumps(data), room=sid) for event, data in messages.items()]
-            asyncio.ensure_future(asyncio.gather(*tasks))
+            # We only send the data to the right sockets
+            # Notice that there might be several tabs open
+            tasks = [
+                sio.emit(event_name, json.dumps(data), room=sid)
+                for event_name, data in messages.items()
+            ]
+            asyncio.ensure_future(
+                asyncio.gather(
+                    *tasks
+                )  # TODO: PC->SAN??, return_exceptions=True otherwise will error '_GatheringFuture exception was never retrieved'
+            )
+
+
+# FIXME: PC->SAN: I wonder if here is the reason for this unhandled
+#
+# {
+#  "txt": " exception=OSError()>",
+#  "type": "",
+#  "done": true,
+#  "cancelled": false,
+#  "stack": null,
+#  "exception": ": "
+# },
+# and https://github.com/miguelgrinberg/python-engineio/blob/master/engineio/async_drivers/aiohttp.py#L114) shows that ``IOError = OSError`` is raised
+# when received data is corrupted!!
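On the TODO above: a fire-and-forget `asyncio.gather` keeps any failure inside its `_GatheringFuture`, and if nothing ever awaits that future the loop logs 'exception was never retrieved'. A small self-contained sketch of the `return_exceptions=True` remedy the comment suggests (`emit` is a dummy stand-in for `sio.emit`):

import asyncio

async def emit(ok: bool) -> None:
    if not ok:
        raise OSError("simulated corrupted frame")

async def main() -> None:
    tasks = [emit(True), emit(False)]
    # exceptions become results instead of sitting unretrieved in the future
    results = await asyncio.gather(*tasks, return_exceptions=True)
    for result in results:
        if isinstance(result, Exception):
            print("emit failed:", result)

asyncio.get_event_loop().run_until_complete(main())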
+#
+#
+# It might be that sio.emit raises an exception, which propagates through gather
+#
diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers.py b/services/web/server/src/simcore_service_webserver/socketio/handlers.py
index 57aadcfbdbb..2fb99cf8edc 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/handlers.py
@@ -24,6 +24,7 @@
 
 log = logging.getLogger(__file__)
 
+
 async def connect(sid: str, environ: Dict, app: web.Application) -> bool:
     """socketio reserved handler for when the frontend connects through socket.io
 
@@ -44,8 +45,11 @@ async def connect(sid: str, environ: Dict, app: web.Application) -> bool:
 
     return True
 
+
 @login_required
-async def authenticate_user(sid: str, app: web.Application, request: web.Request) -> None:
+async def authenticate_user(
+    sid: str, app: web.Application, request: web.Request
+) -> None:
     """throws web.HTTPUnauthorized when the user is not recognized. Keeps the original request.
     """
     user_id = request.get(RQT_USERID_KEY, ANONYMOUS_USER_ID)
@@ -65,9 +69,13 @@ async def authenticate_user(sid: str, app: web.Application, request: web.Request
     log.info("socketio connection from user %s", user_id)
     await rt.set_socket_id(sid)
 
+
 async def disconnect_other_sockets(sio, sockets: List[str]) -> None:
     log.debug("disconnecting sockets %s", sockets)
-    logout_tasks = [sio.emit("logout", to=sid, data={"reason": "user logged out"}) for sid in sockets]
+    logout_tasks = [
+        sio.emit("logout", to=sid, data={"reason": "user logged out"})
+        for sid in sockets
+    ]
     await asyncio.gather(*logout_tasks, return_exceptions=True)
     # let the client react
     await asyncio.sleep(3)
@@ -75,9 +83,12 @@ async def disconnect_other_sockets(sio, sockets: List[str]) -> None:
     disconnect_tasks = [sio.disconnect(sid=sid) for sid in sockets]
     await asyncio.gather(*disconnect_tasks, return_exceptions=True)
 
+
 @observe(event="SIGNAL_USER_LOGOUT")
-async def user_logged_out(user_id: str, client_session_id: Optional[str], app: web.Application) -> None:
-    log.debug("user %s must be disconnected", user_id)
+async def user_logged_out(
+    user_id: str, client_session_id: Optional[str], app: web.Application
+) -> None:
+    log.debug("user %s must be disconnected", user_id)
     # find the sockets related to the user
     sio = get_socket_server(app)
     with managed_resource(user_id, client_session_id, app) as rt:
@@ -93,7 +104,6 @@ async def user_logged_out(user_id: str, client_session_id: Optional[str], app: w
     # let's do it as a task so it does not block us here
     asyncio.ensure_future(disconnect_other_sockets(sio, sockets))
 
-
 async def disconnect(sid: str, app: web.Application) -> None:
     """socketio reserved handler for when the socket.io connection is disconnected.
@@ -105,7 +115,7 @@ async def disconnect(sid: str, app: web.Application) -> None: log.debug("client in room %s disconnecting", sid) sio = get_socket_server(app) async with sio.session(sid) as socketio_session: - if "user_id" in socketio_session: + if "user_id" in socketio_session: user_id = socketio_session["user_id"] client_session_id = socketio_session["client_session_id"] with managed_resource(user_id, client_session_id, app) as rt: diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py b/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py index 1cbdc1cbd98..e97b26ac1e3 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py +++ b/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py @@ -12,26 +12,38 @@ def socket_io_handler(app: web.Application): I.e. python-socketio handler expect functions of type `async def function(sid, *args, **kwargs)` This allows to create a function of type `async def function(sid, *args, **kwargs, app: web.Application) """ + def decorator(func): @wraps(func) async def wrapped(*args, **kwargs): return await func(*args, **kwargs, app=app) + return wrapped + return decorator + def has_socket_io_handler_signature(fun) -> bool: # last parameter is web.Application - return list(inspect.signature(fun).parameters.values())[-1].annotation == web.Application + return ( + list(inspect.signature(fun).parameters.values())[-1].annotation + == web.Application + ) + def register_handlers(app: web.Application, module: ModuleType): sio = get_socket_server(app) - predicate = lambda obj: inspect.isfunction(obj) and \ - has_socket_io_handler_signature(obj) and \ - inspect.iscoroutinefunction(obj) and \ - inspect.getmodule(obj) == module + predicate = ( + lambda obj: inspect.isfunction(obj) + and has_socket_io_handler_signature(obj) + and inspect.iscoroutinefunction(obj) + and inspect.getmodule(obj) == module + ) member_fcts = inspect.getmembers(module, predicate) # convert handler - partial_fcts = [socket_io_handler(app)(func_handler) for _, func_handler in member_fcts] + partial_fcts = [ + socket_io_handler(app)(func_handler) for _, func_handler in member_fcts + ] app[APP_CLIENT_SOCKET_DECORATED_HANDLERS_KEY] = partial_fcts # register the fcts for func in partial_fcts: diff --git a/services/web/server/src/simcore_service_webserver/statics.py b/services/web/server/src/simcore_service_webserver/statics.py index 8dcce0abb04..85dfc53383c 100644 --- a/services/web/server/src/simcore_service_webserver/statics.py +++ b/services/web/server/src/simcore_service_webserver/statics.py @@ -26,7 +26,7 @@ def get_client_outdir(app: web.Application) -> Path: # pylint 2.3.0 produces 'E1101: Instance of 'Path' has no 'expanduser' member (no-member)' ONLY # with the installed code and not with the development code! 
-    client_dir = Path(cfg["client_outdir"]).expanduser() #pylint: disable=E1101
+    client_dir = Path(cfg["client_outdir"]).expanduser()  # pylint: disable=E1101
     if not client_dir.exists():
         txt = reason = "Front-end application is not available"
         if cfg["testing"]:
@@ -34,6 +34,7 @@ def get_client_outdir(app: web.Application) -> Path:
         raise web.HTTPServiceUnavailable(reason=reason, text=txt)
     return client_dir
 
+
 async def index(request: web.Request):
     """ Serves boot application under index
 
@@ -47,9 +48,9 @@ async def index(request: web.Request):
 
 def write_statics_file(directory):
     statics = {}
-    statics['stackName'] = os.environ.get('SWARM_STACK_NAME')
-    statics['buildDate'] = os.environ.get('BUILD_DATE')
-    with open(directory / 'statics.json', 'w') as statics_file:
+    statics["stackName"] = os.environ.get("SWARM_STACK_NAME")
+    statics["buildDate"] = os.environ.get("BUILD_DATE")
+    with open(directory / "statics.json", "w") as statics_file:
         json.dump(statics, statics_file)
 
@@ -61,16 +62,17 @@ def setup_statics(app: web.Application):
         outdir = get_client_outdir(app)
 
         # Checks integrity of RIA source before serving
-        EXPECTED_FOLDERS = ('osparc', 'resource', 'transpiled')
+        EXPECTED_FOLDERS = ("osparc", "resource", "transpiled")
         folders = [x for x in outdir.iterdir() if x.is_dir()]
 
         for name in EXPECTED_FOLDERS:
             folder_names = [path.name for path in folders]
             if name not in folder_names:
                 raise web.HTTPServiceUnavailable(
-                    reason="Invalid front-end source-output folders" \
-                    " Expected %s, got %s in %s" %(EXPECTED_FOLDERS, folder_names, outdir),
-                    text ="Front-end application is not available"
+                    reason="Invalid front-end source-output folders"
+                    " Expected %s, got %s in %s"
+                    % (EXPECTED_FOLDERS, folder_names, outdir),
+                    text="Front-end application is not available",
                 )
 
         # TODO: map ui to /ui or create an alias!?
@@ -79,10 +81,10 @@ def setup_statics(app: web.Application):
         # NOTE: source-output and build-output have both the same subfolder structure
         # TODO: check whether this can be done at once
         for path in folders:
-            app.router.add_static('/' + path.name, path)
+            app.router.add_static("/" + path.name, path)
 
         # Create statics file
-        write_statics_file(outdir / 'resource')
+        write_statics_file(outdir / "resource")
 
     except web.HTTPServiceUnavailable as ex:
         log.exception(ex.text)
diff --git a/services/web/server/src/simcore_service_webserver/storage.py b/services/web/server/src/simcore_service_webserver/storage.py
index 12a72dca3f5..734283adb61 100644
--- a/services/web/server/src/simcore_service_webserver/storage.py
+++ b/services/web/server/src/simcore_service_webserver/storage.py
@@ -10,14 +10,14 @@
 
 from .
import storage_routes
 from .storage_config import get_config
 
-from servicelib.application_setup import app_module_setup,ModuleCategory
+from servicelib.application_setup import app_module_setup, ModuleCategory
 
 log = logging.getLogger(__name__)
 
 
 @app_module_setup(__name__, ModuleCategory.ADDON, logger=log)
 def setup(app: web.Application):
-    specs = app[APP_OPENAPI_SPECS_KEY] # validated openapi specs
+    specs = app[APP_OPENAPI_SPECS_KEY]  # validated openapi specs
 
     routes = storage_routes.create(specs)
     app.router.add_routes(routes)
 
@@ -28,7 +28,4 @@ def setup(app: web.Application):
 get_storage_config = get_config
 
 
-__all__ = (
-    'setup_storage',
-    'get_storage_config'
-)
+__all__ = ("setup_storage", "get_storage_config")
diff --git a/services/web/server/src/simcore_service_webserver/storage_api.py b/services/web/server/src/simcore_service_webserver/storage_api.py
index c24e1195bec..fb969adc196 100644
--- a/services/web/server/src/simcore_service_webserver/storage_api.py
+++ b/services/web/server/src/simcore_service_webserver/storage_api.py
@@ -17,25 +17,31 @@ def _get_storage_client(app: web.Application):
     cfg = get_config(app)
 
     # storage service API endpoint
-    endpoint = URL.build(scheme='http',
-                         host=cfg['host'],
-                         port=cfg['port']).with_path(cfg["version"])
+    endpoint = URL.build(scheme="http", host=cfg["host"], port=cfg["port"]).with_path(
+        cfg["version"]
+    )
 
     session = get_client_session(app)
     return session, endpoint
 
 
-async def copy_data_folders_from_project(app, source_project, destination_project, nodes_map, user_id):
+async def copy_data_folders_from_project(
+    app, source_project, destination_project, nodes_map, user_id
+):
     # TODO: optimize: check whether the project actually has data before doing the call
     client, api_endpoint = _get_storage_client(app)
 
     # /simcore-s3/folders:
     url = (api_endpoint / "simcore-s3/folders").with_query(user_id=user_id)
-    async with client.post( url , json={
-        'source':source_project,
-        'destination': destination_project,
-        'nodes_map': nodes_map
-    }, ssl=False) as resp:
+    async with client.post(
+        url,
+        json={
+            "source": source_project,
+            "destination": destination_project,
+            "nodes_map": nodes_map,
+        },
+        ssl=False,
+    ) as resp:
         payload = await resp.json()
 
         updated_project, error = unwrap_envelope(payload)
         if error:
@@ -49,23 +55,33 @@ async def copy_data_folders_from_project(app, source_project, destination_projec
 
 async def _delete(session, target_url):
     async with session.delete(target_url, ssl=False) as resp:
-        log.info("delete_data_folders_of_project request responded with status %s", resp.status )
+        log.info(
+            "delete_data_folders_of_project request responded with status %s",
+            resp.status,
+        )
         # NOTE: context will automatically close connection
 
+
 async def delete_data_folders_of_project(app, project_id, user_id):
     # SEE api/specs/storage/v0/openapi.yaml
     session, api_endpoint = _get_storage_client(app)
-    url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query(user_id=user_id)
-
+    url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query(
+        user_id=user_id
+    )
     await _delete(session, url)
 
-    #asyncio.ensure_future(_delete(session, url))
-    #loop = asyncio.get_event_loop()
-    #loop.run_until_complete(_delete(session, url))
+    # asyncio.ensure_future(_delete(session, url))
+    # loop = asyncio.get_event_loop()
+    # loop.run_until_complete(_delete(session, url))
+
 
-async def delete_data_folders_of_project_node(app, project_id: str, node_id: str, user_id: str):
+async def delete_data_folders_of_project_node(
+    app, project_id: str, node_id: str,
user_id: str +): # SEE api/specs/storage/v0/openapi.yaml session, api_endpoint = _get_storage_client(app) - url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query(user_id=user_id, node_id=node_id) + url = (api_endpoint / f"simcore-s3/folders/{project_id}").with_query( + user_id=user_id, node_id=node_id + ) await _delete(session, url) diff --git a/services/web/server/src/simcore_service_webserver/storage_config.py b/services/web/server/src/simcore_service_webserver/storage_config.py index 758c3a8c9a2..f6fc8a8da29 100644 --- a/services/web/server/src/simcore_service_webserver/storage_config.py +++ b/services/web/server/src/simcore_service_webserver/storage_config.py @@ -9,17 +9,23 @@ from aiohttp import ClientSession, web from servicelib.application_keys import APP_CLIENT_SESSION_KEY, APP_CONFIG_KEY -CONFIG_SECTION_NAME = 'storage' +CONFIG_SECTION_NAME = "storage" + +schema = T.Dict( + { + T.Key("enabled", default=True, optional=True): T.Bool(), + T.Key("host", default="storage"): T.String(), + T.Key("port", default=11111): T.Int(), + T.Key("version", default="v0"): T.Regexp( + regexp=r"^v\d+" + ), # storage API version basepath + } +) -schema = T.Dict({ - T.Key("enabled", default=True, optional=True): T.Bool(), - T.Key("host", default="storage"): T.String(), - T.Key("port", default=11111): T.Int(), - T.Key("version", default="v0"): T.Regexp(regexp=r'^v\d+') # storage API version basepath -}) def get_config(app: web.Application) -> Dict: return app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] + def get_client_session(app: web.Application) -> ClientSession: return app[APP_CLIENT_SESSION_KEY] diff --git a/services/web/server/src/simcore_service_webserver/storage_handlers.py b/services/web/server/src/simcore_service_webserver/storage_handlers.py index f92c7f0b116..700829442db 100644 --- a/services/web/server/src/simcore_service_webserver/storage_handlers.py +++ b/services/web/server/src/simcore_service_webserver/storage_handlers.py @@ -21,20 +21,21 @@ def _resolve_storage_url(request: web.Request) -> URL: cfg = get_config(request.app) # storage service API endpoint - endpoint = URL.build(scheme='http', - host=cfg['host'], - port=cfg['port']).with_path(cfg["version"]) + endpoint = URL.build(scheme="http", host=cfg["host"], port=cfg["port"]).with_path( + cfg["version"] + ) BASEPATH_INDEX = 3 # strip basepath from webserver API path (i.e. webserver api version) # >>> URL('http://storage:1234/v5/storage/asdf/').raw_parts[3:] # ('asdf', '') - suffix = "/".join( request.url.raw_parts[BASEPATH_INDEX:] ) + suffix = "/".join(request.url.raw_parts[BASEPATH_INDEX:]) # TODO: check request.query to storage! unsafe!? 
url = (endpoint / suffix).with_query(request.query).update_query(user_id=userid) return url + async def _request_storage(request: web.Request, method: str): await extract_and_validate(request) @@ -51,36 +52,41 @@ async def _request_storage(request: web.Request, method: str): return payload -#--------------------------------------------------------------------- +# --------------------------------------------------------------------- + @login_required async def get_storage_locations(request: web.Request): await check_permission(request, "storage.locations.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_datasets_metadata(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_files_metadata(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_files_metadata_dataset(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload + @login_required async def get_file_metadata(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload @@ -95,19 +101,19 @@ async def update_file_meta_data(request: web.Request): @login_required async def download_file(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'GET') + payload = await _request_storage(request, "GET") return payload @login_required async def upload_file(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'PUT') + payload = await _request_storage(request, "PUT") return payload @login_required async def delete_file(request: web.Request): await check_permission(request, "storage.files.*") - payload = await _request_storage(request, 'DELETE') + payload = await _request_storage(request, "DELETE") return payload diff --git a/services/web/server/src/simcore_service_webserver/storage_routes.py b/services/web/server/src/simcore_service_webserver/storage_routes.py index b682604d03b..a1f31ae18d8 100644 --- a/services/web/server/src/simcore_service_webserver/storage_routes.py +++ b/services/web/server/src/simcore_service_webserver/storage_routes.py @@ -17,7 +17,7 @@ def create(specs: openapi.Spec) -> List[web.RouteDef]: # TODO: consider the case in which server creates routes for both v0 and v1!!! # TODO: should this be taken from servers instead? - BASEPATH = '/v' + specs.info.version.split('.')[0] + BASEPATH = "/v" + specs.info.version.split(".")[0] log.debug("creating %s ", __name__) routes = [] @@ -25,42 +25,62 @@ def create(specs: openapi.Spec) -> List[web.RouteDef]: # TODO: routing will be done automatically using operation_id/tags, etc... 
# storage -- - path, handler = '/storage/locations', storage_handlers.get_storage_locations - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handler, name=operation_id)) - - path, handler = '/storage/locations/{location_id}/datasets', storage_handlers.get_datasets_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handler, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/metadata', storage_handlers.get_files_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/datasets/{dataset_id}/metadata', storage_handlers.get_files_metadata_dataset - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/{fileId}/metadata', storage_handlers.get_file_metadata - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) + path, handler = "/storage/locations", storage_handlers.get_storage_locations + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handler, name=operation_id)) + + path, handler = ( + "/storage/locations/{location_id}/datasets", + storage_handlers.get_datasets_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handler, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/metadata", + storage_handlers.get_files_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/datasets/{dataset_id}/metadata", + storage_handlers.get_files_metadata_dataset, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}/metadata", + storage_handlers.get_file_metadata, + ) + operation_id = specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) # TODO: Implements update # path, handle = '/{location_id}/files/{fileId}/metadata', handlers.update_file_metadata # operation_id = specs.paths[path].operations['patch'].operation_id # routes.append( web.patch(BASEPATH+path, handle, name=operation_id) ) - path, handle = '/storage/locations/{location_id}/files/{fileId}', storage_handlers.download_file - operation_id = specs.paths[path].operations['get'].operation_id - routes.append(web.get(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/{fileId}', storage_handlers.delete_file - operation_id = specs.paths[path].operations['delete'].operation_id - routes.append(web.delete(BASEPATH+path, handle, name=operation_id)) - - path, handle = '/storage/locations/{location_id}/files/{fileId}', storage_handlers.upload_file - operation_id = specs.paths[path].operations['put'].operation_id - routes.append(web.put(BASEPATH+path, handle, name=operation_id)) - + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}", + storage_handlers.download_file, + ) + operation_id = 
specs.paths[path].operations["get"].operation_id + routes.append(web.get(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}", + storage_handlers.delete_file, + ) + operation_id = specs.paths[path].operations["delete"].operation_id + routes.append(web.delete(BASEPATH + path, handle, name=operation_id)) + + path, handle = ( + "/storage/locations/{location_id}/files/{fileId}", + storage_handlers.upload_file, + ) + operation_id = specs.paths[path].operations["put"].operation_id + routes.append(web.put(BASEPATH + path, handle, name=operation_id)) return routes diff --git a/services/web/server/src/simcore_service_webserver/studies_access.py b/services/web/server/src/simcore_service_webserver/studies_access.py index 68bb360049b..41f0947f3ad 100644 --- a/services/web/server/src/simcore_service_webserver/studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_access.py @@ -28,6 +28,7 @@ BASE_UUID = uuid.UUID("71e0eb5e-0797-4469-89ba-00a0df4d338a") + @lru_cache() def compose_uuid(template_uuid, user_id, query="") -> str: """ Creates a new uuid composing a project's and user ids such that @@ -35,7 +36,9 @@ def compose_uuid(template_uuid, user_id, query="") -> str: Enforces a constraint: a user CANNOT have multiple copies of the same template """ - new_uuid = str( uuid.uuid5(BASE_UUID, str(template_uuid) + str(user_id) + str(query)) ) + new_uuid = str( + uuid.uuid5(BASE_UUID, str(template_uuid) + str(user_id) + str(query)) + ) return new_uuid @@ -50,6 +53,7 @@ async def get_public_project(app: web.Application, project_uuid: str): prj = await db.get_template_project(project_uuid, only_published=True) return prj + # TODO: from .users import create_temporary_user async def create_temporary_user(request: web.Request): """ @@ -59,6 +63,7 @@ async def create_temporary_user(request: web.Request): from .login.handlers import ACTIVE, GUEST from .login.utils import get_client_ip, get_random_string from .security_api import encrypt_password + # from .utils import generate_passphrase # from .utils import generate_password @@ -68,20 +73,22 @@ async def create_temporary_user(request: web.Request): # FIXME: # username = generate_passphrase(number_of_words=2).replace(" ", "_").replace("'", "") username = get_random_string(min_len=5) email = username + "@guest-at-osparc.io" - # TODO: temporarily while developing, a fixed password - password = "guest" #generate_password() - - user = await db.create_user({ - 'name': username, - 'email': email, - 'password_hash': encrypt_password(password), - 'status': ACTIVE, - 'role': GUEST, - 'created_ip': get_client_ip(request), - }) + password = get_random_string(min_len=12) + + user = await db.create_user( + { + "name": username, + "email": email, + "password_hash": encrypt_password(password), + "status": ACTIVE, + "role": GUEST, + "created_ip": get_client_ip(request), + } + ) return user + # TODO: from .users import get_user? async def get_authorized_user(request: web.Request) -> Dict: from .login.cfg import get_storage @@ -89,11 +96,14 @@ async def get_authorized_user(request: web.Request) -> Dict: db = get_storage(request.app) userid = await authorized_userid(request) - user = await db.get_user({'id': userid}) + user = await db.get_user({"id": userid}) return user + # TODO: from .projects import ...? 
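`compose_uuid` (defined earlier in studies_access.py) leans on the determinism of `uuid.uuid5`: the same (template, user, parameters) triple always yields the same project id, which is what enforces the one-copy-per-user constraint. A runnable illustration, using the same `BASE_UUID` as above but made-up template and user ids:

import uuid

BASE_UUID = uuid.UUID("71e0eb5e-0797-4469-89ba-00a0df4d338a")  # as in studies_access.py

def compose_uuid(template_uuid, user_id, query="") -> str:
    # uuid5 is a pure function of (namespace, name), so repeats collide on purpose
    return str(uuid.uuid5(BASE_UUID, str(template_uuid) + str(user_id) + str(query)))

a = compose_uuid("some-template-uuid", 42)
assert a == compose_uuid("some-template-uuid", 42)  # same user: existing copy is reused
assert a != compose_uuid("some-template-uuid", 43)  # another user: independent copy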
-async def copy_study_to_account(request: web.Request, template_project: Dict, user: Dict):
+async def copy_study_to_account(
+    request: web.Request, template_project: Dict, user: Dict
+):
     """ Creates a copy of the study to a given project in user's account
 
@@ -111,7 +121,9 @@ async def copy_study_to_account(request: web.Request, template_project: Dict, us
         template_parameters = dict(request.query)
 
         # assign id to copy
-        project_uuid = compose_uuid(template_project["uuid"], user["id"], str(template_parameters))
+        project_uuid = compose_uuid(
+            template_project["uuid"], user["id"], str(template_parameters)
+        )
 
         try:
             # Avoids multiple copies of the same template on each account
@@ -121,12 +133,16 @@ async def copy_study_to_account(request: web.Request, template_project: Dict, us
 
         except ProjectNotFoundError:
             # new project from template
-            project = await clone_project(request, template_project, user["id"], forced_copy_project_id=project_uuid)
+            project = await clone_project(
+                request, template_project, user["id"], forced_copy_project_id=project_uuid
+            )
 
             # check project inputs and substitute template_parameters
             if template_parameters:
                 log.info("Substituting parameters '%s' in template", template_parameters)
-                project = substitute_parameterized_inputs(project, template_parameters) or project
+                project = (
+                    substitute_parameterized_inputs(project, template_parameters) or project
+                )
 
             await db.add_project(project, user["id"], force_project_uuid=True)
 
@@ -145,8 +161,10 @@ async def access_study(request: web.Request) -> web.Response:
 
     template_project = await get_public_project(request.app, project_id)
     if not template_project:
-        raise web.HTTPNotFound(reason=f"Requested study ({project_id}) has not been published.\
-             Please contact the data curators for more information.")
+        raise web.HTTPNotFound(
+            reason=f"Requested study ({project_id}) has not been published.\
+             Please contact the data curators for more information."
+        )
 
     user = None
     is_anonymous_user = await is_anonymous(request)
@@ -159,28 +177,39 @@ async def access_study(request: web.Request) -> web.Response:
     if not user:
         raise RuntimeError("Unable to start user session")
 
-    log.debug("Granted access to study '%d' for user %s. Copying study over ...", template_project.get('name'), user.get('email'))
+    log.debug(
+        "Granted access to study '%s' for user %s. Copying study over ...",
+        template_project.get("name"),
+        user.get("email"),
+    )
 
     copied_project_id = await copy_study_to_account(request, template_project, user)
 
     log.debug("Study %s copied", copied_project_id)
 
     try:
-        redirect_url = request.app.router[INDEX_RESOURCE_NAME].url_for().with_fragment("/study/{}".format(copied_project_id))
+        redirect_url = (
+            request.app.router[INDEX_RESOURCE_NAME]
+            .url_for()
+            .with_fragment("/study/{}".format(copied_project_id))
+        )
     except KeyError:
-        log.error("Cannot redirect to website because route was not registered. Probably qx output was not ready and it was disabled (see statics.py)")
-        raise RuntimeError("Unable to serve front-end. Study has been anyway copied over to user.")
+        log.error(
+            "Cannot redirect to website because route was not registered. Probably qx output was not ready and it was disabled (see statics.py)"
+        )
+        raise RuntimeError(
+            "Unable to serve front-end. The study has nevertheless been copied over to the user."
+ ) response = web.HTTPFound(location=redirect_url) if is_anonymous_user: log.debug("Auto login for anonymous user %s", user["name"]) - identity = user['email'] + identity = user["email"] await remember(request, response, identity) raise response -@app_module_setup(__name__, ModuleCategory.ADDON, - logger=log) +@app_module_setup(__name__, ModuleCategory.ADDON, logger=log) def setup(app: web.Application): cfg = app[APP_CONFIG_KEY]["main"] @@ -188,18 +217,20 @@ def setup(app: web.Application): study_handler = access_study if not cfg["studies_access_enabled"]: study_handler = login_required(access_study) - log.warning("'%s' config explicitly disables anonymous users from this feature", __name__) + log.warning( + "'%s' config explicitly disables anonymous users from this feature", + __name__, + ) # TODO: make sure that these routes are filtered properly in active middlewares - app.router.add_routes([ - web.get(r"/study/{id}", study_handler, name="study"), - ]) + app.router.add_routes( + [web.get(r"/study/{id}", study_handler, name="study"),] + ) return True + # alias setup_studies_access = setup -__all__ = ( - 'setup_studies_access' -) +__all__ = "setup_studies_access" diff --git a/services/web/server/src/simcore_service_webserver/tag_handlers.py b/services/web/server/src/simcore_service_webserver/tag_handlers.py index 642da4332ff..3ef6db725c6 100644 --- a/services/web/server/src/simcore_service_webserver/tag_handlers.py +++ b/services/web/server/src/simcore_service_webserver/tag_handlers.py @@ -10,10 +10,13 @@ @login_required async def list_tags(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] async with engine.acquire() as conn: - columns = [col for col in tags.columns if col.key != 'user_id'] # pylint: disable=not-an-iterable + # pylint: disable=not-an-iterable + columns = [ + col for col in tags.columns if col.key != "user_id" + ] query = sa.select(columns).where(tags.c.user_id == uid) result = [] async for row_proxy in conn.execute(query): @@ -24,21 +27,21 @@ async def list_tags(request: web.Request): @login_required async def update_tag(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - tag_id = request.match_info.get('tag_id') + tag_id = request.match_info.get("tag_id") tag_data = await request.json() async with engine.acquire() as conn: # pylint: disable=no-value-for-parameter - query = tags.update().values( - name=tag_data['name'], - description=tag_data['description'], - color=tag_data['color'] - ).where(and_(tags.c.id == tag_id, tags.c.user_id == uid)).returning( - tags.c.id, - tags.c.name, - tags.c.description, - tags.c.color + query = ( + tags.update() + .values( + name=tag_data["name"], + description=tag_data["description"], + color=tag_data["color"], + ) + .where(and_(tags.c.id == tag_id, tags.c.user_id == uid)) + .returning(tags.c.id, tags.c.name, tags.c.description, tags.c.color) ) async with conn.execute(query) as result: if result.rowcount == 1: @@ -49,21 +52,20 @@ async def update_tag(request: web.Request): @login_required async def create_tag(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] tag_data = await request.json() async with engine.acquire() as conn: # 
pylint: disable=no-value-for-parameter - query = tags.insert().values( - user_id=uid, - name=tag_data['name'], - description=tag_data['description'], - color=tag_data['color'] - ).returning( - tags.c.id, - tags.c.name, - tags.c.description, - tags.c.color + query = ( + tags.insert() + .values( + user_id=uid, + name=tag_data["name"], + description=tag_data["description"], + color=tag_data["color"], + ) + .returning(tags.c.id, tags.c.name, tags.c.description, tags.c.color) ) async with conn.execute(query) as result: if result.rowcount == 1: @@ -74,13 +76,11 @@ async def create_tag(request: web.Request): @login_required async def delete_tag(request: web.Request): - await check_permission(request, 'tag.crud.*') + await check_permission(request, "tag.crud.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - tag_id = request.match_info.get('tag_id') + tag_id = request.match_info.get("tag_id") async with engine.acquire() as conn: # pylint: disable=no-value-for-parameter - query = tags.delete().where( - and_(tags.c.id == tag_id, tags.c.user_id == uid) - ) + query = tags.delete().where(and_(tags.c.id == tag_id, tags.c.user_id == uid)) async with conn.execute(query) as result: - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/src/simcore_service_webserver/tags.py b/services/web/server/src/simcore_service_webserver/tags.py index 0392024bf0c..51873a4c354 100644 --- a/services/web/server/src/simcore_service_webserver/tags.py +++ b/services/web/server/src/simcore_service_webserver/tags.py @@ -6,26 +6,32 @@ from aiohttp import web from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from . 
import tag_handlers from .rest_config import APP_OPENAPI_SPECS_KEY logger = logging.getLogger(__name__) -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=['simcore_service_webserver.rest'], - logger=logger) + +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.rest"], + logger=logger, +) def setup(app: web.Application): # routes specs = app[APP_OPENAPI_SPECS_KEY] routes = map_handlers_with_operations( - get_handlers_from_namespace(tag_handlers), - filter(lambda o: "tag" in o[3], iter_path_operations(specs)), - strict=True + get_handlers_from_namespace(tag_handlers), + filter(lambda o: "tag" in o[3], iter_path_operations(specs)), + strict=True, ) app.router.add_routes(routes) @@ -33,6 +39,4 @@ def setup(app: web.Application): # alias setup_tags = setup -__all__ = ( - 'setup_tags' -) +__all__ = "setup_tags" diff --git a/services/web/server/src/simcore_service_webserver/tracing/__init__.py b/services/web/server/src/simcore_service_webserver/tracing/__init__.py index 064f73e43da..ffb1a0621dc 100644 --- a/services/web/server/src/simcore_service_webserver/tracing/__init__.py +++ b/services/web/server/src/simcore_service_webserver/tracing/__init__.py @@ -8,22 +8,22 @@ from servicelib.tracing import schema -CONFIG_SECTION_NAME = 'tracing' +CONFIG_SECTION_NAME = "tracing" log = logging.getLogger(__name__) + @app_module_setup(__name__, ModuleCategory.ADDON, logger=log) def setup(app: web.Application): config = app[APP_CONFIG_KEY] - host=config["main"]["host"] - port=config["main"]["port"] - return setup_tracing(app, "simcore_service_webserver", host, port, config["tracing"]) + host = config["main"]["host"] + port = config["main"]["port"] + return setup_tracing( + app, "simcore_service_webserver", host, port, config["tracing"] + ) + # alias setup_app_tracing = setup tracing_section_name = CONFIG_SECTION_NAME -__all__ = ( - "setup_app_tracing", - "schema", - "tracing_section_name" -) +__all__ = ("setup_app_tracing", "schema", "tracing_section_name") diff --git a/services/web/server/src/simcore_service_webserver/users.py b/services/web/server/src/simcore_service_webserver/users.py index 50032ff6636..b2d176083d7 100644 --- a/services/web/server/src/simcore_service_webserver/users.py +++ b/services/web/server/src/simcore_service_webserver/users.py @@ -6,26 +6,32 @@ from aiohttp import web from servicelib.application_setup import ModuleCategory, app_module_setup -from servicelib.rest_routing import (get_handlers_from_namespace, - iter_path_operations, - map_handlers_with_operations) +from servicelib.rest_routing import ( + get_handlers_from_namespace, + iter_path_operations, + map_handlers_with_operations, +) from . 
import users_handlers from .rest_config import APP_OPENAPI_SPECS_KEY logger = logging.getLogger(__name__) -@app_module_setup(__name__, ModuleCategory.ADDON, - depends=['simcore_service_webserver.rest'], - logger=logger) + +@app_module_setup( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.rest"], + logger=logger, +) def setup(app: web.Application): # routes specs = app[APP_OPENAPI_SPECS_KEY] routes = map_handlers_with_operations( - get_handlers_from_namespace(users_handlers), - filter(lambda o: "me" in o[1].split("/"), iter_path_operations(specs)), - strict=True + get_handlers_from_namespace(users_handlers), + filter(lambda o: "me" in o[1].split("/"), iter_path_operations(specs)), + strict=True, ) app.router.add_routes(routes) @@ -33,6 +39,4 @@ def setup(app: web.Application): # alias setup_users = setup -__all__ = ( - 'setup_users' -) +__all__ = "setup_users" diff --git a/services/web/server/src/simcore_service_webserver/users_handlers.py b/services/web/server/src/simcore_service_webserver/users_handlers.py index bd0ed4d7b09..22766349a55 100644 --- a/services/web/server/src/simcore_service_webserver/users_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users_handlers.py @@ -26,22 +26,23 @@ async def get_my_profile(request: web.Request): @retry(**PostgresRetryPolicyUponOperation(logger).kwargs) async def _query_db(uid, engine): async with engine.acquire() as conn: - query = sa.select([ - users.c.email, - users.c.role, - users.c.name]).where(users.c.id == uid) + query = sa.select([users.c.email, users.c.role, users.c.name]).where( + users.c.id == uid + ) result = await conn.execute(query) return await result.first() - row = await _query_db(uid=request[RQT_USERID_KEY], engine=request.app[APP_DB_ENGINE_KEY]) - parts = row['name'].split(".") + [""] + row = await _query_db( + uid=request[RQT_USERID_KEY], engine=request.app[APP_DB_ENGINE_KEY] + ) + parts = row["name"].split(".") + [""] return { - 'login': row['email'], - 'first_name': parts[0], - 'last_name': parts[1], - 'role': row['role'].name.capitalize(), - 'gravatar_id': gravatar_hash(row['email']) + "login": row["email"], + "first_name": parts[0], + "last_name": parts[1], + "role": row["role"].name.capitalize(), + "gravatar_id": gravatar_hash(row["email"]), } @@ -55,22 +56,18 @@ async def update_my_profile(request: web.Request): body = await request.json() async with engine.acquire() as conn: - query = sa.select([users.c.name]).where( - users.c.id == uid) + query = sa.select([users.c.name]).where(users.c.id == uid) default_name = await conn.scalar(query) parts = default_name.split(".") + [""] - name = body.get('first_name', parts[0]) + "." + body.get('last_name', parts[1]) + name = body.get("first_name", parts[0]) + "." + body.get("last_name", parts[1]) async with engine.acquire() as conn: - query = (users.update() - .where(users.c.id == uid) - .values(name=name) - ) + query = users.update().where(users.c.id == uid).values(name=name) resp = await conn.execute(query) - assert resp.rowcount == 1 # nosec + assert resp.rowcount == 1 # nosec - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") # me/tokens/ ------------------------------------------------------ @@ -87,13 +84,13 @@ async def create_tokens(request: web.Request): # TODO: if service already, then IntegrityError is raised! How to deal with db exceptions?? 
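One possible answer to the TODO above, sketched with the names used in `create_tokens` (`tokens`, `uid`, `body`); the helper itself is hypothetical and not part of this changeset. aiopg propagates psycopg2's `IntegrityError` on a duplicate insert, which the handler could map to a 409 instead of a 500:

import json

import psycopg2
from aiohttp import web

async def _insert_token(conn, tokens, uid, body):
    stmt = tokens.insert().values(
        user_id=uid, token_service=body["service"], token_data=body
    )
    try:
        await conn.execute(stmt)
    except psycopg2.IntegrityError as err:
        # unique constraint hit: a token for this service already exists
        raise web.HTTPConflict(
            text=json.dumps({"error": f"token for {body['service']} already exists"}),
            content_type="application/json",
        ) from err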
async with engine.acquire() as conn: stmt = tokens.insert().values( - user_id=uid, - token_service=body['service'], - token_data=body) + user_id=uid, token_service=body["service"], token_data=body + ) await conn.execute(stmt) - raise web.HTTPCreated(text=json.dumps({'data': body}), - content_type="application/json") + raise web.HTTPCreated( + text=json.dumps({"data": body}), content_type="application/json" + ) @login_required @@ -106,9 +103,7 @@ async def list_tokens(request: web.Request): user_tokens = [] async with engine.acquire() as conn: - query = (sa.select([tokens.c.token_data]) - .where(tokens.c.user_id == uid) - ) + query = sa.select([tokens.c.token_data]).where(tokens.c.user_id == uid) async for row in conn.execute(query): user_tokens.append(row["token_data"]) @@ -120,13 +115,11 @@ async def get_token(request: web.Request): await check_permission(request, "user.tokens.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - service_id = request.match_info['service'] + service_id = request.match_info["service"] async with engine.acquire() as conn: - query = (sa.select([tokens.c.token_data]) - .where(sql.and_( - tokens.c.user_id == uid, - tokens.c.token_service == service_id) ) + query = sa.select([tokens.c.token_data]).where( + sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id) ) result = await conn.execute(query) row = await result.first() @@ -142,17 +135,15 @@ async def update_token(request: web.Request): await check_permission(request, "user.tokens.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - service_id = request.match_info['service'] + service_id = request.match_info["service"] # TODO: validate body = await request.json() # TODO: optimize to a single call? async with engine.acquire() as conn: - query = (sa.select([tokens.c.token_data, tokens.c.token_id]) - .where(sql.and_( - tokens.c.user_id == uid, - tokens.c.token_service == service_id) ) + query = sa.select([tokens.c.token_data, tokens.c.token_id]).where( + sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id) ) result = await conn.execute(query) row = await result.first() @@ -161,26 +152,24 @@ async def update_token(request: web.Request): tid = row["token_id"] data.update(body) - query = (tokens.update() - .where(tokens.c.token_id == tid ) - .values(token_data=data) - ) + query = tokens.update().where(tokens.c.token_id == tid).values(token_data=data) resp = await conn.execute(query) - assert resp.rowcount == 1 # nosec + assert resp.rowcount == 1 # nosec + + raise web.HTTPNoContent(content_type="application/json") - raise web.HTTPNoContent(content_type='application/json') @login_required async def delete_token(request: web.Request): await check_permission(request, "user.tokens.*") uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] - service_id = request.match_info.get('service') + service_id = request.match_info.get("service") async with engine.acquire() as conn: - query = tokens.delete().where(sql.and_(tokens.c.user_id == uid, - tokens.c.token_service == service_id) - ) + query = tokens.delete().where( + sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id) + ) await conn.execute(query) - raise web.HTTPNoContent(content_type='application/json') + raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index 876d126aca9..b71afc71d47 100644 --- a/services/web/server/tests/conftest.py +++ 
b/services/web/server/tests/conftest.py @@ -23,19 +23,21 @@ logging.getLogger("sqlalchemy").setLevel(logging.WARNING) ## HELPERS -sys.path.append(str(current_dir / 'helpers')) +sys.path.append(str(current_dir / "helpers")) ## FIXTURES: standard paths -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def package_dir() -> Path: """ osparc-simcore installed directory """ dirpath = Path(simcore_service_webserver.__file__).resolve().parent assert dirpath.exists() return dirpath -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def osparc_simcore_root_dir() -> Path: """ osparc-simcore repo root dir """ WILDCARD = "services/web/server" @@ -47,23 +49,26 @@ def osparc_simcore_root_dir() -> Path: msg = f"'{root_dir}' does not look like the git root directory of osparc-simcore" assert root_dir.exists(), msg assert any(root_dir.glob(WILDCARD)), msg - assert any(root_dir.glob(".git")), msg + assert any(root_dir.glob(".git")), msg return root_dir + @pytest.fixture(scope="session") def env_devel_file(osparc_simcore_root_dir) -> Path: env_devel_fpath = osparc_simcore_root_dir / ".env-devel" assert env_devel_fpath.exists() return env_devel_fpath -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def api_specs_dir(osparc_simcore_root_dir: Path) -> Path: - specs_dir = osparc_simcore_root_dir/ "api" / "specs" / "webserver" + specs_dir = osparc_simcore_root_dir / "api" / "specs" / "webserver" assert specs_dir.exists() return specs_dir -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def fake_data_dir() -> Path: dirpath = (current_dir / "data").resolve() assert dirpath.exists() diff --git a/services/web/server/tests/data/static/resource/.gitignore b/services/web/server/tests/data/static/resource/.gitignore new file mode 100644 index 00000000000..eeeed7541d2 --- /dev/null +++ b/services/web/server/tests/data/static/resource/.gitignore @@ -0,0 +1,2 @@ +## Keeps folder but ignore all contents +* diff --git a/services/web/server/tests/data/static/resource/.gitkeep b/services/web/server/tests/data/static/resource/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/services/web/server/tests/helpers/utils_assert.py b/services/web/server/tests/helpers/utils_assert.py index f6e6eddaf6b..c09d3aea5a5 100644 --- a/services/web/server/tests/helpers/utils_assert.py +++ b/services/web/server/tests/helpers/utils_assert.py @@ -4,7 +4,9 @@ from servicelib.rest_responses import unwrap_envelope -async def assert_status(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): +async def assert_status( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) assert response.status == expected_cls.status_code, (data, error) @@ -24,24 +26,29 @@ async def assert_status(response: web.Response, expected_cls:web.HTTPException, return data, error -async def assert_error(response: web.Response, expected_cls:web.HTTPException, expected_msg: str=None): + +async def assert_error( + response: web.Response, expected_cls: web.HTTPException, expected_msg: str = None +): data, error = unwrap_envelope(await response.json()) return do_assert_error(data, error, expected_cls, expected_msg) -def do_assert_error(data, error, expected_cls:web.HTTPException, expected_msg: str=None): +def do_assert_error( + data, error, expected_cls: web.HTTPException, expected_msg: str = None +): assert not data, pformat(data) assert error, pformat(error) # TODO: 
improve error messages - assert len(error['errors']) == 1 + assert len(error["errors"]) == 1 - err = error['errors'][0] + err = error["errors"][0] if expected_msg: - assert expected_msg in err['message'] + assert expected_msg in err["message"] if expected_cls != web.HTTPInternalServerError: # otherwise, code is exactly the name of the Exception class - assert expected_cls.__name__ == err['code'] + assert expected_cls.__name__ == err["code"] return data, error diff --git a/services/web/server/tests/helpers/utils_docker.py b/services/web/server/tests/helpers/utils_docker.py index 2b0bcb4de33..9caf8c9d492 100644 --- a/services/web/server/tests/helpers/utils_docker.py +++ b/services/web/server/tests/helpers/utils_docker.py @@ -1,4 +1,3 @@ - import logging import os import subprocess @@ -12,11 +11,13 @@ log = logging.getLogger(__name__) + @retry( - wait=wait_fixed(2), - stop=stop_after_attempt(10), - after=after_log(log, logging.WARN)) -def get_service_published_port(service_name: str, target_port: Optional[int]=None) -> str: + wait=wait_fixed(2), stop=stop_after_attempt(10), after=after_log(log, logging.WARN) +) +def get_service_published_port( + service_name: str, target_port: Optional[int] = None +) -> str: """ WARNING: ENSURE that service name exposes a port in Dockerfile file or docker-compose config file """ @@ -25,25 +26,35 @@ def get_service_published_port(service_name: str, target_port: Optional[int]=Non services = [x for x in client.services.list() if service_name in x.name] if not services: - raise RuntimeError(f"Cannot find published port for service '{service_name}'. Probably services still not started.") + raise RuntimeError( + f"Cannot find published port for service '{service_name}'. Probably services still not started." + ) service_ports = services[0].attrs["Endpoint"].get("Ports") if not service_ports: - raise RuntimeError(f"Cannot find published port for service '{service_name}' in endpoint. Probably services still not started.") + raise RuntimeError( + f"Cannot find published port for service '{service_name}' in endpoint. Probably services still not started." + ) published_port = None - msg = ", ".join( f"{p.get('TargetPort')} -> {p.get('PublishedPort')}" for p in service_ports ) + msg = ", ".join( + f"{p.get('TargetPort')} -> {p.get('PublishedPort')}" for p in service_ports + ) if target_port is None: - if len(service_ports)>1: - log.warning("Multiple ports published in service '%s': %s. Defaulting to first", service_name, msg) + if len(service_ports) > 1: + log.warning( + "Multiple ports published in service '%s': %s. 
Defaulting to first", + service_name, + msg, + ) published_port = service_ports[0]["PublishedPort"] else: target_port = int(target_port) for p in service_ports: - if p['TargetPort'] == target_port: - published_port = p['PublishedPort'] + if p["TargetPort"] == target_port: + published_port = p["PublishedPort"] break if published_port is None: @@ -55,7 +66,8 @@ def get_service_published_port(service_name: str, target_port: Optional[int]=Non def run_docker_compose_config( docker_compose_paths: Union[List[Path], Path], workdir: Path, - destination_path: Optional[Path]=None) -> Dict: + destination_path: Optional[Path] = None, +) -> Dict: """ Runs docker-compose config to validate and resolve a compose file configuration - Composes all configurations passed in 'docker_compose_paths' @@ -64,19 +76,27 @@ def run_docker_compose_config( """ if not isinstance(docker_compose_paths, List): - docker_compose_paths = [docker_compose_paths, ] + docker_compose_paths = [ + docker_compose_paths, + ] temp_dir = None if destination_path is None: - temp_dir = Path(tempfile.mkdtemp(prefix='')) - destination_path = temp_dir / 'docker-compose.yml' + temp_dir = Path(tempfile.mkdtemp(prefix="")) + destination_path = temp_dir / "docker-compose.yml" - config_paths = [ f"-f {os.path.relpath(docker_compose_path, workdir)}" for docker_compose_path in docker_compose_paths] + config_paths = [ + f"-f {os.path.relpath(docker_compose_path, workdir)}" + for docker_compose_path in docker_compose_paths + ] configs_prefix = " ".join(config_paths) - subprocess.run( f"docker-compose {configs_prefix} config > {destination_path}", - shell=True, check=True, - cwd=workdir) + subprocess.run( + f"docker-compose {configs_prefix} config > {destination_path}", + shell=True, + check=True, + cwd=workdir, + ) with destination_path.open() as f: config = yaml.safe_load(f) diff --git a/services/web/server/tests/helpers/utils_environs.py b/services/web/server/tests/helpers/utils_environs.py index 29ead968d46..f02d4b1822e 100644 --- a/services/web/server/tests/helpers/utils_environs.py +++ b/services/web/server/tests/helpers/utils_environs.py @@ -8,7 +8,8 @@ import yaml -VARIABLE_SUBSTITUTION = re.compile(r'\$\{(\w+)(?:(:{0,1}[-?]{0,1})(.*))?\}$') +VARIABLE_SUBSTITUTION = re.compile(r"\$\{(\w+)(?:(:{0,1}[-?]{0,1})(.*))?\}$") + def load_env(file_handler) -> Dict: """ Deserializes an environment file like .env-devel and @@ -16,7 +17,7 @@ def load_env(file_handler) -> Dict: Analogous to json.load """ - PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$") + PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$") # Works even for `POSTGRES_EXPORTER_DATA_SOURCE_NAME=postgresql://simcore:simcore@postgres:5432/simcoredb?sslmode=disable` environ = {} @@ -27,8 +28,14 @@ def load_env(file_handler) -> Dict: environ[key] = str(value) return environ -def eval_environs_in_docker_compose(docker_compose: Dict, docker_compose_dir: Path, - host_environ: Dict=None, *, use_env_devel=True): + +def eval_environs_in_docker_compose( + docker_compose: Dict, + docker_compose_dir: Path, + host_environ: Dict = None, + *, + use_env_devel=True +): """ Resolves environments in docker compose and sets them under 'environment' section TODO: deprecated. 
Use instead docker-compose config in services/web/server/tests/integration/fixtures/docker_compose.py
@@ -36,16 +43,22 @@ def eval_environs_in_docker_compose(docker_compose: Dict, docker_compose_dir: Pa
     """
     content = deepcopy(docker_compose)
     for _name, service in content["services"].items():
-        replace_environs_in_docker_compose_service(service, docker_compose_dir,
-            host_environ, use_env_devel=use_env_devel)
+        replace_environs_in_docker_compose_service(
+            service, docker_compose_dir, host_environ, use_env_devel=use_env_devel
+        )
     return content
 
+
 from typing import List
 
-def replace_environs_in_docker_compose_service(service_section: Dict,
+
+def replace_environs_in_docker_compose_service(
+    service_section: Dict,
     docker_compose_dir: Path,
-    host_environ: Dict=None,
-    *, use_env_devel=True):
+    host_environ: Dict = None,
+    *,
+    use_env_devel=True
+):
     """ Resolves environments in docker-compose's service section,
     drops any reference to env_file and sets all environs 'environment' section
@@ -73,25 +86,30 @@
     for item in environ_items:
         key, value = item.split("=")
 
         m = VARIABLE_SUBSTITUTION.match(value)
-        if m: # There is a variable as value in docker-compose
-            envkey = m.groups()[0] # Variable name
-            if len(m.groups()) == 3: # There is a default value
+        if m:  # There is a variable as value in docker-compose
+            envkey = m.groups()[0]  # Variable name
+            if len(m.groups()) == 3:  # There is a default value
                 default_value = m.groups()[2]
             if envkey in host_environ:
-                value = host_environ[envkey] # Use host environ
-                if default_value and len(value) == 0 and m.groups()[1] == ':-':
-                    value = default_value # Unless it is empty and default exists
+                value = host_environ[envkey]  # Use host environ
+                if default_value and len(value) == 0 and m.groups()[1] == ":-":
+                    value = default_value  # Unless it is empty and default exists
             elif default_value:
-                value = default_value # Use default if exists
-
+                value = default_value  # Use default if exists
+
         service_environ[key] = value
 
     service_section["environment"] = service_environ
 
-def eval_service_environ(docker_compose_path:Path, service_name:str,
-    host_environ: Dict=None,
-    image_environ: Dict=None,
-    *, use_env_devel=True) -> Dict:
+
+def eval_service_environ(
+    docker_compose_path: Path,
+    service_name: str,
+    host_environ: Dict = None,
+    image_environ: Dict = None,
+    *,
+    use_env_devel=True
+) -> Dict:
     """ Deduces the environment a service runs with inside a stack, from its configuration
 
     :param docker_compose_path: path to stack configuration
@@ -110,8 +128,9 @@ def eval_service_environ(docker_compose_path:Path, service_name:str,
         content = yaml.safe_load(f)
 
     service = content["services"][service_name]
-    replace_environs_in_docker_compose_service(service, docker_compose_dir,
-        host_environ, use_env_devel=use_env_devel)
+    replace_environs_in_docker_compose_service(
+        service, docker_compose_dir, host_environ, use_env_devel=use_env_devel
+    )
 
     host_environ = host_environ or {}
     image_environ = image_environ or {}
diff --git a/services/web/server/tests/helpers/utils_login.py b/services/web/server/tests/helpers/utils_login.py
index c3d503a08b0..1b42e940875 100644
--- a/services/web/server/tests/helpers/utils_login.py
+++ b/services/web/server/tests/helpers/utils_login.py
@@ -6,11 +6,11 @@
 from simcore_service_webserver.db_models import UserRole, UserStatus
 from simcore_service_webserver.login.cfg import cfg, get_storage
 from simcore_service_webserver.login.registration import create_invitation
-from simcore_service_webserver.login.utils import (encrypt_password,
-    get_random_string)
+from simcore_service_webserver.login.utils import encrypt_password, get_random_string
 from utils_assert import assert_status
 
-TEST_MARKS = re.compile(r'TEST (\w+):(.*)')
+TEST_MARKS = re.compile(r"TEST (\w+):(.*)")
+
 
 def parse_test_marks(text):
     """ Checks for marks as
@@ -34,28 +34,28 @@ async def create_user(data=None):
     data = data or {}
     password = get_random_string(10)
     params = {
-        'name': get_random_string(10),
-        'email': '{}@gmail.com'.format(get_random_string(10)),
-        'password_hash': encrypt_password(password)
+        "name": get_random_string(10),
+        "email": "{}@gmail.com".format(get_random_string(10)),
+        "password_hash": encrypt_password(password),
     }
     params.update(data)
-    params.setdefault('status', UserStatus.ACTIVE.name)
-    params.setdefault('role', UserRole.USER.name)
-    params.setdefault('created_ip', '127.0.0.1')
+    params.setdefault("status", UserStatus.ACTIVE.name)
+    params.setdefault("role", UserRole.USER.name)
+    params.setdefault("created_ip", "127.0.0.1")
 
     user = await cfg.STORAGE.create_user(params)
-    user['raw_password'] = password
+    user["raw_password"] = password
     return user
 
+
 async def log_client_in(client, user_data=None, *, enable_check=True):
     # creates user directly in db
     user = await create_user(user_data)
 
     # login
-    url = client.app.router['auth_login'].url_for()
-    r = await client.post(url, json={
-        'email': user['email'],
-        'password': user['raw_password'],
-    })
+    url = client.app.router["auth_login"].url_for()
+    r = await client.post(
+        url, json={"email": user["email"], "password": user["raw_password"]}
+    )
 
     if enable_check:
         await assert_status(r, web.HTTPOk, cfg.MSG_LOGGED_IN)
@@ -65,11 +65,12 @@ async def log_client_in(client, user_data=None, *, enable_check=True):
 
 # CONTEXT MANAGERS ------------------------------
 
+
 class NewUser:
-    def __init__(self, params=None, app: web.Application=None):
+    def __init__(self, params=None, app: web.Application = None):
         self.params = params
         self.user = None
-        self.db = get_storage(app) if app else cfg.STORAGE # FIXME:
+        self.db = get_storage(app) if app else cfg.STORAGE  # FIXME:
 
     async def __aenter__(self):
         self.user = await create_user(self.params)
@@ -86,9 +87,12 @@ def __init__(self, client, params=None, *, check_if_succeeds=True):
         self.enable_check = check_if_succeeds
 
     async def __aenter__(self):
-        self.user = await log_client_in(self.client, self.params, enable_check=self.enable_check)
+        self.user = await log_client_in(
+            self.client, self.params, enable_check=self.enable_check
+        )
         return self.user
 
+
 class NewInvitation(NewUser):
     def __init__(self, client, guest="", host=None):
         super().__init__(host, client.app)
diff --git a/services/web/server/tests/helpers/utils_projects.py b/services/web/server/tests/helpers/utils_projects.py
index 0c3f810d986..77e5bc0f88e 100644
--- a/services/web/server/tests/helpers/utils_projects.py
+++ b/services/web/server/tests/helpers/utils_projects.py
@@ -12,14 +12,23 @@
 from aiohttp import web
 
-from simcore_service_webserver.projects.projects_db import APP_PROJECT_DBAPI, DB_EXCLUSIVE_COLUMNS
+from simcore_service_webserver.projects.projects_db import (
+    APP_PROJECT_DBAPI,
+    DB_EXCLUSIVE_COLUMNS,
+)
 from simcore_service_webserver.resources import resources
 
-fake_template_resources = ['data/'+name for name in resources.listdir('data')
-    if re.match(r"^fake-template-(.+).json", name) ]
+fake_template_resources = [
+    "data/" + name
+    for name in resources.listdir("data")
+    if re.match(r"^fake-template-(.+).json", name)
+] -fake_project_resources = ['data/'+name for name in resources.listdir('data') - if re.match(r"^fake-user-(.+).json", name) ] +fake_project_resources = [ + "data/" + name + for name in resources.listdir("data") + if re.match(r"^fake-user-(.+).json", name) +] def load_data(name): @@ -27,7 +36,9 @@ def load_data(name): return json.load(fp) -async def create_project(app: web.Application, params: Dict=None, user_id=None, *, force_uuid=False) -> Dict: +async def create_project( + app: web.Application, params: Dict = None, user_id=None, *, force_uuid=False +) -> Dict: """ Injects new project in database for user or as template :param params: predefined project properties (except for non-writeable e.g. uuid), defaults to None @@ -39,12 +50,14 @@ async def create_project(app: web.Application, params: Dict=None, user_id=None, """ params = params or {} - project_data = load_data('data/fake-template-projects.isan.json')[0] + project_data = load_data("data/fake-template-projects.isan.json")[0] project_data.update(params) db = app[APP_PROJECT_DBAPI] - project_uuid = await db.add_project(project_data, user_id, force_project_uuid=force_uuid) + project_uuid = await db.add_project( + project_data, user_id, force_project_uuid=force_uuid + ) assert project_uuid == project_data["uuid"] for key in DB_EXCLUSIVE_COLUMNS: @@ -54,7 +67,10 @@ async def create_project(app: web.Application, params: Dict=None, user_id=None, async def delete_all_projects(app: web.Application): - from simcore_service_webserver.projects.projects_models import projects, user_to_projects + from simcore_service_webserver.projects.projects_models import ( + projects, + user_to_projects, + ) db = app[APP_PROJECT_DBAPI] async with db.engine.acquire() as conn: @@ -66,7 +82,15 @@ async def delete_all_projects(app: web.Application): class NewProject: - def __init__(self, params: Dict=None, app: web.Application=None, clear_all=True, user_id=None, *, force_uuid=False): + def __init__( + self, + params: Dict = None, + app: web.Application = None, + clear_all=True, + user_id=None, + *, + force_uuid=False + ): self.params = params self.user_id = user_id self.app = app @@ -76,10 +100,14 @@ def __init__(self, params: Dict=None, app: web.Application=None, clear_all=True, if not self.clear_all: # TODO: add delete_project. 
Deleting a single project implies having to delete as well all dependencies created - raise ValueError("UNDER DEVELOPMENT: Currently can only delete all projects ") + raise ValueError( + "UNDER DEVELOPMENT: Currently can only delete all projects " + ) async def __aenter__(self): - self.prj = await create_project(self.app, self.params, self.user_id, force_uuid=self.force_uuid) + self.prj = await create_project( + self.app, self.params, self.user_id, force_uuid=self.force_uuid + ) return self.prj async def __aexit__(self, *args): diff --git a/services/web/server/tests/helpers/utils_tokens.py b/services/web/server/tests/helpers/utils_tokens.py index 6cc65a62b23..b84a1196aff 100644 --- a/services/web/server/tests/helpers/utils_tokens.py +++ b/services/web/server/tests/helpers/utils_tokens.py @@ -29,7 +29,7 @@ async def create_token_in_db(engine, **data): "token_data": { "token_secret": get_random_string(3), "token_key": get_random_string(4), - } + }, } params.update(data) @@ -40,11 +40,17 @@ async def create_token_in_db(engine, **data): return dict(row) -async def get_token_from_db(engine, *, token_id=None, user_id=None, token_service=None, token_data=None): +async def get_token_from_db( + engine, *, token_id=None, user_id=None, token_service=None, token_data=None +): async with engine.acquire() as conn: - expr = to_expression(token_id=token_id, user_id=user_id, - token_service=token_service, token_data=token_data) - stmt = sa.select([tokens, ]).where(expr) + expr = to_expression( + token_id=token_id, + user_id=user_id, + token_service=token_service, + token_data=token_data, + ) + stmt = sa.select([tokens,]).where(expr) result = await conn.execute(stmt) row = await result.first() return dict(row) if row else None @@ -66,8 +72,10 @@ def to_expression(**params): expressions = [] for key, value in params.items(): if value is not None: - statement = (cast(getattr(tokens.c, key), String) == json.dumps(value)) \ - if isinstance(getattr(tokens.c, key).type, JSON) \ + statement = ( + (cast(getattr(tokens.c, key), String) == json.dumps(value)) + if isinstance(getattr(tokens.c, key).type, JSON) else (getattr(tokens.c, key) == value) + ) expressions.append(statement) return reduce(and_, expressions) diff --git a/services/web/server/tests/integration/computation/conftest.py b/services/web/server/tests/integration/computation/conftest.py index 73c39911ff9..77e6cc02961 100644 --- a/services/web/server/tests/integration/computation/conftest.py +++ b/services/web/server/tests/integration/computation/conftest.py @@ -15,19 +15,22 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def mock_workbench_payload(): file_path = current_dir / "workbench_sleeper_payload.json" with file_path.open() as fp: return json.load(fp) -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def mock_project(fake_data_dir, mock_workbench_payload): with (fake_data_dir / "fake-project.json").open() as fp: project = json.load(fp) project["workbench"] = mock_workbench_payload["workbench"] return project + @pytest.fixture async def logged_user(client, user_role: UserRole): """ adds a user in db and logs in with client @@ -37,29 +40,31 @@ async def logged_user(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds = user_role!=UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: yield user + @pytest.fixture async def 
user_project(client, mock_project, logged_user): mock_project["prjOwner"] = logged_user["name"] async with NewProject( - mock_project, - client.app, - user_id=logged_user["id"] + mock_project, client.app, user_id=logged_user["id"] ) as project: yield project + @pytest.fixture def project_id() -> str: return str(uuid.uuid4()) + @pytest.fixture(scope="session") def node_uuid() -> str: return "some_node_id" + @pytest.fixture(scope="session") def user_id() -> str: return "some_id" diff --git a/services/web/server/tests/integration/computation/test_computation.py b/services/web/server/tests/integration/computation/test_computation.py index 784c8147a74..5dbf108f8ba 100644 --- a/services/web/server/tests/integration/computation/test_computation.py +++ b/services/web/server/tests/integration/computation/test_computation.py @@ -16,7 +16,10 @@ from servicelib.application import create_safe_application from servicelib.application_keys import APP_CONFIG_KEY from simcore_sdk.models.pipeline_models import ( - SUCCESS, ComputationalPipeline, ComputationalTask) + SUCCESS, + ComputationalPipeline, + ComputationalTask, +) from simcore_service_webserver.computation import setup_computation from simcore_service_webserver.db import setup_db from simcore_service_webserver.login import setup_login @@ -36,29 +39,24 @@ # TODO: create conftest at computation/ folder level # Selection of core and tool services started in this swarm fixture (integration) -core_services = [ - 'director', - 'rabbit', - 'postgres', - 'sidecar', - 'storage' -] +core_services = ["director", "rabbit", "postgres", "sidecar", "storage"] ops_services = [ - 'minio', -# 'adminer', -# 'portainer' + "minio", + # 'adminer', + # 'portainer' ] + @pytest.fixture -def client(loop, aiohttp_client, - app_config, ## waits until swarm with *_services are up - ): +def client( + loop, aiohttp_client, app_config, ## waits until swarm with *_services are up +): assert app_config["rest"]["version"] == API_VERSION - app_config['storage']['enabled'] = False - app_config['main']['testing'] = True - app_config['db']['init_tables'] = True # inits postgres_service + app_config["storage"]["enabled"] = False + app_config["main"]["testing"] = True + app_config["db"]["init_tables"] = True # inits postgres_service pprint(app_config) @@ -76,13 +74,18 @@ def client(loop, aiohttp_client, setup_projects(app) setup_computation(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': app_config["main"]["port"], - 'host': app_config['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={ + "port": app_config["main"]["port"], + "host": app_config["main"]["host"], + }, + ) + ) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def mock_workbench_adjacency_list(): file_path = current_dir / "workbench_sleeper_dag_adjacency_list.json" with file_path.open() as fp: @@ -90,19 +93,28 @@ def mock_workbench_adjacency_list(): # HELPERS ---------------------------------- -def assert_db_contents(project_id, postgres_session, - mock_workbench_payload, mock_workbench_adjacency_list, - check_outputs:bool - ): +def assert_db_contents( + project_id, + postgres_session, + mock_workbench_payload, + mock_workbench_adjacency_list, + check_outputs: bool, +): # pylint: disable=no-member - pipeline_db = postgres_session.query(ComputationalPipeline)\ - .filter(ComputationalPipeline.project_id == project_id).one() + pipeline_db = ( + postgres_session.query(ComputationalPipeline) + .filter(ComputationalPipeline.project_id 
== project_id) + .one() + ) assert pipeline_db.project_id == project_id assert pipeline_db.dag_adjacency_list == mock_workbench_adjacency_list # check db comp_tasks - tasks_db = postgres_session.query(ComputationalTask)\ - .filter(ComputationalTask.project_id == project_id).all() + tasks_db = ( + postgres_session.query(ComputationalTask) + .filter(ComputationalTask.project_id == project_id) + .all() + ) mock_pipeline = mock_workbench_payload assert len(tasks_db) == len(mock_pipeline) @@ -119,14 +131,21 @@ def assert_db_contents(project_id, postgres_session, assert task_db.image["name"] == mock_pipeline[task_db.node_id]["key"] assert task_db.image["tag"] == mock_pipeline[task_db.node_id]["version"] + def assert_sleeper_services_completed(project_id, postgres_session): # pylint: disable=no-member # we wait 15 secs before testing... time.sleep(15) - pipeline_db = postgres_session.query(ComputationalPipeline)\ - .filter(ComputationalPipeline.project_id == project_id).one() - tasks_db = postgres_session.query(ComputationalTask)\ - .filter(ComputationalTask.project_id == project_id).all() + pipeline_db = ( + postgres_session.query(ComputationalPipeline) + .filter(ComputationalPipeline.project_id == project_id) + .one() + ) + tasks_db = ( + postgres_session.query(ComputationalTask) + .filter(ComputationalTask.project_id == project_id) + .all() + ) for task_db in tasks_db: if "sleeper" in task_db.image["name"]: assert task_db.state == SUCCESS @@ -135,26 +154,34 @@ def assert_sleeper_services_completed(project_id, postgres_session): # TESTS ------------------------------------------ async def test_check_health(docker_stack, client): # TODO: check health of all core_services in list above! - resp = await client.get( API_VERSION + "/") + resp = await client.get(API_VERSION + "/") data, _ = await assert_status(resp, web.HTTPOk) - assert data['name'] == 'simcore_service_webserver' - assert data['status'] == 'SERVICE_RUNNING' - - -@pytest.mark.parametrize("user_role,expected_response", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_start_pipeline(client, postgres_session, celery_service, sleeper_service, - logged_user, user_project, - mock_workbench_adjacency_list, - expected_response - ): + assert data["name"] == "simcore_service_webserver" + assert data["status"] == "SERVICE_RUNNING" + + +@pytest.mark.parametrize( + "user_role,expected_response", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_start_pipeline( + client, + postgres_session, + celery_service, + sleeper_service, + logged_user, + user_project, + mock_workbench_adjacency_list, + expected_response, +): project_id = user_project["uuid"] - mock_workbench_payload = user_project['workbench'] + mock_workbench_payload = user_project["workbench"] url = client.app.router["start_pipeline"].url_for(project_id=project_id) assert url == URL(API_PREFIX + "/computation/pipeline/{}/start".format(project_id)) @@ -166,26 +193,39 @@ async def test_start_pipeline(client, postgres_session, celery_service, sleeper_ if not error: assert "pipeline_name" in data assert "project_id" in data - assert data['project_id'] == project_id - - assert_db_contents(project_id, postgres_session, mock_workbench_payload, - mock_workbench_adjacency_list, check_outputs=False) + assert data["project_id"] == project_id + + assert_db_contents( + 
project_id, + postgres_session, + mock_workbench_payload, + mock_workbench_adjacency_list, + check_outputs=False, + ) # assert_sleeper_services_completed(project_id, postgres_session) -@pytest.mark.parametrize("user_role,expected_response", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPNoContent), - (UserRole.USER, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPNoContent), -]) -async def test_update_pipeline(client, docker_stack, postgres_session, - logged_user, user_project, - mock_workbench_payload, mock_workbench_adjacency_list, - expected_response - ): +@pytest.mark.parametrize( + "user_role,expected_response", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPNoContent), + (UserRole.USER, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPNoContent), + ], +) +async def test_update_pipeline( + client, + docker_stack, + postgres_session, + logged_user, + user_project, + mock_workbench_payload, + mock_workbench_adjacency_list, + expected_response, +): project_id = user_project["uuid"] - assert user_project['workbench'] == mock_workbench_payload['workbench'] + assert user_project["workbench"] == mock_workbench_payload["workbench"] url = client.app.router["update_pipeline"].url_for(project_id=project_id) assert url == URL(API_PREFIX + "/computation/pipeline/{}".format(project_id)) @@ -197,5 +237,10 @@ async def test_update_pipeline(client, docker_stack, postgres_session, if not error: # check db comp_pipeline - assert_db_contents(project_id, postgres_session, mock_workbench_payload['workbench'], - mock_workbench_adjacency_list, check_outputs=True) + assert_db_contents( + project_id, + postgres_session, + mock_workbench_payload["workbench"], + mock_workbench_adjacency_list, + check_outputs=True, + ) diff --git a/services/web/server/tests/integration/computation/test_rabbit.py b/services/web/server/tests/integration/computation/test_rabbit.py index a98de09d611..be2fdf92000 100644 --- a/services/web/server/tests/integration/computation/test_rabbit.py +++ b/services/web/server/tests/integration/computation/test_rabbit.py @@ -30,24 +30,22 @@ API_VERSION = "v0" # Selection of core and tool services started in this swarm fixture (integration) -core_services = [ - 'postgres', - 'redis', - 'rabbit' -] +core_services = ["postgres", "redis", "rabbit"] + +ops_services = [] -ops_services = [ -] @pytest.fixture -def client(loop, aiohttp_client, - app_config, ## waits until swarm with *_services are up - rabbit_service ## waits until rabbit is responsive - ): +def client( + loop, + aiohttp_client, + app_config, ## waits until swarm with *_services are up + rabbit_service, ## waits until rabbit is responsive +): assert app_config["rest"]["version"] == API_VERSION - app_config['storage']['enabled'] = False - app_config["db"]["init_tables"] = True # inits postgres_service + app_config["storage"]["enabled"] = False + app_config["db"]["init_tables"] = True # inits postgres_service # fake config app = create_safe_application() @@ -63,31 +61,45 @@ def client(loop, aiohttp_client, setup_sockets(app) setup_resource_manager(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': app_config["main"]["port"], - 'host': app_config['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={ + "port": app_config["main"]["port"], + "host": app_config["main"]["host"], + }, + ) + ) + @pytest.fixture def rabbit_config(app_config): rb_config = app_config[CONFIG_SECTION_NAME] yield rb_config + @pytest.fixture 
def rabbit_broker(rabbit_config): rabbit_broker = eval_broker(rabbit_config) yield rabbit_broker + @pytest.fixture async def pika_connection(loop, rabbit_broker): - connection = await aio_pika.connect(rabbit_broker, ssl=True, connection_attempts=100) + connection = await aio_pika.connect( + rabbit_broker, ssl=True, connection_attempts=100 + ) yield connection await connection.close() + # ------------------------------------------ + @pytest.fixture -async def rabbit_channels(loop, pika_connection, rabbit_config: Dict) -> Dict[str, aio_pika.Exchange]: +async def rabbit_channels( + loop, pika_connection, rabbit_config: Dict +) -> Dict[str, aio_pika.Exchange]: async def create(channel_name: str) -> aio_pika.Exchange: # create rabbit pika exchange channel channel = await pika_connection.channel() @@ -97,18 +109,17 @@ async def create(channel_name: str) -> aio_pika.Exchange: ) return pika_exchange - return { - "log": await create("log"), - "progress": await create("progress") - } + return {"log": await create("log"), "progress": await create("progress")} -def _create_rabbit_message(message_name: str, node_uuid: str, user_id: str, project_id: str, param: Any) -> Dict: +def _create_rabbit_message( + message_name: str, node_uuid: str, user_id: str, project_id: str, param: Any +) -> Dict: message = { - "Channel":message_name.title(), + "Channel": message_name.title(), "Node": node_uuid, "user_id": user_id, - "project_id": project_id + "project_id": project_id, } if message_name == "log": @@ -117,26 +128,43 @@ def _create_rabbit_message(message_name: str, node_uuid: str, user_id: str, proj message["Progress"] = param return message + @pytest.fixture def client_session_id(): return str(uuid4()) -async def _publish_messages(num_messages: int, node_uuid: str, user_id: str, project_id: str, rabbit_channels: Dict[str, aio_pika.Exchange]) -> Tuple[Dict, Dict]: - log_messages = [_create_rabbit_message("log", node_uuid, user_id, project_id, f"log number {n}") for n in range(num_messages)] - progress_messages = [_create_rabbit_message("progress", node_uuid, user_id, project_id, n/num_messages) for n in range(num_messages)] +async def _publish_messages( + num_messages: int, + node_uuid: str, + user_id: str, + project_id: str, + rabbit_channels: Dict[str, aio_pika.Exchange], +) -> Tuple[Dict, Dict]: + log_messages = [ + _create_rabbit_message("log", node_uuid, user_id, project_id, f"log number {n}") + for n in range(num_messages) + ] + progress_messages = [ + _create_rabbit_message( + "progress", node_uuid, user_id, project_id, n / num_messages + ) + for n in range(num_messages) + ] # send the messages over rabbit for n in range(num_messages): await rabbit_channels["log"].publish( aio_pika.Message( - body=json.dumps(log_messages[n]).encode(), - content_type="text/json"), routing_key = "" + body=json.dumps(log_messages[n]).encode(), content_type="text/json" + ), + routing_key="", ) await rabbit_channels["progress"].publish( aio_pika.Message( - body=json.dumps(progress_messages[n]).encode(), - content_type="text/json"), routing_key = "" + body=json.dumps(progress_messages[n]).encode(), content_type="text/json" + ), + routing_key="", ) return (log_messages, progress_messages) @@ -150,14 +178,22 @@ async def _wait_until(pred: Callable, timeout: int): await sleep(1) pytest.fail("waited too long for getting websockets events") -@pytest.mark.parametrize("user_role", [ - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_rabbit_websocket_computation(loop, logged_user, user_project, - 
socketio_client, client_session_id, mocker, - rabbit_channels, node_uuid, user_id, project_id): + +@pytest.mark.parametrize( + "user_role", [(UserRole.GUEST), (UserRole.USER), (UserRole.TESTER),] +) +async def test_rabbit_websocket_computation( + loop, + logged_user, + user_project, + socketio_client, + client_session_id, + mocker, + rabbit_channels, + node_uuid, + user_id, + project_id, +): # corresponding websocket event names websocket_log_event = "logger" @@ -173,22 +209,34 @@ async def test_rabbit_websocket_computation(loop, logged_user, user_project, NUMBER_OF_MESSAGES = 1 TIMEOUT_S = 20 - await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, user_id, project_id, rabbit_channels) + await _publish_messages( + NUMBER_OF_MESSAGES, node_uuid, user_id, project_id, rabbit_channels + ) await sleep(1) mock_log_handler_fct.assert_not_called() mock_node_update_handler_fct.assert_not_called() # publish messages with correct user id, but no project - log_messages, _ = await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], project_id, rabbit_channels) + log_messages, _ = await _publish_messages( + NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], project_id, rabbit_channels + ) + def predicate() -> bool: return mock_log_handler_fct.call_count == (NUMBER_OF_MESSAGES) + await _wait_until(predicate, TIMEOUT_S) log_calls = [call(json.dumps(message)) for message in log_messages] mock_log_handler_fct.assert_has_calls(log_calls, any_order=True) mock_node_update_handler_fct.assert_not_called() # publish message with correct user id, project but not node mock_log_handler_fct.reset_mock() - log_messages, _ = await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], user_project["uuid"], rabbit_channels) + log_messages, _ = await _publish_messages( + NUMBER_OF_MESSAGES, + node_uuid, + logged_user["id"], + user_project["uuid"], + rabbit_channels, + ) await _wait_until(predicate, TIMEOUT_S) log_calls = [call(json.dumps(message)) for message in log_messages] mock_log_handler_fct.assert_has_calls(log_calls, any_order=True) @@ -198,10 +246,19 @@ def predicate() -> bool: # publish message with correct user id, project node mock_log_handler_fct.reset_mock() node_uuid = list(user_project["workbench"])[0] - log_messages, progress_messages = await _publish_messages(NUMBER_OF_MESSAGES, node_uuid, logged_user["id"], user_project["uuid"], rabbit_channels) + log_messages, progress_messages = await _publish_messages( + NUMBER_OF_MESSAGES, + node_uuid, + logged_user["id"], + user_project["uuid"], + rabbit_channels, + ) + def predicate2() -> bool: - return mock_log_handler_fct.call_count == (NUMBER_OF_MESSAGES) and \ - mock_node_update_handler_fct.call_count == (NUMBER_OF_MESSAGES) + return mock_log_handler_fct.call_count == ( + NUMBER_OF_MESSAGES + ) and mock_node_update_handler_fct.call_count == (NUMBER_OF_MESSAGES) + await _wait_until(predicate2, TIMEOUT_S) log_calls = [call(json.dumps(message)) for message in log_messages] mock_log_handler_fct.assert_has_calls(log_calls, any_order=True) diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index 81488bb026b..3152a13bdef 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -37,15 +37,18 @@ "fixtures.celery_service", "fixtures.postgres_service", "fixtures.redis_service", - "fixtures.websocket_client" + "fixtures.websocket_client", ] current_dir = Path(sys.argv[0] if __name__ == "__main__" else 
__file__).resolve().parent
 log = logging.getLogger(__name__)
 
+
 @pytest.fixture(scope="module")
-def webserver_environ(request, docker_stack: Dict, simcore_docker_compose: Dict) -> Dict[str, str]:
+def webserver_environ(
+    request, docker_stack: Dict, simcore_docker_compose: Dict
+) -> Dict[str, str]:
     """
     Started already swarm with integration stack (via dependency with 'docker_stack')
@@ -56,15 +59,19 @@
     """
     assert "webserver" not in docker_stack["services"]
 
-    dockerfile_environ = {'SIMCORE_WEB_OUTDIR': "undefined" } # TODO: parse webserver dockerfile ??
-    docker_compose_environ = simcore_docker_compose['services']['webserver'].get('environment',{})
+    dockerfile_environ = {
+        "SIMCORE_WEB_OUTDIR": "undefined"
+    }  # TODO: parse webserver dockerfile ??
+    docker_compose_environ = simcore_docker_compose["services"]["webserver"].get(
+        "environment", {}
+    )
 
     environ = {}
     environ.update(dockerfile_environ)
     environ.update(docker_compose_environ)
 
     # get the list of core services the test module wants
-    core_services = getattr(request.module, 'core_services', [])
+    core_services = getattr(request.module, "core_services", [])
 
     # OVERRIDES:
     # One of the biggest differences with respect to the real system
@@ -72,26 +79,32 @@
     # version that loads only the subsystems under test. For that reason,
     # the test webserver is built-up in webserver_service fixture that runs
     # on the host.
-    services_with_published_ports = [name for name in core_services
-        if 'ports' in simcore_docker_compose['services'][name] ]
+    services_with_published_ports = [
+        name
+        for name in core_services
+        if "ports" in simcore_docker_compose["services"][name]
+    ]
 
     for name in services_with_published_ports:
-        host_key = f'{name.upper()}_HOST'
-        port_key = f'{name.upper()}_PORT'
+        host_key = f"{name.upper()}_HOST"
+        port_key = f"{name.upper()}_PORT"
 
         # published port is sometimes dynamically defined by the swarm
-        assert host_key in environ, "Variables names expected to be prefix with service names in docker-compose"
+        assert (
+            host_key in environ
+        ), "Variable names are expected to be prefixed with service names in docker-compose"
         assert port_key in environ
 
         # to swarm boundary since webserver is installed in the host and therefore outside the swarm's network
        published_port = get_service_published_port(name, int(environ.get(port_key)))
-        environ[host_key] = '127.0.0.1'
+        environ[host_key] = "127.0.0.1"
         environ[port_key] = published_port
 
-    pprint(environ) # NOTE: displayed only if error
+    pprint(environ)  # NOTE: displayed only if error
     return environ
 
-@pytest.fixture(scope='module')
+
+@pytest.fixture(scope="module")
 def _webserver_dev_config(webserver_environ: Dict, docker_stack: Dict) -> Dict:
     """
     Swarm with integration stack already started
@@ -106,18 +119,22 @@
     with app_resources.stream("config/server-docker-dev.yaml") as f:
         cfg = yaml.safe_load(f)
         # test webserver works in host
-        cfg["main"]['host'] = '127.0.0.1'
+        cfg["main"]["host"] = "127.0.0.1"
 
-    with config_file_path.open('wt') as f:
+    with config_file_path.open("wt") as f:
         yaml.dump(cfg, f, default_flow_style=False)
 
     # Emulates cli
     config_environ = {}
     config_environ.update(webserver_environ)
-    config_environ.update( create_environ(skip_host_environ=True) ) # TODO: can be done monkeypathcing os.environ and calling create_environ as well
+    config_environ.update(
+        create_environ(skip_host_environ=True)
+    )  # TODO: can be done monkeypatching os.environ and calling create_environ as well
 
     # validates
-    cfg_dict = trafaret_config.read_and_validate(config_file_path, app_schema, vars=config_environ)
+    cfg_dict = trafaret_config.read_and_validate(
+        config_file_path, app_schema, vars=config_environ
+    )
 
     # WARNING: changes to this fixture during testing propagate to other tests. Use
     cfg = deepcopy(cfg_dict)
 
     # FIXME: freeze read/only json obj
@@ -129,6 +146,7 @@
     return cfg_dict
 
+
 @pytest.fixture(scope="function")
 def app_config(_webserver_dev_config: Dict, aiohttp_unused_port) -> Dict:
     """
diff --git a/services/web/server/tests/integration/fixtures/celery_service.py b/services/web/server/tests/integration/fixtures/celery_service.py
index bad896a07b8..a0e17ab8e8d 100644
--- a/services/web/server/tests/integration/fixtures/celery_service.py
+++ b/services/web/server/tests/integration/fixtures/celery_service.py
@@ -23,10 +23,11 @@ def celery_service(_webserver_dev_config, docker_stack):
     wait_till_celery_responsive(url)
 
     yield url
 
+
 @tenacity.retry(wait=tenacity.wait_fixed(0.1), stop=tenacity.stop_after_delay(60))
 def wait_till_celery_responsive(url):
-    app = celery.Celery('tasks', broker=url)
+    app = celery.Celery("tasks", broker=url)
 
-    status = celery.bin.celery.CeleryCommand.commands['status']()
+    status = celery.bin.celery.CeleryCommand.commands["status"]()
     status.app = status.get_app()
-    status.run() # raises celery.bin.base.Error if cannot run
+    status.run()  # raises celery.bin.base.Error if cannot run
diff --git a/services/web/server/tests/integration/fixtures/docker_compose.py b/services/web/server/tests/integration/fixtures/docker_compose.py
index 52179bb2ab9..5555c919a7e 100644
--- a/services/web/server/tests/integration/fixtures/docker_compose.py
+++ b/services/web/server/tests/integration/fixtures/docker_compose.py
@@ -27,7 +27,7 @@ def devel_environ(env_devel_file: Path) -> Dict[str, str]:
     """ Loads and extends .env-devel
 
     """
-    PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$")
+    PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$")
     env_devel = {}
     with env_devel_file.open() as f:
         for line in f:
@@ -37,26 +37,28 @@ def devel_environ(env_devel_file: Path) -> Dict[str, str]:
                 env_devel[key] = str(value)
 
     # Customized EXTENSION: change some of the environ to accommodate the test case ----
-    if 'REGISTRY_SSL' in env_devel:
-        env_devel['REGISTRY_SSL'] = 'False'
-    if 'REGISTRY_URL' in env_devel:
-        env_devel['REGISTRY_URL'] = "{}:5000".format(_get_ip())
-    if 'REGISTRY_USER' in env_devel:
-        env_devel['REGISTRY_USER'] = "simcore"
-    if 'REGISTRY_PW' in env_devel:
-        env_devel['REGISTRY_PW'] = ""
-    if 'REGISTRY_AUTH' in env_devel:
-        env_devel['REGISTRY_AUTH'] = False
-
-    if 'SWARM_STACK_NAME' not in os.environ:
-        env_devel['SWARM_STACK_NAME'] = "simcore"
+    if "REGISTRY_SSL" in env_devel:
+        env_devel["REGISTRY_SSL"] = "False"
+    if "REGISTRY_URL" in env_devel:
+        env_devel["REGISTRY_URL"] = "{}:5000".format(_get_ip())
+    if "REGISTRY_USER" in env_devel:
+        env_devel["REGISTRY_USER"] = "simcore"
+    if "REGISTRY_PW" in env_devel:
+        env_devel["REGISTRY_PW"] = ""
+    if "REGISTRY_AUTH" in env_devel:
+        env_devel["REGISTRY_AUTH"] = False
+
+    if "SWARM_STACK_NAME" not in os.environ:
+        env_devel["SWARM_STACK_NAME"] = "simcore"
 
     return env_devel
 
 
 @pytest.fixture(scope="module")
 def temp_folder(request, tmpdir_factory) -> Path:
-    tmp =
Path(tmpdir_factory.mktemp("docker_compose_{}".format(request.module.__name__))) + tmp = Path( + tmpdir_factory.mktemp("docker_compose_{}".format(request.module.__name__)) + ) yield tmp @@ -71,7 +73,7 @@ def env_file(osparc_simcore_root_dir: Path, devel_environ: Dict[str, str]) -> Pa if env_path.exists(): shutil.copy(env_path, backup_path) - with env_path.open('wt') as fh: + with env_path.open("wt") as fh: print(f"# TEMPORARY .env auto-generated from env_path in {__file__}") for key, value in devel_environ.items(): print(f"{key}={value}", file=fh) @@ -84,35 +86,42 @@ def env_file(osparc_simcore_root_dir: Path, devel_environ: Dict[str, str]) -> Pa backup_path.unlink() - @pytest.fixture("module") -def simcore_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path) -> Dict: +def simcore_docker_compose( + osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path +) -> Dict: """ Resolves docker-compose for simcore stack in local host Produces same as `make .stack-simcore-version.yml` in a temporary folder """ - COMPOSE_FILENAMES = [ - "docker-compose.yml", - "docker-compose.local.yml" - ] + COMPOSE_FILENAMES = ["docker-compose.yml", "docker-compose.local.yml"] # ensures .env at git_root_dir assert env_file.exists() assert env_file.parent == osparc_simcore_root_dir # target docker-compose path - docker_compose_paths = [osparc_simcore_root_dir / "services" / filename - for filename in COMPOSE_FILENAMES] - assert all(docker_compose_path.exists() for docker_compose_path in docker_compose_paths) + docker_compose_paths = [ + osparc_simcore_root_dir / "services" / filename + for filename in COMPOSE_FILENAMES + ] + assert all( + docker_compose_path.exists() for docker_compose_path in docker_compose_paths + ) - config = run_docker_compose_config(docker_compose_paths, + config = run_docker_compose_config( + docker_compose_paths, workdir=env_file.parent, - destination_path=temp_folder / "simcore_docker_compose.yml") + destination_path=temp_folder / "simcore_docker_compose.yml", + ) return config + @pytest.fixture("module") -def ops_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path) -> Dict: +def ops_docker_compose( + osparc_simcore_root_dir: Path, env_file: Path, temp_folder: Path +) -> Dict: """ Filters only services in docker-compose-ops.yml and returns yaml data Produces same as `make .stack-ops.yml` in a temporary folder @@ -122,36 +131,47 @@ def ops_docker_compose(osparc_simcore_root_dir: Path, env_file: Path, temp_folde assert env_file.parent == osparc_simcore_root_dir # target docker-compose path - docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose-ops.yml" + docker_compose_path = ( + osparc_simcore_root_dir / "services" / "docker-compose-ops.yml" + ) assert docker_compose_path.exists() - config = run_docker_compose_config(docker_compose_path, + config = run_docker_compose_config( + docker_compose_path, workdir=env_file.parent, - destination_path=temp_folder / "ops_docker_compose.yml") + destination_path=temp_folder / "ops_docker_compose.yml", + ) return config -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def core_services_config_file(request, temp_folder, simcore_docker_compose): """ Creates a docker-compose config file for every stack of services in'core_services' module variable File is created in a temp folder """ - core_services = getattr(request.module, 'core_services', []) # TODO: PC->SAN could also be defined as a fixture instead of a single variable (as with docker_compose) - assert 
core_services, f"Expected at least one service in 'core_services' within '{request.module.__name__}'" + core_services = getattr( + request.module, "core_services", [] + ) # TODO: PC->SAN could also be defined as a fixture instead of a single variable (as with docker_compose) + assert ( + core_services + ), f"Expected at least one service in 'core_services' within '{request.module.__name__}'" - docker_compose_path = Path(temp_folder / 'simcore_docker_compose.filtered.yml') + docker_compose_path = Path(temp_folder / "simcore_docker_compose.filtered.yml") - _filter_services_and_dump(core_services, simcore_docker_compose, docker_compose_path) + _filter_services_and_dump( + core_services, simcore_docker_compose, docker_compose_path + ) return docker_compose_path -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def ops_services_config_file(request, temp_folder, ops_docker_compose): """ Creates a docker-compose config file for every stack of services in 'ops_services' module variable File is created in a temp folder """ - ops_services = getattr(request.module, 'ops_services', []) - docker_compose_path = Path(temp_folder / 'ops_docker_compose.filtered.yml') + ops_services = getattr(request.module, "ops_services", []) + docker_compose_path = Path(temp_folder / "ops_docker_compose.filtered.yml") _filter_services_and_dump(ops_services, ops_docker_compose, docker_compose_path) @@ -159,40 +179,42 @@ def ops_services_config_file(request, temp_folder, ops_docker_compose): # HELPERS --------------------------------------------- -def _get_ip()->str: +def _get_ip() -> str: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: # doesn't even have to be reachable - s.connect(('10.255.255.255', 1)) + s.connect(("10.255.255.255", 1)) IP = s.getsockname()[0] - except Exception: #pylint: disable=W0703 - IP = '127.0.0.1' + except Exception: # pylint: disable=W0703 + IP = "127.0.0.1" finally: s.close() return IP -def _filter_services_and_dump(include: List, services_compose: Dict, docker_compose_path: Path): +def _filter_services_and_dump( + include: List, services_compose: Dict, docker_compose_path: Path +): content = deepcopy(services_compose) # filters services - remove = [name for name in content['services'] if name not in include] + remove = [name for name in content["services"] if name not in include] for name in remove: - content['services'].pop(name, None) + content["services"].pop(name, None) for name in include: - service = content['services'][name] + service = content["services"][name] # removes builds (No more) if "build" in service: service.pop("build", None) # updates current docker-compose (also versioned ... 
do not change by hand) - with docker_compose_path.open('wt') as fh: - if 'TRAVIS' in os.environ: + with docker_compose_path.open("wt") as fh: + if "TRAVIS" in os.environ: # in travis we do not have access to file print("{:-^100}".format(str(docker_compose_path))) yaml.dump(content, sys.stdout, default_flow_style=False) - print("-"*100) + print("-" * 100) else: # locally we have access to file print(f"Saving config to '{docker_compose_path}'") diff --git a/services/web/server/tests/integration/fixtures/docker_registry.py b/services/web/server/tests/integration/fixtures/docker_registry.py index bea2ab0412c..a0d49514f7e 100644 --- a/services/web/server/tests/integration/fixtures/docker_registry.py +++ b/services/web/server/tests/integration/fixtures/docker_registry.py @@ -9,15 +9,17 @@ import tenacity import time + @pytest.fixture(scope="session") def docker_registry(): # run the registry outside of the stack docker_client = docker.from_env() - container = docker_client.containers.run("registry:2", - ports={"5000":"5000"}, + container = docker_client.containers.run( + "registry:2", + ports={"5000": "5000"}, environment=["REGISTRY_STORAGE_DELETE_ENABLED=true"], - restart_policy={"Name":"always"}, - detach=True + restart_policy={"Name": "always"}, + detach=True, ) host = "127.0.0.1" port = 5000 @@ -28,7 +30,7 @@ def docker_registry(): # test the registry docker_client = docker.from_env() # get the hello world example from docker hub - hello_world_image = docker_client.images.pull("hello-world","latest") + hello_world_image = docker_client.images.pull("hello-world", "latest") # login to private registry docker_client.login(registry=url, username="simcore") # tag the image @@ -50,6 +52,7 @@ def docker_registry(): while docker_client.containers.list(filters={"name": container.name}): time.sleep(1) + @tenacity.retry(wait=tenacity.wait_fixed(1), stop=tenacity.stop_after_delay(60)) def _wait_till_registry_is_responsive(url): docker_client = docker.from_env() @@ -57,7 +60,7 @@ def _wait_till_registry_is_responsive(url): return True -#pull from itisfoundation/sleeper and push into local registry +# pull from itisfoundation/sleeper and push into local registry @pytest.fixture(scope="session") def sleeper_service(docker_registry) -> str: """ Adds a itisfoundation/sleeper in docker registry @@ -73,6 +76,7 @@ def sleeper_service(docker_registry) -> str: assert image yield repo + @pytest.fixture(scope="session") def jupyter_service(docker_registry) -> str: """ Adds a itisfoundation/jupyter-base-notebook in docker registry diff --git a/services/web/server/tests/integration/fixtures/docker_swarm.py b/services/web/server/tests/integration/fixtures/docker_swarm.py index 46255a9de31..8b1abf5f571 100644 --- a/services/web/server/tests/integration/fixtures/docker_swarm.py +++ b/services/web/server/tests/integration/fixtures/docker_swarm.py @@ -14,12 +14,13 @@ import yaml -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_client(): client = docker.from_env() yield client -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def docker_swarm(docker_client): try: docker_client.swarm.reload() @@ -30,33 +31,31 @@ def docker_swarm(docker_client): yield # teardown assert docker_client.swarm.leave(force=True) - -@pytest.fixture(scope='module') -def docker_stack(docker_swarm, docker_client, core_services_config_file: Path, ops_services_config_file: Path): - stacks = { - 'simcore': core_services_config_file, - 'ops': ops_services_config_file - } +@pytest.fixture(scope="module") +def 
docker_stack( + docker_swarm, + docker_client, + core_services_config_file: Path, + ops_services_config_file: Path, +): + stacks = {"simcore": core_services_config_file, "ops": ops_services_config_file} # make up-version stacks_up = [] for stack_name, stack_config_file in stacks.items(): - subprocess.run( f"docker stack deploy -c {stack_config_file.name} {stack_name}", - shell=True, check=True, - cwd=stack_config_file.parent) + subprocess.run( + f"docker stack deploy -c {stack_config_file.name} {stack_name}", + shell=True, + check=True, + cwd=stack_config_file.parent, + ) stacks_up.append(stack_name) # wait for the stack to come up def _wait_for_services(retry_count, max_wait_time_s): - pre_states = [ - "NEW", - "PENDING", - "ASSIGNED", - "PREPARING", - "STARTING" - ] + pre_states = ["NEW", "PENDING", "ASSIGNED", "PREPARING", "STARTING"] services = docker_client.services.list() WAIT_TIME_BEFORE_RETRY = 5 start_time = time.time() @@ -71,21 +70,22 @@ def _wait_for_services(retry_count, max_wait_time_s): print(f"Waiting for {service.name}...") time.sleep(WAIT_TIME_BEFORE_RETRY) - def _print_services(msg): from pprint import pprint + print("{:*^100}".format("docker services running " + msg)) for service in docker_client.services.list(): pprint(service.attrs) - print("-"*100) + print("-" * 100) + RETRY_COUNT = 12 WAIT_TIME_BEFORE_FAILING = 60 _wait_for_services(RETRY_COUNT, WAIT_TIME_BEFORE_FAILING) _print_services("[BEFORE TEST]") yield { - 'stacks': stacks_up, - 'services': [service.name for service in docker_client.services.list()] + "stacks": stacks_up, + "services": [service.name for service in docker_client.services.list()], } _print_services("[AFTER TEST]") @@ -111,10 +111,14 @@ def _print_services(msg): for stack in stacks_up: subprocess.run(f"docker stack rm {stack}", shell=True, check=True) - while docker_client.services.list(filters={"label":f"com.docker.stack.namespace={stack}"}): + while docker_client.services.list( + filters={"label": f"com.docker.stack.namespace={stack}"} + ): time.sleep(WAIT_BEFORE_RETRY_SECS) - while docker_client.networks.list(filters={"label":f"com.docker.stack.namespace={stack}"}): + while docker_client.networks.list( + filters={"label": f"com.docker.stack.namespace={stack}"} + ): time.sleep(WAIT_BEFORE_RETRY_SECS) _print_services("[AFTER REMOVED]") diff --git a/services/web/server/tests/integration/fixtures/postgres_service.py b/services/web/server/tests/integration/fixtures/postgres_service.py index b5ecd1c9ae9..1e183463406 100644 --- a/services/web/server/tests/integration/fixtures/postgres_service.py +++ b/services/web/server/tests/integration/fixtures/postgres_service.py @@ -12,7 +12,7 @@ from sqlalchemy.orm import sessionmaker -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def postgres_db(_webserver_dev_config, webserver_environ, docker_stack): cfg = deepcopy(_webserver_dev_config["db"]["postgres"]) url = DSN.format(**cfg) @@ -30,13 +30,15 @@ def postgres_db(_webserver_dev_config, webserver_environ, docker_stack): metadata.drop_all(engine) engine.dispose() -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def postgres_session(postgres_db): Session = sessionmaker(postgres_db) session = Session() yield session session.close() + @tenacity.retry(**PostgresRetryPolicyUponInitialization().kwargs) def wait_till_postgres_responsive(url): """Check if something responds to ``url`` """ diff --git a/services/web/server/tests/integration/fixtures/rabbit_service.py 
b/services/web/server/tests/integration/fixtures/rabbit_service.py index bee2052a840..5ff28f6c3fe 100644 --- a/services/web/server/tests/integration/fixtures/rabbit_service.py +++ b/services/web/server/tests/integration/fixtures/rabbit_service.py @@ -22,6 +22,7 @@ async def rabbit_service(_webserver_dev_config: Dict, docker_stack): url = "amqp://{}:{}@{}:{}".format(user, password, host, port) await wait_till_rabbit_responsive(url) + @tenacity.retry(wait=tenacity.wait_fixed(0.1), stop=tenacity.stop_after_delay(60)) async def wait_till_rabbit_responsive(url: str): await aio_pika.connect(url) diff --git a/services/web/server/tests/integration/fixtures/redis_service.py b/services/web/server/tests/integration/fixtures/redis_service.py index 2f3ab61f373..66dab115689 100644 --- a/services/web/server/tests/integration/fixtures/redis_service.py +++ b/services/web/server/tests/integration/fixtures/redis_service.py @@ -12,7 +12,7 @@ from yarl import URL -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") async def redis_service(loop, _webserver_dev_config, webserver_environ, docker_stack): cfg = deepcopy(_webserver_dev_config["resource_manager"]["redis"]) @@ -32,7 +32,8 @@ async def wait_till_redis_responsive(redis_url: URL) -> bool: await client.wait_closed() return True -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") async def redis_client(loop, redis_service): client = await aioredis.create_redis_pool(str(redis_service), encoding="utf-8") yield client diff --git a/services/web/server/tests/integration/fixtures/websocket_client.py b/services/web/server/tests/integration/fixtures/websocket_client.py index 04a1affd111..174a6452827 100644 --- a/services/web/server/tests/integration/fixtures/websocket_client.py +++ b/services/web/server/tests/integration/fixtures/websocket_client.py @@ -26,21 +26,26 @@ async def security_cookie(loop, client) -> str: cookie = resp.request_info.headers["Cookie"] yield cookie + @pytest.fixture() async def socketio_url(loop, client) -> str: - SOCKET_IO_PATH = '/socket.io/' + SOCKET_IO_PATH = "/socket.io/" return str(client.make_url(SOCKET_IO_PATH)) + @pytest.fixture() async def socketio_client(socketio_url: str, security_cookie: str): clients = [] async def connect(client_session_id): sio = socketio.AsyncClient() - url = str(URL(socketio_url).with_query({'client_session_id': client_session_id})) - await sio.connect(url, headers={'Cookie': security_cookie}) + url = str( + URL(socketio_url).with_query({"client_session_id": client_session_id}) + ) + await sio.connect(url, headers={"Cookie": security_cookie}) clients.append(sio) return sio + yield connect for sio in clients: await sio.disconnect() diff --git a/services/web/server/tests/integration/test_project_workflow.py b/services/web/server/tests/integration/test_project_workflow.py index 91a154636ca..51261eb14aa 100644 --- a/services/web/server/tests/integration/test_project_workflow.py +++ b/services/web/server/tests/integration/test_project_workflow.py @@ -35,27 +35,27 @@ # Selection of core and tool services started in this swarm fixture (integration) core_services = [ - 'director', - 'postgres', - 'redis', + "director", + "postgres", + "redis", ] ops_services = [ -# 'adminer' + # 'adminer' ] @pytest.fixture -def client(loop, aiohttp_client, - app_config, ## waits until swarm with *_services are up - ): +def client( + loop, aiohttp_client, app_config, ## waits until swarm with *_services are up +): assert app_config["rest"]["version"] == API_VERSION - app_config['main']['testing'] = 
True - app_config['db']['init_tables'] = True + app_config["main"]["testing"] = True + app_config["db"]["init_tables"] = True - app_config['storage']['enabled'] = False - app_config['rabbit']['enabled'] = False + app_config["storage"]["enabled"] = False + app_config["rabbit"]["enabled"] = False pprint(app_config) @@ -69,10 +69,15 @@ def client(loop, aiohttp_client, setup_resource_manager(app) assert setup_projects(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': app_config["main"]["port"], - 'host': app_config['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={ + "port": app_config["main"]["port"], + "host": app_config["main"]["host"], + }, + ) + ) @pytest.fixture(scope="session") @@ -82,6 +87,7 @@ def fake_template_projects(package_dir: Path) -> Dict: with projects_file.open() as fp: return json.load(fp) + @pytest.fixture(scope="session") def fake_template_projects_isan(package_dir: Path) -> Dict: projects_file = package_dir / "data" / "fake-template-projects.isan.json" @@ -89,6 +95,7 @@ def fake_template_projects_isan(package_dir: Path) -> Dict: with projects_file.open() as fp: return json.load(fp) + @pytest.fixture(scope="session") def fake_template_projects_osparc(package_dir: Path) -> Dict: projects_file = package_dir / "data" / "fake-template-projects.osparc.json" @@ -96,29 +103,30 @@ def fake_template_projects_osparc(package_dir: Path) -> Dict: with projects_file.open() as fp: return json.load(fp) + @pytest.fixture def fake_db(): Fake.reset() yield Fake Fake.reset() + @pytest.fixture def fake_project_data(fake_data_dir: Path) -> Dict: with (fake_data_dir / "fake-project.json").open() as fp: return json.load(fp) + @pytest.fixture -async def logged_user(client): #, role: UserRole): +async def logged_user(client): # , role: UserRole): """ adds a user in db and logs in with client NOTE: role fixture is defined as a parametrization below """ - role = UserRole.USER # TODO: parameterize roles + role = UserRole.USER # TODO: parameterize roles async with LoggedUser( - client, - {"role": role.name}, - check_if_succeeds = role!=UserRole.ANONYMOUS + client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS ) as user: yield user await delete_all_projects(client.app) @@ -128,10 +136,14 @@ async def logged_user(client): #, role: UserRole): def computational_system_mock(mocker): # director needs access to service registry which unfortunately cannot be provided for testing. 
For that reason we need to mock # interaction with director - mock_fun = mocker.patch('simcore_service_webserver.projects.projects_handlers.update_pipeline_db', return_value=Future()) + mock_fun = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", + return_value=Future(), + ) mock_fun.return_value.set_result("") return mock_fun + @pytest.fixture async def storage_subsystem_mock(loop, mocker): """ @@ -140,21 +152,29 @@ async def storage_subsystem_mock(loop, mocker): Patched functions are exposed within projects but call storage subsystem """ # requests storage to copy data - mock = mocker.patch('simcore_service_webserver.projects.projects_api.copy_data_folders_from_project') + mock = mocker.patch( + "simcore_service_webserver.projects.projects_api.copy_data_folders_from_project" + ) + async def _mock_copy_data_from_project(*args): return args[2] mock.side_effect = _mock_copy_data_from_project # requests storage to delete data - #mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) - mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project', return_value=Future()) + # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) + mock1 = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", + return_value=Future(), + ) mock1.return_value.set_result("") return mock, mock1 + # Tests CRUD operations -------------------------------------------- # TODO: merge both unit/with_postgress/test_projects + async def _request_list(client) -> List[Dict]: # GET /v0/projects url = client.app.router["list_projects"].url_for() @@ -164,6 +184,7 @@ async def _request_list(client) -> List[Dict]: return projects + async def _request_get(client, pid) -> Dict: url = client.app.router["get_project"].url_for(project_id=pid) resp = await client.get(url) @@ -172,6 +193,7 @@ async def _request_get(client, pid) -> Dict: return project + async def _request_create(client, project): url = client.app.router["create_projects"].url_for() resp = await client.post(url, json=project) @@ -180,6 +202,7 @@ async def _request_create(client, project): return new_project + async def _request_update(client, project, pid): # PUT /v0/projects/{project_id} url = client.app.router["replace_project"].url_for(project_id=pid) @@ -189,6 +212,7 @@ async def _request_update(client, project, pid): return updated_project + async def _request_delete(client, pid): url = client.app.router["delete_project"].url_for(project_id=pid) resp = await client.delete(url) @@ -196,8 +220,13 @@ async def _request_delete(client, pid): await assert_status(resp, web.HTTPNoContent) - -async def test_workflow(client, fake_project_data, logged_user, computational_system_mock, storage_subsystem_mock): +async def test_workflow( + client, + fake_project_data, + logged_user, + computational_system_mock, + storage_subsystem_mock, +): # empty list projects = await _request_list(client) assert not projects @@ -209,13 +238,15 @@ async def test_workflow(client, fake_project_data, logged_user, computational_sy projects = await _request_list(client) assert len(projects) == 1 for key in projects[0].keys(): - if key not in ('uuid', 'prjOwner', 'creationDate', 'lastChangeDate'): + if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate"): assert projects[0][key] == 
fake_project_data[key] modified_project = deepcopy(projects[0]) modified_project["name"] = "some other name" modified_project["description"] = "John Raynor killed Kerrigan" - modified_project["workbench"]["ReNamed"] = modified_project["workbench"].pop( list(modified_project["workbench"].keys())[0] ) + modified_project["workbench"]["ReNamed"] = modified_project["workbench"].pop( + list(modified_project["workbench"].keys())[0] + ) modified_project["workbench"]["ReNamed"]["position"]["x"] = 0 # modify pid = modified_project["uuid"] @@ -226,13 +257,13 @@ async def test_workflow(client, fake_project_data, logged_user, computational_sy assert len(projects) == 1 for key in projects[0].keys(): - if key not in ('lastChangeDate', ): + if key not in ("lastChangeDate",): assert projects[0][key] == modified_project[key] # get project = await _request_get(client, pid) for key in project.keys(): - if key not in ('lastChangeDate', ): + if key not in ("lastChangeDate",): assert project[key] == modified_project[key] # delete @@ -270,10 +301,13 @@ async def test_delete_invalid_project(client, logged_user): await assert_status(resp, web.HTTPNotFound) -async def test_list_template_projects(client, logged_user, fake_db, +async def test_list_template_projects( + client, + logged_user, + fake_db, fake_template_projects, fake_template_projects_isan, - fake_template_projects_osparc + fake_template_projects_osparc, ): fake_db.load_template_projects() url = client.app.router["list_projects"].url_for() @@ -282,6 +316,8 @@ async def test_list_template_projects(client, logged_user, fake_db, projects, _ = await assert_status(resp, web.HTTPOk) # fake-template-projects.json + fake-template-projects.isan.json + fake-template-projects.osparc.json - assert len(projects) == (len(fake_template_projects) + \ - len(fake_template_projects_isan) + \ - len(fake_template_projects_osparc)) + assert len(projects) == ( + len(fake_template_projects) + + len(fake_template_projects_isan) + + len(fake_template_projects_osparc) + ) diff --git a/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py b/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py index 87a6717d978..709aa2aa64b 100644 --- a/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py +++ b/services/web/server/tests/sandbox/TODO - integration-proxy/conftest.py @@ -1,4 +1,3 @@ - """ Tests reverse proxy within an environment having a selection of core and tool services running in a swarm """ @@ -37,15 +36,9 @@ MAX_BOOT_TIME_SECS = 20 # Selection of core and tool services started in this swarm fixture (integration) -core_services = [ - 'director', - '' -] +core_services = ["director", ""] -ops_services = [ - 'adminer', - 'portainer' -] +ops_services = ["adminer", "portainer"] @pytest.fixture(scope="session") @@ -53,11 +46,13 @@ def here() -> Path: return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def osparc_simcore_root_dir(here) -> Path: root_dir = here.parent.parent.parent.parent.parent.resolve() assert root_dir.exists(), "Is this service within osparc-simcore repo?" 
- assert any(root_dir.glob("services/web/server")), "%s not look like rootdir" % root_dir + assert any(root_dir.glob("services/web/server")), ( + "%s not look like rootdir" % root_dir + ) return root_dir @@ -75,21 +70,25 @@ def _load_docker_compose(docker_compose_path) -> Dict[str, str]: content = yaml.safe_load(f) return content + @pytest.fixture("session") def services_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]: docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose.yml" return _load_docker_compose(docker_compose_path) + @pytest.fixture("session") def ops_docker_compose(osparc_simcore_root_dir) -> Dict[str, str]: - docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose-ops.yml" + docker_compose_path = ( + osparc_simcore_root_dir / "services" / "docker-compose-ops.yml" + ) return _load_docker_compose(docker_compose_path) @pytest.fixture("session") def devel_environ(env_devel_file) -> Dict[str, str]: """ Environ dict from .env-devel """ - PATTERN_ENVIRON_EQUAL= re.compile(r"^(\w+)=(.*)$") + PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$") env_devel = {} with env_devel_file.open() as f: for line in f: @@ -107,9 +106,11 @@ def webserver_environ(devel_environ, services_docker_compose) -> Dict[str, str]: """ Environment variables for the webserver application """ - dockerfile_environ = {'SIMCORE_WEB_OUTDIR': "undefined" } # TODO: parse webserver dockerfile ?? + dockerfile_environ = { + "SIMCORE_WEB_OUTDIR": "undefined" + } # TODO: parse webserver dockerfile ?? - service = services_docker_compose['services']['webserver'] + service = services_docker_compose["services"]["webserver"] docker_compose_environ = resolve_environ(service, devel_environ) environ = {} @@ -123,12 +124,15 @@ def webserver_environ(devel_environ, services_docker_compose) -> Dict[str, str]: # the test webserver is built-up in webserver_service fixture that runs # on the host. 
for name in core_services: - environ['%s_HOST' % name.upper()] = '127.0.0.1' - environ['%s_PORT' % name.upper()] = \ - services_docker_compose['services'][name]['ports'][0].split(':')[ - 0] # takes port exposed + environ["%s_HOST" % name.upper()] = "127.0.0.1" + environ["%s_PORT" % name.upper()] = services_docker_compose["services"][name][ + "ports" + ][0].split(":")[ + 0 + ] # takes port exposed # to swarm boundary since webserver is installed in the host and therefore outside the swarm's network from pprint import pprint + pprint(environ) return environ @@ -137,14 +141,15 @@ def webserver_environ(devel_environ, services_docker_compose) -> Dict[str, str]: @pytest.fixture def app_config(here, webserver_environ) -> Dict: config_file_path = here / "config.yaml" + def _recreate_config_file(): with app_resources.stream("config/server-docker-dev.yaml") as f: cfg = yaml.safe_load(f) # test webserver works in host - cfg["main"]['host'] = '127.0.0.1' + cfg["main"]["host"] = "127.0.0.1" cfg["director"]["host"] = "127.0.0.1" - with config_file_path.open('wt') as f: + with config_file_path.open("wt") as f: yaml.dump(cfg, f, default_flow_style=False) _recreate_config_file() @@ -154,10 +159,14 @@ def _recreate_config_file(): # Emulates cli config_environ = {} config_environ.update(webserver_environ) - config_environ.update( create_environ(skip_host_environ=True) ) # TODO: can be done monkeypathcing os.environ and calling create_environ as well + config_environ.update( + create_environ(skip_host_environ=True) + ) # TODO: can be done monkeypathcing os.environ and calling create_environ as well # validates - cfg_dict = trafaret_config.read_and_validate(config_file_path, app_schema, vars=config_environ) + cfg_dict = trafaret_config.read_and_validate( + config_file_path, app_schema, vars=config_environ + ) yield cfg_dict @@ -166,17 +175,18 @@ def _recreate_config_file(): config_file_path.unlink() - # DOCKER STACK ------------------------------------------- -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_compose_file(here, services_docker_compose, devel_environ): """ Overrides pytest-docker fixture """ - docker_compose_path = here / 'docker-compose.yml' + docker_compose_path = here / "docker-compose.yml" # creates a docker-compose file only with SERVICES and replaces environ - _recreate_compose_file(core_services, services_docker_compose, docker_compose_path, devel_environ) + _recreate_compose_file( + core_services, services_docker_compose, docker_compose_path, devel_environ + ) logger.info(get_content_formatted(docker_compose_path)) @@ -186,29 +196,32 @@ def docker_compose_file(here, services_docker_compose, devel_environ): docker_compose_path.unlink() - -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_client(): client = docker.from_env() yield client -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def docker_swarm(docker_client): docker_client.swarm.init() yield assert docker_client.swarm.leave(force=True) == True -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def docker_stack(docker_swarm, docker_client, docker_compose_file: Path): """ """ - assert subprocess.run( + assert ( + subprocess.run( "docker stack deploy -c {} services".format(docker_compose_file.name), shell=True, - cwd=docker_compose_file.parent - ).returncode == 0 + cwd=docker_compose_file.parent, + ).returncode + == 0 + ) # NOTE: # ``failed to create service services_apihub: Error response from daemon: network services_default not found``` 
# workaround is to restart daemon: ``sudo systemctl restart docker``` @@ -224,7 +237,6 @@ def docker_stack(docker_swarm, docker_client, docker_compose_file: Path): assert subprocess.run("docker stack rm services", shell=True).returncode == 0 - # CORE SERVICES --------------------------------------------- # @pytest.fixture(scope='session') # def director_service(docker_services, docker_ip): @@ -235,16 +247,15 @@ def docker_stack(docker_swarm, docker_client, docker_compose_file: Path): # return docker_ip, docker_services.port_for('director', 8001) - - # HELPERS --------------------------------------------- # TODO: should be reused integration-* + def get_content_formatted(textfile: Path) -> str: return "{:=^10s}\n{}\n{:=^10s}".format( - str(textfile), - textfile.read_text("utf8"), - '') + str(textfile), textfile.read_text("utf8"), "" + ) + def resolve_environ(service, environ): _environs = {} @@ -262,21 +273,23 @@ def resolve_environ(service, environ): return _environs -def _recreate_compose_file(keep, services_compose, docker_compose_path: Path, devel_environ): +def _recreate_compose_file( + keep, services_compose, docker_compose_path: Path, devel_environ +): # reads service/docker-compose.yml content = deepcopy(services_compose) # remove unnecessary services - remove = [name for name in content['services'] if name not in keep] + remove = [name for name in content["services"] if name not in keep] for name in remove: - content['services'].pop(name, None) + content["services"].pop(name, None) for name in keep: - service = content['services'][name] + service = content["services"][name] # remove builds if "build" in service: service.pop("build", None) - service['image'] = "services_{}:latest".format(name) + service["image"] = "services_{}:latest".format(name) # replaces environs if "environment" in service: _environs = {} @@ -285,7 +298,9 @@ def _recreate_compose_file(keep, services_compose, docker_compose_path: Path, de if value.startswith("${") and value.endswith("}"): value = devel_environ.get(value[2:-1], value) _environs[key] = value - service["environment"] = [ "{}={}".format(k,v) for k,v in _environs.items() ] + service["environment"] = [ + "{}={}".format(k, v) for k, v in _environs.items() + ] # updates current docker-compose (also versioned ... 
do not change by hand) - with docker_compose_path.open('wt') as f: + with docker_compose_path.open("wt") as f: yaml.dump(content, f, default_flow_style=False) diff --git a/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py b/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py index bbd65aeedac..8e41d7aa844 100644 --- a/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py +++ b/services/web/server/tests/sandbox/TODO - integration-proxy/test_application_proxy.py @@ -15,28 +15,35 @@ import simcore_service_webserver.reverse_proxy.handlers.paraview as rp_paraview from servicelib.application import create_safe_application from servicelib.rest_responses import unwrap_envelope -from simcore_service_webserver.application import (setup_app_proxy, - setup_director, setup_rest) +from simcore_service_webserver.application import ( + setup_app_proxy, + setup_director, + setup_rest, +) from simcore_service_webserver.reverse_proxy.settings import PROXY_MOUNTPOINT -API_VERSION = 'v0' +API_VERSION = "v0" + + @pytest.fixture -#def webserver_service(loop, app_config, director_service, aiohttp_unused_port, aiohttp_server, here): -#def webserver_service(loop, app_config, aiohttp_unused_port, aiohttp_server, here): -def webserver_service(docker_stack, loop, app_config, aiohttp_unused_port, aiohttp_server, here): +# def webserver_service(loop, app_config, director_service, aiohttp_unused_port, aiohttp_server, here): +# def webserver_service(loop, app_config, aiohttp_unused_port, aiohttp_server, here): +def webserver_service( + docker_stack, loop, app_config, aiohttp_unused_port, aiohttp_server, here +): # OVERRIDES app_config: # - server lives with the testing framework - port = app_config['main']['port'] = aiohttp_unused_port() - host = app_config['main']['host'] = '127.0.0.1' + port = app_config["main"]["port"] = aiohttp_unused_port() + host = app_config["main"]["host"] = "127.0.0.1" # - disable some subsystems - app_config['rabbit']['enabled'] = False - app_config['db']['enabled'] = False - app_config['storage']['enabled'] = False + app_config["rabbit"]["enabled"] = False + app_config["db"]["enabled"] = False + app_config["storage"]["enabled"] = False # TODO: parse_and_validate config_app_path = here / "config.app.yaml" - with (config_app_path).open('wt') as f: + with (config_app_path).open("wt") as f: yaml.dump(app_config, f, default_flow_style=False) # app @@ -44,7 +51,7 @@ def webserver_service(docker_stack, loop, app_config, aiohttp_unused_port, aioht setup_rest(app) setup_director(app, disable_login=True) - setup_app_proxy(app) # <-----------|UNDER TEST + setup_app_proxy(app) # <-----------|UNDER TEST server = loop.run_until_complete(aiohttp_server(app, port=port)) @@ -52,25 +59,32 @@ def webserver_service(docker_stack, loop, app_config, aiohttp_unused_port, aioht config_app_path.unlink() + @pytest.fixture -def client(loop, webserver_service, aiohttp_client): +def client(loop, webserver_service, aiohttp_client): """ webserver's API client """ - client = loop.run_until_complete(aiohttp_client(webserver_service) ) + client = loop.run_until_complete(aiohttp_client(webserver_service)) return client # TESTS ---------------------------------------------------------------------------- # + [(service_key, "????", "NJKfISIRB-%d"%i) for i, service_key in enumerate(rp_jupyter.SUPPORTED_IMAGE_NAME)] -@pytest.mark.parametrize("service_key,service_version,service_uuid", [ - (rp_jupyter.SUPPORTED_IMAGE_NAME[0], "1.7.0", 
"NJKfISIRB"), - ("simcore/services/dynamic/raw-graphs", "2.8.0", "4J6GoxSNL"), - ("simcore/services/dynamic/modeler/webserver", "2.7.0", "4k4zZL90S"), - #(rp_paraview.SUPPORTED_IMAGE_NAME, "1.0.5", "EkE7LSU0r"), - ]) -async def test_reverse_proxy_workflow(client, service_key, service_version, service_uuid): + +@pytest.mark.parametrize( + "service_key,service_version,service_uuid", + [ + (rp_jupyter.SUPPORTED_IMAGE_NAME[0], "1.7.0", "NJKfISIRB"), + ("simcore/services/dynamic/raw-graphs", "2.8.0", "4J6GoxSNL"), + ("simcore/services/dynamic/modeler/webserver", "2.7.0", "4k4zZL90S"), + # (rp_paraview.SUPPORTED_IMAGE_NAME, "1.0.5", "EkE7LSU0r"), + ], +) +async def test_reverse_proxy_workflow( + client, service_key, service_version, service_uuid +): """ client <--> webserver <--> director @@ -79,59 +93,68 @@ async def test_reverse_proxy_workflow(client, service_key, service_version, serv - Tests webserserver.reverser proxy subsystem as well """ # List services in registry ------------------------------------------------ - resp = await client.get("/"+API_VERSION+"/services?service_type=interactive") - assert resp.status == 200, (await resp.text()) + resp = await client.get("/" + API_VERSION + "/services?service_type=interactive") + assert resp.status == 200, await resp.text() payload = await resp.json() data, error = unwrap_envelope(payload) assert data assert not error - assert any(srv['key']==service_key and srv['version']==service_version for srv in data), \ - "version of service NOT listed in registry" + assert any( + srv["key"] == service_key and srv["version"] == service_version for srv in data + ), "version of service NOT listed in registry" # Start backend dynamic service ------------------------------------------------ - resp = await client.post( URL("/"+API_VERSION+"/running_interactive_services").with_query( - service_key=service_key, - service_version =service_version, - service_uuid = service_uuid) + resp = await client.post( + URL("/" + API_VERSION + "/running_interactive_services").with_query( + service_key=service_key, + service_version=service_version, + service_uuid=service_uuid, + ) ) - assert resp.status == 201, (await resp.text()) + assert resp.status == 201, await resp.text() payload = await resp.json() data, error = unwrap_envelope(payload) assert data assert not error - service_basepath = data['service_basepath'] + service_basepath = data["service_basepath"] assert service_basepath == PROXY_MOUNTPOINT + "/" + service_uuid # Wait until service is responsive---------------------------------------------- - #TODO: all dynamic services boot time should be bounded!! + # TODO: all dynamic services boot time should be bounded!! 
WAIT_FIXED_SECS = 5 MAX_TRIALS = 5 count = 0 - while count>> msg: %s', pprint.pformat(msg)) + logger.info(">>> msg: %s", pprint.pformat(msg)) mt = msg.type md = msg.data if mt == aiohttp.WSMsgType.TEXT: @@ -46,27 +47,29 @@ async def ws_forward(ws_from, ws_to): await ws_to.close(code=ws_to.close_code, message=msg.extra) else: raise ValueError( - 'unexpected message type: %s' % pprint.pformat(msg)) + "unexpected message type: %s" % pprint.pformat(msg) + ) - await asyncio.wait([ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], return_when=asyncio.FIRST_COMPLETED) + await asyncio.wait( + [ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], + return_when=asyncio.FIRST_COMPLETED, + ) return ws_server else: async with client.request( - req.method, baseUrl+mountPoint+proxyPath, + req.method, + baseUrl + mountPoint + proxyPath, headers=reqH, allow_redirects=False, - data=await req.read() + data=await req.read(), ) as res: headers = res.headers.copy() body = await res.read() - return web.Response( - headers=headers, - status=res.status, - body=body - ) + return web.Response(headers=headers, status=res.status, body=body) return ws_server + app = web.Application() -app.router.add_route('*', mountPoint + '{proxyPath:.*}', handler) +app.router.add_route("*", mountPoint + "{proxyPath:.*}", handler) web.run_app(app, port=3984) diff --git a/services/web/server/tests/sandbox/paraview-proxy.py b/services/web/server/tests/sandbox/paraview-proxy.py index 3dc6de60a0d..1b5651789f7 100644 --- a/services/web/server/tests/sandbox/paraview-proxy.py +++ b/services/web/server/tests/sandbox/paraview-proxy.py @@ -9,29 +9,30 @@ logger = logging.getLogger(__name__) -baseUrl = 'http://0.0.0.0:8080' -mountPoint = '/fakeUuid' +baseUrl = "http://0.0.0.0:8080" +mountPoint = "/fakeUuid" async def handler(req): - proxyPath = req.match_info.get( - 'proxyPath', 'no proxyPath placeholder defined') + proxyPath = req.match_info.get("proxyPath", "no proxyPath placeholder defined") reqH = req.headers.copy() - if reqH['connection'].lower() == 'upgrade' and reqH['upgrade'].lower() == 'websocket' and req.method == 'GET': + if ( + reqH["connection"].lower() == "upgrade" + and reqH["upgrade"].lower() == "websocket" + and req.method == "GET" + ): ws_server = web.WebSocketResponse() await ws_server.prepare(req) - logger.info('##### WS_SERVER %s', pprint.pformat(ws_server)) + logger.info("##### WS_SERVER %s", pprint.pformat(ws_server)) client_session = aiohttp.ClientSession(cookies=req.cookies) - async with client_session.ws_connect( - baseUrl+proxyPath, - ) as ws_client: - logger.info('##### WS_CLIENT %s', pprint.pformat(ws_client)) + async with client_session.ws_connect(baseUrl + proxyPath,) as ws_client: + logger.info("##### WS_CLIENT %s", pprint.pformat(ws_client)) async def ws_forward(ws_from, ws_to): async for msg in ws_from: - #logger.info('>>> msg: %s',pprint.pformat(msg)) + # logger.info('>>> msg: %s',pprint.pformat(msg)) mt = msg.type md = msg.data if mt == aiohttp.WSMsgType.TEXT: @@ -46,34 +47,36 @@ async def ws_forward(ws_from, ws_to): await ws_to.close(code=ws_to.close_code, message=msg.extra) else: raise ValueError( - 'unexpected message type: %s' % pprint.pformat(msg)) + "unexpected message type: %s" % pprint.pformat(msg) + ) - await asyncio.wait([ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], return_when=asyncio.FIRST_COMPLETED) + await asyncio.wait( + [ws_forward(ws_server, ws_client), ws_forward(ws_client, ws_server)], + return_when=asyncio.FIRST_COMPLETED, + ) return 
ws_server else: async with client.request( - req.method, baseUrl+proxyPath, + req.method, + baseUrl + proxyPath, headers=reqH, allow_redirects=False, - data=await req.read() + data=await req.read(), ) as res: headers = res.headers.copy() - del headers['content-length'] + del headers["content-length"] body = await res.read() - if proxyPath == '/Visualizer.js': - body = body.replace(b'"/ws"', b'"%s/ws"' % - mountPoint.encode(), 1) + if proxyPath == "/Visualizer.js": + body = body.replace(b'"/ws"', b'"%s/ws"' % mountPoint.encode(), 1) body = body.replace( - b'"/paraview/"', b'"%s/paraview/"' % mountPoint.encode(), 1) + b'"/paraview/"', b'"%s/paraview/"' % mountPoint.encode(), 1 + ) logger.info("fixed Visualizer.js paths on the fly") - return web.Response( - headers=headers, - status=res.status, - body=body - ) + return web.Response(headers=headers, status=res.status, body=body) return ws_server + app = web.Application() -app.router.add_route('*', mountPoint + '{proxyPath:.*}', handler) +app.router.add_route("*", mountPoint + "{proxyPath:.*}", handler) web.run_app(app, port=3985) diff --git a/services/web/server/tests/sandbox/reverse_proxy.py b/services/web/server/tests/sandbox/reverse_proxy.py index f9c479334ef..2b147a641c0 100644 --- a/services/web/server/tests/sandbox/reverse_proxy.py +++ b/services/web/server/tests/sandbox/reverse_proxy.py @@ -16,11 +16,8 @@ from simcore_service_webserver.reverse_proxy import APP_SOCKETS_KEY if __name__ == "__main__": - BASE_URL = 'http://0.0.0.0:8888' - MOUNT_POINT = '/x/12345' - - - + BASE_URL = "http://0.0.0.0:8888" + MOUNT_POINT = "/x/12345" def adapter(req: web.Request): return rp_handlers.generic.handler(req, service_url=BASE_URL) @@ -28,5 +25,5 @@ def adapter(req: web.Request): app = web.Application() app[APP_SOCKETS_KEY] = list() - app.router.add_route('*', MOUNT_POINT + '/{proxyPath:.*}', adapter) + app.router.add_route("*", MOUNT_POINT + "/{proxyPath:.*}", adapter) web.run_app(app, port=3984) diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py index 1353d4c9705..d68411e366a 100644 --- a/services/web/server/tests/unit/conftest.py +++ b/services/web/server/tests/unit/conftest.py @@ -28,14 +28,14 @@ log = logging.getLogger(__name__) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def here(): cdir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent assert cdir == current_dir, "Somebody changing current_dir?" 
return cdir -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def fake_static_dir(fake_data_dir: Path) -> Dict: return fake_data_dir / "static" @@ -45,10 +45,12 @@ def fake_project(fake_data_dir: Path) -> Dict: with (fake_data_dir / "fake-project.json").open() as fp: yield json.load(fp) + @pytest.fixture def api_version_prefix() -> str: return "v0" + @pytest.fixture def empty_project(): def create(): @@ -60,15 +62,18 @@ def create(): "creationDate": now_str(), "lastChangeDate": now_str(), "thumbnail": "", - "workbench": {} + "workbench": {}, } return empty_project + return create @pytest.fixture def project_schema_file(api_version_prefix) -> Path: - prj_schema_path = resources.get_path(f"api/{api_version_prefix}/schemas/project-v0.0.1.json") + prj_schema_path = resources.get_path( + f"api/{api_version_prefix}/schemas/project-v0.0.1.json" + ) assert prj_schema_path.exists() return prj_schema_path @@ -78,7 +83,8 @@ def activity_data(fake_data_dir: Path) -> Dict: with (fake_data_dir / "test_activity_data.json").open() as fp: yield json.load(fp) + @pytest.fixture def test_tags_data(fake_data_dir: Path) -> Dict: - with (fake_data_dir / 'test_tags_data.json').open() as fp: - yield json.load(fp).get('added_tags') + with (fake_data_dir / "test_tags_data.json").open() as fp: + yield json.load(fp).get("added_tags") diff --git a/services/web/server/tests/unit/test_activity.py b/services/web/server/tests/unit/test_activity.py index a03d21cb101..79068d489c3 100644 --- a/services/web/server/tests/unit/test_activity.py +++ b/services/web/server/tests/unit/test_activity.py @@ -28,49 +28,65 @@ def future_with_result(result): @pytest.fixture def mocked_login_required(mocker): mock = mocker.patch( - 'simcore_service_webserver.login.decorators.login_required', - lambda h: h) + "simcore_service_webserver.login.decorators.login_required", lambda h: h + ) importlib.reload(handlers) return mock + @pytest.fixture def mocked_monitoring(loop, mocker, activity_data): - prometheus_data = activity_data.get('prometheus') - cpu_ret = prometheus_data.get('cpu_return') - mocker.patch('simcore_service_webserver.activity.handlers.get_cpu_usage', - return_value=future_with_result(cpu_ret)) + prometheus_data = activity_data.get("prometheus") + cpu_ret = prometheus_data.get("cpu_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_cpu_usage", + return_value=future_with_result(cpu_ret), + ) - mem_ret = prometheus_data.get('memory_return') - mocker.patch('simcore_service_webserver.activity.handlers.get_memory_usage', - return_value=future_with_result(mem_ret)) + mem_ret = prometheus_data.get("memory_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_memory_usage", + return_value=future_with_result(mem_ret), + ) + + labels_ret = prometheus_data.get("labels_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_container_metric_for_labels", + return_value=future_with_result(labels_ret), + ) - labels_ret = prometheus_data.get('labels_return') - mocker.patch('simcore_service_webserver.activity.handlers.get_container_metric_for_labels', - return_value=future_with_result(labels_ret)) + celery_data = activity_data.get("celery") + celery_ret = celery_data.get("celery_return") + mocker.patch( + "simcore_service_webserver.activity.handlers.get_celery_reserved", + return_value=future_with_result(celery_ret), + ) - celery_data = activity_data.get('celery') - celery_ret = celery_data.get('celery_return') - 
mocker.patch('simcore_service_webserver.activity.handlers.get_celery_reserved', - return_value=future_with_result(celery_ret)) @pytest.fixture def mocked_monitoring_down(mocker): mocker.patch( - 'simcore_service_webserver.activity.handlers.query_prometheus', - side_effect=ClientConnectionError) + "simcore_service_webserver.activity.handlers.query_prometheus", + side_effect=ClientConnectionError, + ) mocker.patch( - 'simcore_service_webserver.activity.handlers.celery_reserved', - side_effect=ClientConnectionError) + "simcore_service_webserver.activity.handlers.celery_reserved", + side_effect=ClientConnectionError, + ) return mocker + @pytest.fixture def app_config(fake_data_dir: Path, osparc_simcore_root_dir: Path): - with open(fake_data_dir/"test_activity_config.yml") as fh: + with open(fake_data_dir / "test_activity_config.yml") as fh: content = fh.read() - config = content.replace("${OSPARC_SIMCORE_REPO_ROOTDIR}", str(osparc_simcore_root_dir)) + config = content.replace( + "${OSPARC_SIMCORE_REPO_ROOTDIR}", str(osparc_simcore_root_dir) + ) return yaml.safe_load(config) + @pytest.fixture def client(loop, aiohttp_client, app_config): app = create_safe_application(app_config) @@ -85,35 +101,37 @@ def client(loop, aiohttp_client, app_config): async def test_has_login_required(client): - resp = await client.get('/v0/activity/status') + resp = await client.get("/v0/activity/status") await assert_status(resp, web.HTTPUnauthorized) + async def test_monitoring_up(mocked_login_required, mocked_monitoring, client): - QUEUED_NODE_ID = '35f95ad4-67b8-4ed8-bd55-84a5d600e687' - RUNNING_NODE_ID = '894dd8d5-de3b-4767-950c-7c3ed8f51d8c' + QUEUED_NODE_ID = "35f95ad4-67b8-4ed8-bd55-84a5d600e687" + RUNNING_NODE_ID = "894dd8d5-de3b-4767-950c-7c3ed8f51d8c" - resp = await client.get('/v0/activity/status') + resp = await client.get("/v0/activity/status") data, _ = await assert_status(resp, web.HTTPOk) - assert QUEUED_NODE_ID in data, 'Queued node not present' - assert RUNNING_NODE_ID in data, 'Running node not present' + assert QUEUED_NODE_ID in data, "Queued node not present" + assert RUNNING_NODE_ID in data, "Running node not present" celery = data.get(QUEUED_NODE_ID) prometheus = data.get(RUNNING_NODE_ID) - assert 'queued' in celery, 'There is no queued key for queued node' - assert celery.get('queued'), 'Queued should be True for queued node' + assert "queued" in celery, "There is no queued key for queued node" + assert celery.get("queued"), "Queued should be True for queued node" + + assert "limits" in prometheus, "There is no limits key for executing node" + assert "stats" in prometheus, "There is no stats key for executed node" - assert 'limits' in prometheus, 'There is no limits key for executing node' - assert 'stats' in prometheus, 'There is no stats key for executed node' + limits = prometheus.get("limits") + assert limits.get("cpus") == 4.0, "Incorrect value: Cpu limit" + assert limits.get("mem") == 2048.0, "Incorrect value: Memory limit" - limits = prometheus.get('limits') - assert limits.get('cpus') == 4.0, 'Incorrect value: Cpu limit' - assert limits.get('mem') == 2048.0, 'Incorrect value: Memory limit' + stats = prometheus.get("stats") + assert stats.get("cpuUsage") == 3.9952102200000006, "Incorrect value: Cpu usage" + assert stats.get("memUsage") == 177.664, "Incorrect value: Memory usage" - stats = prometheus.get('stats') - assert stats.get('cpuUsage') == 3.9952102200000006, 'Incorrect value: Cpu usage' - assert stats.get('memUsage') == 177.664, 'Incorrect value: Memory usage' async def 
test_monitoring_down(mocked_login_required, mocked_monitoring_down, client): - resp = await client.get('/v0/activity/status') + resp = await client.get("/v0/activity/status") await assert_status(resp, web.HTTPNoContent) diff --git a/services/web/server/tests/unit/test_configs.py b/services/web/server/tests/unit/test_configs.py index bb8919f5f8b..e3fb1a7c363 100644 --- a/services/web/server/tests/unit/test_configs.py +++ b/services/web/server/tests/unit/test_configs.py @@ -18,16 +18,19 @@ from simcore_service_webserver.application_config import create_schema from simcore_service_webserver.cli import parse, setup_parser from simcore_service_webserver.login import APP_CONFIG_KEY -from simcore_service_webserver.login import \ - CONFIG_SECTION_NAME as LOGIN_SECTION -from simcore_service_webserver.login import (DB_SECTION, SMTP_SECTION, - _create_login_config) +from simcore_service_webserver.login import CONFIG_SECTION_NAME as LOGIN_SECTION +from simcore_service_webserver.login import ( + DB_SECTION, + SMTP_SECTION, + _create_login_config, +) from simcore_service_webserver.login.cfg import DEFAULTS as CONFIG_DEFAULTS from simcore_service_webserver.login.cfg import Cfg from simcore_service_webserver.resources import resources from utils_environs import eval_service_environ, load_env -config_yaml_filenames = [str(name) for name in resources.listdir("config") ] +config_yaml_filenames = [str(name) for name in resources.listdir("config")] + @pytest.fixture("session") def app_config_schema(): @@ -55,39 +58,50 @@ def devel_environ(env_devel_file): env_devel = load_env(f) return env_devel + @pytest.fixture("session") -def service_webserver_environ(services_docker_compose_file, devel_environ, osparc_simcore_root_dir): +def service_webserver_environ( + services_docker_compose_file, devel_environ, osparc_simcore_root_dir +): """ Creates a dict with the environment variables inside of a webserver container """ host_environ = devel_environ image_environ = { - 'SIMCORE_WEB_OUTDIR': 'home/scu/services/web/client', # defined in Dockerfile - 'OSPARC_SIMCORE_REPO_ROOTDIR': str(osparc_simcore_root_dir) # defined if pip install --edit (but not in travis!) + "SIMCORE_WEB_OUTDIR": "home/scu/services/web/client", # defined in Dockerfile + "OSPARC_SIMCORE_REPO_ROOTDIR": str( + osparc_simcore_root_dir + ), # defined if pip install --edit (but not in travis!) } - webserver_environ = eval_service_environ(services_docker_compose_file, "webserver", - host_environ, image_environ, use_env_devel=True) + webserver_environ = eval_service_environ( + services_docker_compose_file, + "webserver", + host_environ, + image_environ, + use_env_devel=True, + ) return webserver_environ - @pytest.fixture("session") def app_submodules_with_setup_funs(package_dir) -> List: """ subsystem = all modules in package with a setup function """ + def is_py_module(path: Path) -> bool: - return not path.name.startswith((".", "__")) and \ - ( path.suffix == ".py" or any(path.glob("__init__.py")) ) + return not path.name.startswith((".", "__")) and ( + path.suffix == ".py" or any(path.glob("__init__.py")) + ) modules = [] for path in package_dir.iterdir(): if is_py_module(path): name = path.name.replace(path.suffix, "") module = importlib.import_module("." 
+ name, package_dir.name) - if module.__name__ != 'simcore_service_webserver.application': + if module.__name__ != "simcore_service_webserver.application": if any(inspect.getmembers(module, is_setup_function)): modules.append(module) @@ -102,32 +116,36 @@ def app_subsystems(app_submodules_with_setup_funs) -> List[Dict]: setup_members = inspect.getmembers(module, is_setup_function) if setup_members: # finds setup for module - module_name = module.__name__.replace(".__init__", '') + module_name = module.__name__.replace(".__init__", "") setup_fun = None for name, fun in setup_members: - if fun.metadata()['module_name'] == module_name: + if fun.metadata()["module_name"] == module_name: setup_fun = fun break - assert setup_fun, f"None of {setup_members} are setup funs for {module_name}" + assert ( + setup_fun + ), f"None of {setup_members} are setup funs for {module_name}" metadata.append(setup_fun.metadata()) return metadata - # TESTS ---------------------------------------------------------------------- + @pytest.mark.parametrize("configfile", config_yaml_filenames) def test_correctness_under_environ(configfile, service_webserver_environ): parser = setup_parser(argparse.ArgumentParser("test-parser")) - with mock.patch('os.environ', service_webserver_environ): + with mock.patch("os.environ", service_webserver_environ): cmd = ["-c", configfile] config = parse(cmd, parser) for key, value in config.items(): - assert value != 'None', "Use instead Null in {} for {}".format(configfile, key) + assert value != "None", "Use instead Null in {} for {}".format( + configfile, key + ) # adds some defaults checks here @@ -137,14 +155,16 @@ def test_setup_per_app_subsystem(app_submodules_with_setup_funs): setup_members = inspect.getmembers(module, is_setup_function) if setup_members: # finds setup for module - module_name = module.__name__.replace(".__init__", '') + module_name = module.__name__.replace(".__init__", "") setup_fun = None for name, fun in setup_members: - if fun.metadata()['module_name'] == module_name: + if fun.metadata()["module_name"] == module_name: setup_fun = fun break - assert setup_fun, f"None of {setup_members} are setup funs for {module_name}" + assert ( + setup_fun + ), f"None of {setup_members} are setup funs for {module_name}" def test_schema_sections(app_config_schema, app_subsystems): @@ -153,7 +173,10 @@ def test_schema_sections(app_config_schema, app_subsystems): Every section in the config-file (except for 'version' and 'main') is named after an application's subsystem """ - section_names= [ metadata['config_section'] for metadata in app_subsystems] + ['version', 'main'] + section_names = [metadata["config_section"] for metadata in app_subsystems] + [ + "version", + "main", + ] for section in app_config_schema.keys: assert section.name in section_names, "Check application config schema!" 
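Both config tests in this file reject the literal string "None" because of a YAML parsing gotcha: PyYAML reads a bare None in a config file as the string "None", whereas null (or ~) maps to Python's None. A minimal sketch of the distinction (assumes only PyYAML, as used throughout these tests; not part of the patch itself):

    import yaml

    # the bare word None survives as a string, which is usually a config mistake
    assert yaml.safe_load("value: None") == {"value": "None"}
    # null and ~ are the YAML spellings that become Python's None
    assert yaml.safe_load("value: null") == {"value": None}
    assert yaml.safe_load("value: ~") == {"value": None}

This is why the assertion message above asks contributors to write Null in the config files rather than None.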
@@ -163,27 +186,28 @@ def test_schema_sections(app_config_schema, app_subsystems): def test_creation_of_login_config(configfile, service_webserver_environ): parser = setup_parser(argparse.ArgumentParser("test-parser")) - with mock.patch('os.environ', service_webserver_environ): + with mock.patch("os.environ", service_webserver_environ): app_config = parse(["-c", configfile], parser) for key, value in app_config.items(): - assert value != 'None', "Use instead Null in {} for {}".format(configfile, key) + assert value != "None", "Use instead Null in {} for {}".format( + configfile, key + ) # sections of app config used assert LOGIN_SECTION in app_config.keys() assert SMTP_SECTION in app_config.keys() assert DB_SECTION in app_config.keys() - # creates update config - fake_app = { APP_CONFIG_KEY: app_config} + fake_app = {APP_CONFIG_KEY: app_config} fake_storage = object() update_cfg = _create_login_config(fake_app, fake_storage) assert all( - value.lower() is not ['none', 'null', ''] - for value in update_cfg.values() - if isinstance(value, str) + value.lower() is not ["none", "null", ""] + for value in update_cfg.values() + if isinstance(value, str) ) # creates login.cfg diff --git a/services/web/server/tests/unit/test_consistency.py b/services/web/server/tests/unit/test_consistency.py index 2cbbe6af4be..29d54146e85 100644 --- a/services/web/server/tests/unit/test_consistency.py +++ b/services/web/server/tests/unit/test_consistency.py @@ -5,13 +5,21 @@ def test_docker_composes_service_versions(osparc_simcore_root_dir: Path, here: Path): # look for main docker-compose file - main_docker_compose_path = osparc_simcore_root_dir / "services" / "docker-compose.yml" + main_docker_compose_path = ( + osparc_simcore_root_dir / "services" / "docker-compose.yml" + ) main_docker_compose_specs = yaml.safe_load(main_docker_compose_path.open()) - main_services_image_names = [service["image"] for _service_name, service in main_docker_compose_specs["services"].items()] + main_services_image_names = [ + service["image"] + for _service_name, service in main_docker_compose_specs["services"].items() + ] # look for other docker-compose files in test folders - for compose_file in here.glob('**/docker-compose.yml'): + for compose_file in here.glob("**/docker-compose.yml"): compose_specs = yaml.safe_load(compose_file.open()) - service_image_names = [service["image"] for _service_name, service in compose_specs["services"].items()] + service_image_names = [ + service["image"] + for _service_name, service in compose_specs["services"].items() + ] assert all(elem in main_services_image_names for elem in service_image_names) diff --git a/services/web/server/tests/unit/test_package.py b/services/web/server/tests/unit/test_package.py index 7939a7158ed..28d563a15cb 100644 --- a/services/web/server/tests/unit/test_package.py +++ b/services/web/server/tests/unit/test_package.py @@ -20,30 +20,32 @@ def pylintrc(osparc_simcore_root_dir): assert pylintrc.exists() return pylintrc + def test_run_pylint(pylintrc, package_dir): try: - AUTODETECT=0 - cmd = f'pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}'.split() + AUTODETECT = 0 + cmd = f"pylint --jobs={AUTODETECT} --rcfile {pylintrc} -v {package_dir}".split() assert subprocess.check_call(cmd) == 0 except subprocess.CalledProcessError as err: pytest.fail("Linting error. 
Linter exited with code %d" % err.returncode)

-def test_main(here): # pylint: disable=unused-variable
+def test_main(here):  # pylint: disable=unused-variable
     with pytest.raises(SystemExit) as excinfo:
         main("--help".split())
     assert excinfo.value.code == 0

+
 def test_no_pdbs_in_place(package_dir):
     # TODO: add also test_dir excluding this function!?
     # TODO: it can be commented!
-    MATCH = re.compile(r'pdb.set_trace()')
+    MATCH = re.compile(r"pdb.set_trace()")
     EXCLUDE = ["__pycache__", ".git"]
     for root, dirs, files in os.walk(package_dir):
         for name in files:
             if name.endswith(".py"):
-                pypth = (Path(root) / name)
+                pypth = Path(root) / name
                 code = pypth.read_text()
                 found = MATCH.findall(code)
                 assert not found, "pdb.set_trace found in %s" % pypth
diff --git a/services/web/server/tests/unit/test_projects_models.py b/services/web/server/tests/unit/test_projects_models.py
index 44315e463ee..2d8e58bfd08 100644
--- a/services/web/server/tests/unit/test_projects_models.py
+++ b/services/web/server/tests/unit/test_projects_models.py
@@ -10,30 +10,35 @@
 import pytest

-from simcore_service_webserver.projects.projects_db import (ProjectDBAPI,
-    _convert_to_db_names,
-    _convert_to_schema_names)
+from simcore_service_webserver.projects.projects_db import (
+    ProjectDBAPI,
+    _convert_to_db_names,
+    _convert_to_schema_names,
+)

 @pytest.fixture
 def fake_schema_dict():
     return {
         "anEntryThatUsesCamelCase": "I'm the entry",
-        "anotherEntryThatUsesCamelCase": "I'm also an entry"
+        "anotherEntryThatUsesCamelCase": "I'm also an entry",
     }

+
 @pytest.fixture
 def fake_db_dict():
     return {
         "an_entry_that_uses_snake_case": "I'm the entry",
-        "another_entry_that_uses_snake_case": "I'm also an entry"
+        "another_entry_that_uses_snake_case": "I'm also an entry",
     }

+
 def test_convert_to_db_names(fake_schema_dict):
     db_entries = _convert_to_db_names(fake_schema_dict)
     assert "an_entry_that_uses_camel_case" in db_entries
     assert "another_entry_that_uses_camel_case" in db_entries

+
 def test_convert_to_schema_names(fake_db_dict):
     db_entries = _convert_to_schema_names(fake_db_dict)
     assert "anEntryThatUsesSnakeCase" in db_entries
@@ -43,21 +48,26 @@ def test_convert_to_schema_names(fake_db_dict):
     fake_db_dict["time_entry"] = date
     db_entries = _convert_to_schema_names(fake_db_dict)
     assert "timeEntry" in db_entries
-    assert db_entries["timeEntry"] == "{}Z".format(date.isoformat(timespec='milliseconds'))
+    assert db_entries["timeEntry"] == "{}Z".format(
+        date.isoformat(timespec="milliseconds")
+    )

 @pytest.fixture
 def user_id():
     return -1

+
 class MockAsyncContextManager(MagicMock):
     mock_object = None

     async def __aenter__(self):
         return self.mock_object
+
     async def __aexit__(self, *args):
         pass

+
 @pytest.fixture
 def mock_db_engine(mocker):
     def create_engine(mock_result):
@@ -71,8 +81,10 @@ def create_engine(mock_result):
         mock_db_engine = mocker.patch("aiopg.sa.engine.Engine", spec=True)
         mock_db_engine.acquire.return_value = mock_context_manager
         return mock_db_engine, mock_connection
+
     yield create_engine

+
 async def test_add_projects(fake_project, user_id, mocker, mock_db_engine):
     mock_result_row = mocker.patch("aiopg.sa.result.RowProxy", spec=True)
@@ -83,7 +95,6 @@ async def test_add_projects(fake_project, user_id, mocker, mock_db_engine):

     db_engine, mock_connection = mock_db_engine(mock_result)

-
     db = ProjectDBAPI.init_from_engine(db_engine)
     await db.add_projects([fake_project], user_id=user_id)

@@ -91,6 +102,7 @@ async def test_add_projects(fake_project, user_id, mocker, mock_db_engine):
     mock_connection.execute.assert_called()
     assert 
mock_connection.execute.call_count == 3 + # not sure this is useful... # async def test_load_projects(user_id, mocker, mock_db_engine): # mock_result_row = mocker.patch("aiopg.sa.result.RowProxy", spec=True) diff --git a/services/web/server/tests/unit/test_projects_utils.py b/services/web/server/tests/unit/test_projects_utils.py index aec0b9ffd5a..8ad34fca72d 100644 --- a/services/web/server/tests/unit/test_projects_utils.py +++ b/services/web/server/tests/unit/test_projects_utils.py @@ -11,17 +11,18 @@ import pytest from jsonschema import ValidationError -from simcore_service_webserver.projects.projects_utils import \ - clone_project_document +from simcore_service_webserver.projects.projects_utils import clone_project_document from simcore_service_webserver.resources import resources def load_template_projects(): projects = [] - projects_names = [name for name in resources.listdir('data') if 'template-projects' in name] + projects_names = [ + name for name in resources.listdir("data") if "template-projects" in name + ] for name in projects_names: - with resources.stream(f'data/{name}') as fp: - projects.extend( json.load(fp) ) + with resources.stream(f"data/{name}") as fp: + projects.extend(json.load(fp)) return projects @@ -31,8 +32,10 @@ def project_schema(project_schema_file): schema = json.load(fh) return schema -@pytest.mark.parametrize("name,project", - [(p['name'], p) for p in load_template_projects()] ) + +@pytest.mark.parametrize( + "name,project", [(p["name"], p) for p in load_template_projects()] +) def test_clone_project_document(name, project, project_schema): source = deepcopy(project) @@ -42,10 +45,10 @@ def test_clone_project_document(name, project, project_schema): assert source == project # valid clone - assert clone['uuid'] != project['uuid'] + assert clone["uuid"] != project["uuid"] - node_ids = project['workbench'].keys() - for clone_node_id in clone['workbench']: + node_ids = project["workbench"].keys() + for clone_node_id in clone["workbench"]: assert clone_node_id not in node_ids try: diff --git a/services/web/server/tests/unit/test_resources.py b/services/web/server/tests/unit/test_resources.py index 172041f0c22..5e6a0613f1b 100644 --- a/services/web/server/tests/unit/test_resources.py +++ b/services/web/server/tests/unit/test_resources.py @@ -13,17 +13,20 @@ log = logging.getLogger(__name__) + @pytest.fixture def app_resources(package_dir: Path) -> List[str]: resource_names = [] base = package_dir - for name in (resources.config_folder, 'api'): + for name in (resources.config_folder, "api"): folder = base / name - resource_names += [ str(p.relative_to(base)) for p in folder.rglob("*.y*ml") ] + resource_names += [str(p.relative_to(base)) for p in folder.rglob("*.y*ml")] return resource_names -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ + def test_resource_io_utils(app_resources: List[str]): @@ -41,10 +44,12 @@ def test_resource_io_utils(app_resources: List[str]): assert ostream.closed + def test_named_resources(): - exposed = [getattr(resources, name) + exposed = [ + getattr(resources, name) for name in dir(resources) - if name.startswith("RESOURCES") + if name.startswith("RESOURCES") ] for resource_name in exposed: @@ -52,6 +57,7 @@ def test_named_resources(): assert resources.isdir(resource_name) assert resources.listdir(resource_name) + def test_paths(app_resources: List[str]): for resource_name in app_resources: assert 
resources.get_path(resource_name).exists() diff --git a/services/web/server/tests/unit/test_rest.py b/services/web/server/tests/unit/test_rest.py index a4312099b4e..3ef4e9ea639 100644 --- a/services/web/server/tests/unit/test_rest.py +++ b/services/web/server/tests/unit/test_rest.py @@ -19,6 +19,7 @@ # TODO: reduce log from openapi_core loggers + @pytest.fixture def spec_dict(openapi_path): with openapi_path.open() as f: @@ -30,20 +31,17 @@ def spec_dict(openapi_path): def client(loop, aiohttp_unused_port, aiohttp_client, api_version_prefix): app = create_safe_application() - server_kwargs={'port': aiohttp_unused_port(), 'host': 'localhost'} + server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"} # fake config app[APP_CONFIG_KEY] = { "main": server_kwargs, - "rest": { - "enabled": True, - "version": api_version_prefix - } + "rest": {"enabled": True, "version": api_version_prefix}, } # activates only security+restAPI sub-modules setup_security(app) setup_rest(app) - cli = loop.run_until_complete( aiohttp_client(app, server_kwargs=server_kwargs) ) + cli = loop.run_until_complete(aiohttp_client(app, server_kwargs=server_kwargs)) return cli @@ -52,32 +50,31 @@ async def test_check_health(client, api_version_prefix): payload = await resp.json() assert resp.status == 200, str(payload) - data, error = tuple(payload.get(k) for k in ('data', 'error')) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert data assert not error - assert data['name'] == 'simcore_service_webserver' - assert data['status'] == 'SERVICE_RUNNING' + assert data["name"] == "simcore_service_webserver" + assert data["status"] == "SERVICE_RUNNING" -FAKE = { - 'path_value': 'one', - 'query_value': 'two', - 'body_value': { - 'a': 'foo', - 'b': '45' - } - } +FAKE = { + "path_value": "one", + "query_value": "two", + "body_value": {"a": "foo", "b": "45"}, +} async def test_check_action(client, api_version_prefix): - QUERY = 'value' - ACTION = 'echo' + QUERY = "value" + ACTION = "echo" - resp = await client.post(f"/{api_version_prefix}/check/{ACTION}?data={QUERY}", json=FAKE) + resp = await client.post( + f"/{api_version_prefix}/check/{ACTION}?data={QUERY}", json=FAKE + ) payload = await resp.json() - data, error = tuple(payload.get(k) for k in ('data', 'error')) + data, error = tuple(payload.get(k) for k in ("data", "error")) assert resp.status == 200, str(payload) assert data @@ -85,14 +82,15 @@ async def test_check_action(client, api_version_prefix): # TODO: validate response against specs - assert data['path_value'] == ACTION - assert data['query_value'] == QUERY - assert data['body_value'] == FAKE - + assert data["path_value"] == ACTION + assert data["query_value"] == QUERY + assert data["body_value"] == FAKE async def test_check_fail(client, api_version_prefix): - url = client.app.router["check_action"].url_for(action="fail").with_query(data="foo") + url = ( + client.app.router["check_action"].url_for(action="fail").with_query(data="foo") + ) assert str(url) == f"/{api_version_prefix}/check/fail?data=foo" resp = await client.post(url, json=FAKE) @@ -100,7 +98,6 @@ async def test_check_fail(client, api_version_prefix): assert "some randome failure" in str(error) - async def test_frontend_config(client, api_version_prefix): url = client.app.router["get_config"].url_for() assert str(url) == f"/{api_version_prefix}/config" @@ -113,20 +110,22 @@ async def test_frontend_config(client, api_version_prefix): # w/ invitation explicitly for enabled in (True, False): - client.app[APP_CONFIG_KEY]['login'] = 
{'registration_invitation_required': enabled} + client.app[APP_CONFIG_KEY]["login"] = { + "registration_invitation_required": enabled + } response = await client.get(f"/{api_version_prefix}/config") data, _ = await assert_status(response, web.HTTPOk) assert data["invitation_required"] is enabled - - # FIXME: hard-coded v0 @pytest.mark.parametrize("resource_name", resources.listdir("api/v0/schemas")) def test_validate_component_schema(resource_name, api_version_prefix): try: - with resources.stream(f"api/{api_version_prefix}/schemas/{resource_name}") as fh: + with resources.stream( + f"api/{api_version_prefix}/schemas/{resource_name}" + ) as fh: schema_under_test = json.load(fh) validator = jsonschema.validators.validator_for(schema_under_test) diff --git a/services/web/server/tests/unit/test_reverse_proxy.py b/services/web/server/tests/unit/test_reverse_proxy.py index 66795584fdf..a826b936207 100644 --- a/services/web/server/tests/unit/test_reverse_proxy.py +++ b/services/web/server/tests/unit/test_reverse_proxy.py @@ -15,7 +15,9 @@ import pytest from aiohttp import web from aiohttp.client_reqrep import ClientResponse -from aiohttp.test_utils import TestClient as DTestClient # renaming avoid pytest to collect +from aiohttp.test_utils import ( + TestClient as DTestClient, +) # renamed to avoid pytest collecting it from yarl import URL import simcore_service_webserver.reverse_proxy.handlers as reverse_proxy_handlers @@ -33,16 +35,18 @@ async def handler(request: web.Request): Echoes back received info + its name """ body = await request.text() - return web.json_response({ - "name": name, - "image": image, - "received": { - "method": request.method, - "url": str(request.url), - "body": body, - "proxy_path": request.match_info.get("proxy_path", "") + return web.json_response( + { + "name": name, + "image": image, + "received": { + "method": request.method, + "url": str(request.url), + "body": body, + "proxy_path": request.match_info.get("proxy_path", ""), + }, } - }) + ) app = create_safe_application() app.router.add_route("*", basepath + "/{proxy_path:.*}", handler) @@ -50,7 +54,10 @@ def random_name(length=5): - return ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(length)) + return "".join( + random.choice(string.ascii_lowercase + string.digits) for _ in range(length) + ) + # FIXTURES ------------------ @@ -70,12 +77,13 @@ async def info(req: web.Request): serviceid = req.match_info.get("serviceId") for mountpoint, item in registry.items(): - if item["info"]['id'] == serviceid: + if item["info"]["id"] == serviceid: return web.json_response(registry[mountpoint]["info"]) raise web.HTTPServiceUnavailable( reason="Service {} is not running".format(serviceid), - content_type="application/json") + content_type="application/json", + ) async def start(req: web.Request): # client requests to run image in basepath @@ -94,16 +102,19 @@ async def start(req: web.Request): registry[mountpoint] = { "server": server, "info": { - 'name': name, - 'image': image, - 'mountpoint': mountpoint, - 'id': serviceid, - 'url': str(URL.build( - scheme=server.scheme, - host=server.host, - port=server.port, - path=mountpoint)) - } + "name": name, + "image": image, + "mountpoint": mountpoint, + "id": serviceid, + "url": str( + URL.build( + scheme=server.scheme, + host=server.host, + port=server.port, + path=mountpoint, + ) + ), + }, } # produces an identifier @@ -114,7 +125,7 @@ async def stop(req: web.Request): info = {"id": serviceid} # determines unique 
mountpoint for mountpoint, item in registry.items(): - if item["info"]['id'] == serviceid: + if item["info"]["id"] == serviceid: print("stopping %s ...", item["info"]) service = registry[mountpoint]["server"] await service.close() @@ -162,9 +173,7 @@ async def find_url(self, service_identifier: str) -> URL: info = await res.json() return info["url"] - app = create_safe_application({'reverse_proxy':{ - 'enabled': True - }}) + app = create_safe_application({"reverse_proxy": {"enabled": True}}) # setup app["director.client"] = spawner_client @@ -175,15 +184,19 @@ async def find_url(self, service_identifier: str) -> URL: app["reverse_proxy.basemount"] = monitor.base_mountpoint url = app.router["reverse_proxy"].url_for( - serviceId="foo", proxyPath="bar") # <-- another way to "publish", with a named-resource + serviceId="foo", proxyPath="bar" + ) # <-- another way to "publish", with a named-resource assert url == URL(app["reverse_proxy.basemount"] + "/foo/bar") # adds api async def bypass(req: web.Request): """ bypasses traffic to spawner """ # /services/{serviceId}?action=xxx -> /services/{serviceId}/{action} - method, path, body = req.method, join( - req.path, req.query.get("action", "")).rstrip("/"), None + method, path, body = ( + req.method, + join(req.path, req.query.get("action", "")).rstrip("/"), + None, + ) if method != "GET": body = await req.json() body["basepath"] = req.app["reverse_proxy.basemount"] @@ -194,8 +207,7 @@ async def bypass(req: web.Request): res = await cli.request(method, path, json=body) assert isinstance(res, ClientResponse), "NOTE: %s" % type(res) - response = web.StreamResponse(status=res.status, - headers=res.headers) + response = web.StreamResponse(status=res.status, headers=res.headers) await response.prepare(req) payload = await res.read() await response.write_eof(payload) @@ -229,18 +241,14 @@ async def test_spawner(spawner_client): assert resp.status == 200, data assert data == [] - resp = await spawner_client.post("/services/start", json={ - "image": "A:latest", - "name": "a", - "basepath": BASEPATH - }) + resp = await spawner_client.post( + "/services/start", json={"image": "A:latest", "name": "a", "basepath": BASEPATH} + ) data = await resp.text() - resp = await spawner_client.post("/services/start", json={ - "image": "B:latest", - "name": "b", - "basepath": BASEPATH - }) + resp = await spawner_client.post( + "/services/start", json={"image": "B:latest", "name": "b", "basepath": BASEPATH} + ) data = await resp.text() assert resp.status == 200, data @@ -277,11 +285,9 @@ async def test_spawner_from_client(client): assert len(data) == 0 # start - resp = await client.post("/services", params="action=start", - json={ - "image": "A:latest" - } - ) + resp = await client.post( + "/services", params="action=start", json={"image": "A:latest"} + ) data = await resp.json() assert resp.status == 200, data assert data["mountpoint"].startswith(PROXY_MOUNTPOINT) @@ -318,11 +324,9 @@ async def test_spawned_from_client(client): # spawns 3 services: client <-> reverse_proxy_server <-> spawner_server for _ in range(3): - resp = await client.post("/services", params="action=start", - json={ - "image": IMAGE - } - ) + resp = await client.post( + "/services", params="action=start", json={"image": IMAGE} + ) data = await resp.json() assert resp.status == 200, data @@ -332,7 +336,7 @@ async def test_spawned_from_client(client): # pings them: client <-> reverse_proxy_server <-> spawned_servers for sid, mountpoint in registry.items(): - resp = await client.get(mountpoint+"/ping") + 
resp = await client.get(mountpoint + "/ping") assert resp.status == 200 data = await resp.json() @@ -342,11 +346,14 @@ async def test_spawned_from_client(client): assert data["received"]["proxy_path"] == "ping" tail = client.app.router["reverse_proxy"].url_for( - serviceId=sid, proxyPath="ping") + serviceId=sid, proxyPath="ping" + ) url = URL(data["received"]["url"]) assert url.relative() == tail assert not data["received"]["body"] def test_module_configs(): - assert reverse_proxy_handlers.jupyter.APP_SOCKETS_KEY == reverse_proxy.APP_SOCKETS_KEY + assert ( + reverse_proxy_handlers.jupyter.APP_SOCKETS_KEY == reverse_proxy.APP_SOCKETS_KEY + ) diff --git a/services/web/server/tests/unit/test_security_access_model.py b/services/web/server/tests/unit/test_security_access_model.py index a81139d51b5..9147d2f560c 100644 --- a/services/web/server/tests/unit/test_security_access_model.py +++ b/services/web/server/tests/unit/test_security_access_model.py @@ -9,6 +9,7 @@ import copy import difflib import json + # https://blog.nodeswat.com/implement-access-control-in-node-js-8567e7b484d1 # from typing import Callable, Dict, List @@ -20,18 +21,18 @@ from simcore_service_webserver.resources import resources from simcore_service_webserver.security_access_model import ( - RoleBasedAccessModel, check_access) + RoleBasedAccessModel, + check_access, +) from simcore_service_webserver.security_permissions import and_, or_ -from simcore_service_webserver.security_roles import (ROLES_PERMISSIONS, - UserRole) +from simcore_service_webserver.security_roles import ROLES_PERMISSIONS, UserRole @pytest.fixture def access_model(): - def can_update_inputs(context): - current_data = context['current'] - candidate_data = context['candidate'] + current_data = context["current"] + candidate_data = context["candidate"] diffs = jsondiff.diff(current_data, candidate_data) @@ -39,8 +40,8 @@ def can_update_inputs(context): try: for node in diffs["workbench"]: # can ONLY modify `inputs` fields set as ReadAndWrite - access = current_data['workbench'][node]["inputAccess"] - inputs = diffs["workbench"][node]['inputs'] + access = current_data["workbench"][node]["inputAccess"] + inputs = diffs["workbench"][node]["inputs"] for key in inputs: if access.get(key) != "ReadAndWrite": return False @@ -49,23 +50,23 @@ def can_update_inputs(context): pass return False - return len(diffs)==0 # no changes + return len(diffs) == 0 # no changes - #----------- + # ----------- fake_roles_permissions = { UserRole.ANONYMOUS: { - 'can': [ + "can": [ "studies.templates.read", "study.start", "study.stop", { "name": "study.pipeline.node.inputs.update", - "check": can_update_inputs - } + "check": can_update_inputs, + }, ] }, UserRole.USER: { - 'can': [ + "can": [ "study.node.create", "study.node.delete", "study.node.rename", @@ -73,17 +74,14 @@ def can_update_inputs(context): "study.node.data.push", "study.node.data.delete", "study.edge.create", - "study.edge.delete" + "study.edge.delete", ], - 'inherits': [UserRole.ANONYMOUS] + "inherits": [UserRole.ANONYMOUS], }, UserRole.TESTER: { - 'can': [ - "study.nodestree.uuid.read", - "study.logger.debug.read" - ], - # This double inheritance is done intentionally redundant - 'inherits': [UserRole.USER, UserRole.ANONYMOUS] + "can": ["study.nodestree.uuid.read", "study.logger.debug.read"], + # This double inheritance is done intentionally redundant + "inherits": [UserRole.USER, UserRole.ANONYMOUS], }, } @@ -91,13 +89,15 @@ def can_update_inputs(context): rbac = 
RoleBasedAccessModel.from_rawdata(fake_roles_permissions) return rbac + # TESTS ------------------------------------------------------------------------- + def test_roles(): super_users = UserRole.super_users() assert super_users assert UserRole.USER not in super_users - assert all( r in UserRole for r in super_users ) + assert all(r in UserRole for r in super_users) def test_unique_permissions(): @@ -110,7 +110,10 @@ def test_unique_permissions(): for role in ROLES_PERMISSIONS: can = ROLES_PERMISSIONS[role].get("can", []) for permission in can: - assert permission not in used, "'%s' in %s is repeated in security_roles.ROLES_PERMISSIONS" % (permission, role) + assert permission not in used, ( + "'%s' in %s is repeated in security_roles.ROLES_PERMISSIONS" + % (permission, role) + ) used.append(permission) @@ -125,7 +128,7 @@ def test_access_model_loads(): async def test_named_permissions(access_model): - R = UserRole # alias + R = UserRole # alias # direct permission assert await access_model.can(R.USER, "study.edge.delete") @@ -135,7 +138,6 @@ async def test_named_permissions(access_model): assert await access_model.can(R.TESTER, "study.edge.delete") assert await access_model.can(R.ANONYMOUS, "studies.templates.read") - who_can_delete = await access_model.who_can("study.edge.delete") assert R.USER in who_can_delete assert R.TESTER in who_can_delete @@ -169,13 +171,13 @@ async def test_permissions_inheritance(access_model): @pytest.mark.skip(reason="REVIEW") async def test_checked_permissions(access_model): - R = UserRole # alias - MOCKPATH = 'data/fake-template-projects.json' + R = UserRole # alias + MOCKPATH = "data/fake-template-projects.json" with resources.stream(MOCKPATH) as fh: data = json.load(fh) - current ={} + current = {} for prj in data: if prj["uuid"] == "template-uuid-1234-a1a7-f7d4f3a8f26b": current = prj @@ -185,54 +187,56 @@ async def test_checked_permissions(access_model): # updates both allowed and not allowed fields candidate = copy.deepcopy(current) - candidate['workbench']['template-uuid-409d-998c-c1f04de67f8b']["inputs"]["Kr"] = 66 # ReadOnly! - candidate['workbench']['template-uuid-409d-998c-c1f04de67f8b']["inputs"]["Na"] = 66 # ReadWrite + candidate["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"][ + "Kr" + ] = 66 # ReadOnly! 
+ candidate["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"][ + "Na" + ] = 66 # ReadWrite assert not await access_model.can( R.ANONYMOUS, "study.pipeline.node.inputs.update", - context={'current': current, 'candidate': candidate} + context={"current": current, "candidate": candidate}, ) # updates allowed fields candidate = copy.deepcopy(current) - candidate['workbench']['template-uuid-409d-998c-c1f04de67f8b']["inputs"]["Na"] = 66 # ReadWrite + candidate["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"][ + "Na" + ] = 66 # ReadWrite assert await access_model.can( R.ANONYMOUS, "study.pipeline.node.inputs.update", - context={'current': current, 'candidate': candidate} + context={"current": current, "candidate": candidate}, ) # udpates not permitted fields candidate = copy.deepcopy(current) - candidate['description'] = 'not allowed to write here' + candidate["description"] = "not allowed to write here" assert not await access_model.can( R.ANONYMOUS, "study.pipeline.node.inputs.update", - context={'current': current, 'candidate': candidate} + context={"current": current, "candidate": candidate}, ) async def test_async_checked_permissions(access_model): - R = UserRole # alias + R = UserRole # alias # add checked permissions async def async_callback(context) -> bool: - return context['response'] + return context["response"] - access_model.roles[R.TESTER].check['study.edge.edit'] = async_callback + access_model.roles[R.TESTER].check["study.edge.edit"] = async_callback assert not await access_model.can( - R.TESTER, - "study.edge.edit", - context={'response':False} + R.TESTER, "study.edge.edit", context={"response": False} ) assert await access_model.can( - R.TESTER, - "study.edge.edit", - context={'response':True} + R.TESTER, "study.edge.edit", context={"response": True} ) @@ -241,18 +245,19 @@ async def test_check_access_expressions(access_model): assert await check_access(access_model, R.ANONYMOUS, "study.stop") - assert await check_access(access_model, R.ANONYMOUS, - "study.stop |study.node.create") + assert await check_access( + access_model, R.ANONYMOUS, "study.stop |study.node.create" + ) - assert not await check_access(access_model, R.ANONYMOUS, - "study.stop & study.node.create") + assert not await check_access( + access_model, R.ANONYMOUS, "study.stop & study.node.create" + ) - assert await check_access(access_model, R.USER, - "study.stop & study.node.create") + assert await check_access(access_model, R.USER, "study.stop & study.node.create") # TODO: extend expression parser - #assert await check_access(access_model, R.USER, + # assert await check_access(access_model, R.USER, # "study.stop & (study.node.create|study.nodestree.uuid.read)") - #assert await check_access(access_model, R.TESTER, + # assert await check_access(access_model, R.TESTER, # "study.stop & study.node.create & study.nodestree.uuid.read") diff --git a/services/web/server/tests/unit/test_template_projects.py b/services/web/server/tests/unit/test_template_projects.py index 43d3e8ce575..0d30184d930 100644 --- a/services/web/server/tests/unit/test_template_projects.py +++ b/services/web/server/tests/unit/test_template_projects.py @@ -15,7 +15,9 @@ from servicelib.jsonschema_validation import validate_instance from simcore_service_webserver.projects.projects_fakes import Fake from simcore_service_webserver.projects.projects_utils import ( - substitute_parameterized_inputs, variable_pattern) + substitute_parameterized_inputs, + variable_pattern, +) from simcore_service_webserver.resources import 
resources from yarl import URL @@ -41,14 +43,14 @@ def fake_db(): @pytest.fixture def mock_parametrized_project(fake_data_dir): - path = fake_data_dir/"parametrized_project.json" + path = fake_data_dir / "parametrized_project.json" with path.open() as fh: prj = json.load(fh) # check parameterized - inputs = prj['workbench']['template-uuid-409d-998c-c1f04de67f8b']['inputs'] - assert variable_pattern.match(inputs['Na']) - assert variable_pattern.match(inputs['BCL']) + inputs = prj["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"] + assert variable_pattern.match(inputs["Na"]) + assert variable_pattern.match(inputs["BCL"]) return prj @@ -62,10 +64,17 @@ async def test_validate_templates(loop, project_specs: Dict, fake_db): def test_substitutions(mock_parametrized_project): - template_id = mock_parametrized_project['uuid'] - url = URL(f"https://myplatform/study/{template_id}").with_query(my_Na='33', my_BCL="54.0") + template_id = mock_parametrized_project["uuid"] + url = URL(f"https://myplatform/study/{template_id}").with_query( + my_Na="33", my_BCL="54.0" + ) prj = substitute_parameterized_inputs(mock_parametrized_project, dict(url.query)) assert prj - assert prj['workbench']['template-uuid-409d-998c-c1f04de67f8b']['inputs']['Na'] == 33 - assert prj['workbench']['template-uuid-409d-998c-c1f04de67f8b']['inputs']['BCL'] == 54.0 + assert ( + prj["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"]["Na"] == 33 + ) + assert ( + prj["workbench"]["template-uuid-409d-998c-c1f04de67f8b"]["inputs"]["BCL"] + == 54.0 + ) diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 520ee4201c1..0521c762477 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -29,8 +29,7 @@ from servicelib.aiopg_utils import DSN from servicelib.rest_responses import unwrap_envelope from simcore_service_webserver.application import create_application -from simcore_service_webserver.application_config import \ - app_schema as app_schema +from simcore_service_webserver.application_config import app_schema as app_schema from simcore_service_webserver.db_models import confirmations, metadata, users ## current directory @@ -44,9 +43,9 @@ def default_app_cfg(osparc_simcore_root_dir, fake_static_dir): assert cfg_path.exists() variables = dict(os.environ) - variables.update({ - 'OSPARC_SIMCORE_REPO_ROOTDIR': str(osparc_simcore_root_dir), - }) + variables.update( + {"OSPARC_SIMCORE_REPO_ROOTDIR": str(osparc_simcore_root_dir),} + ) # validates and fills all defaults/optional entries that normal load would not do cfg_dict = trafaret_config.read_and_validate(cfg_path, app_schema, vars=variables) @@ -57,6 +56,7 @@ def default_app_cfg(osparc_simcore_root_dir, fake_static_dir): # FIXME: free cfg_dict but deepcopy shall be r/w return cfg_dict + @pytest.fixture(scope="function") def app_cfg(default_app_cfg, aiohttp_unused_port): cfg = deepcopy(default_app_cfg) @@ -68,7 +68,8 @@ def app_cfg(default_app_cfg, aiohttp_unused_port): # this fixture can be safely modified during test since it is renovated on every call return cfg -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def docker_compose_file(default_app_cfg): """ Overrides pytest-docker fixture @@ -78,11 +79,11 @@ def docker_compose_file(default_app_cfg): cfg = deepcopy(default_app_cfg["db"]["postgres"]) # docker-compose reads these environs - os.environ['TEST_POSTGRES_DB']=cfg['database'] - 
os.environ['TEST_POSTGRES_USER']=cfg['user'] - os.environ['TEST_POSTGRES_PASSWORD']=cfg['password'] + os.environ["TEST_POSTGRES_DB"] = cfg["database"] + os.environ["TEST_POSTGRES_USER"] = cfg["user"] + os.environ["TEST_POSTGRES_PASSWORD"] = cfg["password"] - dc_path = current_dir / 'docker-compose.yml' + dc_path = current_dir / "docker-compose.yml" assert dc_path.exists() yield str(dc_path) @@ -90,23 +91,22 @@ def docker_compose_file(default_app_cfg): os.environ = old -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def postgres_service(docker_services, docker_ip, default_app_cfg): cfg = deepcopy(default_app_cfg["db"]["postgres"]) - cfg['host'] = docker_ip - cfg['port'] = docker_services.port_for('postgres', 5432) + cfg["host"] = docker_ip + cfg["port"] = docker_services.port_for("postgres", 5432) url = DSN.format(**cfg) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(url), - timeout=30.0, - pause=0.1, + check=lambda: is_postgres_responsive(url), timeout=30.0, pause=0.1, ) return url + @pytest.fixture def postgres_db(app_cfg, postgres_service): cfg = app_cfg["db"]["postgres"] @@ -124,18 +124,21 @@ def postgres_db(app_cfg, postgres_service): metadata.drop_all(engine) engine.dispose() + @pytest.fixture def web_server(loop, aiohttp_server, app_cfg, monkeypatch, postgres_db): app = create_application(app_cfg) path_mail(monkeypatch) - server = loop.run_until_complete( aiohttp_server(app, port=app_cfg["main"]["port"]) ) + server = loop.run_until_complete(aiohttp_server(app, port=app_cfg["main"]["port"])) return server + @pytest.fixture def client(loop, aiohttp_client, web_server): client = loop.run_until_complete(aiohttp_client(web_server)) return client + @pytest.fixture async def storage_subsystem_mock(loop, mocker): """ @@ -144,26 +147,34 @@ async def storage_subsystem_mock(loop, mocker): Patched functions are exposed within projects but call storage subsystem """ # requests storage to copy data - mock = mocker.patch('simcore_service_webserver.projects.projects_api.copy_data_folders_from_project') + mock = mocker.patch( + "simcore_service_webserver.projects.projects_api.copy_data_folders_from_project" + ) + async def _mock_copy_data_from_project(*args): return args[2] mock.side_effect = _mock_copy_data_from_project # requests storage to delete data - #mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) - mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project', return_value=Future()) + # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) + mock1 = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", + return_value=Future(), + ) mock1.return_value.set_result("") return mock, mock1 # helpers --------------- + def path_mail(monkeypatch): async def send_mail(*args): - print('=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}'.format(*args)) + print("=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}".format(*args)) + + monkeypatch.setattr(simcore_service_webserver.login.utils, "send_mail", send_mail) - monkeypatch.setattr(simcore_service_webserver.login.utils, 'send_mail', send_mail) def is_postgres_responsive(url): """Check if something responds to ``url`` """ @@ -175,24 +186,25 @@ def is_postgres_responsive(url): return False return True 
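# NOTE: the body of is_postgres_responsive() falls between the hunks above and
# is not shown. A minimal sketch of such a readiness probe -- assuming
# sqlalchemy, which these db fixtures already rely on -- could look like this:

import sqlalchemy as sa


def is_postgres_responsive_sketch(url: str) -> bool:
    """Returns True once a connection to ``url`` can be established."""
    try:
        engine = sa.create_engine(url)  # lazy: does not connect yet
        conn = engine.connect()  # the actual round-trip to the server
        conn.close()
    except sa.exc.OperationalError:  # server not (yet) accepting connections
        return False
    return True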
-@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def redis_service(docker_services, docker_ip): host = docker_ip - port = docker_services.port_for('redis', 6379) + port = docker_services.port_for("redis", 6379) url = URL(f"redis://{host}:{port}") docker_services.wait_until_responsive( - check=lambda: is_redis_responsive(host, port), - timeout=30.0, - pause=0.1, + check=lambda: is_redis_responsive(host, port), timeout=30.0, pause=0.1, ) return url + def is_redis_responsive(host: str, port: str) -> bool: r = redis.Redis(host=host, port=port) return r.ping() == True + @pytest.fixture async def redis_client(loop, redis_service): client = await aioredis.create_redis_pool(str(redis_service), encoding="utf-8") @@ -205,9 +217,10 @@ async def redis_client(loop, redis_service): @pytest.fixture() async def socketio_url(client) -> str: - SOCKET_IO_PATH = '/socket.io/' + SOCKET_IO_PATH = "/socket.io/" return str(client.make_url(SOCKET_IO_PATH)) + @pytest.fixture() async def security_cookie(client) -> str: # get the cookie by calling the root entrypoint @@ -223,45 +236,57 @@ async def security_cookie(client) -> str: cookie = resp.request_info.headers["Cookie"] yield cookie + @pytest.fixture() async def socketio_client(socketio_url: str, security_cookie: str): clients = [] async def connect(client_session_id): sio = socketio.AsyncClient() - url = str(URL(socketio_url).with_query({'client_session_id': client_session_id})) - await sio.connect(url, headers={'Cookie': security_cookie}) + url = str( + URL(socketio_url).with_query({"client_session_id": client_session_id}) + ) + await sio.connect(url, headers={"Cookie": security_cookie}) assert sio.sid clients.append(sio) return sio + yield connect for sio in clients: await sio.disconnect() assert not sio.sid + @pytest.fixture() def client_session_id() -> str: def create() -> str(): return str(uuid4()) + return create @pytest.fixture async def mocked_director_api(loop, mocker): mocks = {} - mocked_running_services = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', - return_value=Future()) + mocked_running_services = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) mocked_running_services.return_value.set_result("") mocks["get_running_interactive_services"] = mocked_running_services - mocked_stop_service = mocker.patch('simcore_service_webserver.director.director_api.stop_service', - return_value=Future()) + mocked_stop_service = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) mocked_stop_service.return_value.set_result("") mocks["stop_service"] = mocked_stop_service yield mocks + @pytest.fixture async def mocked_dynamic_service(loop, client, mocked_director_api): services = [] + async def create(user_id, project_id) -> Dict: SERVICE_UUID = str(uuid4()) SERVICE_KEY = "simcore/services/dynamic/3d-viewer" @@ -270,7 +295,7 @@ async def create(user_id, project_id) -> Dict: create_node_data = { "service_key": SERVICE_KEY, "service_version": SERVICE_VERSION, - "service_uuid": SERVICE_UUID + "service_uuid": SERVICE_UUID, } running_service_dict = { @@ -280,12 +305,15 @@ async def create(user_id, project_id) -> Dict: "service_version": SERVICE_VERSION, "service_host": "some_service_host", "service_port": "some_service_port", - "service_state": "some_service_state" + "service_state": "some_service_state", } services.append(running_service_dict) # reset the future or 
an invalidStateError will appear as set_result sets the future to done mocked_director_api["get_running_interactive_services"].return_value = Future() - mocked_director_api["get_running_interactive_services"].return_value.set_result(services) + mocked_director_api["get_running_interactive_services"].return_value.set_result( + services + ) return running_service_dict + return create diff --git a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py index 70e38ad8d59..2b2a5736cbe 100644 --- a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py +++ b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py @@ -36,15 +36,21 @@ SHARED_STUDY_UUID = "e2e38eee-c569-4e55-b104-70d159e49c87" + @pytest.fixture def qx_client_outdir(tmpdir, mocker): """ Emulates qx output at service/web/client after compiling """ basedir = tmpdir.mkdir("source-output") - folders = [ basedir.mkdir(folder_name) for folder_name in ('osparc', 'resource', 'transpiled')] - - index_file = Path( basedir.join("index.html") ) - index_file.write_text(textwrap.dedent("""\ + folders = [ + basedir.mkdir(folder_name) + for folder_name in ("osparc", "resource", "transpiled") + ] + + index_file = Path(basedir.join("index.html")) + index_file.write_text( + textwrap.dedent( + """\ @@ -52,7 +58,9 @@ def qx_client_outdir(tmpdir, mocker):
<!DOCTYPE html>
<html>
<body>
<h1>OSPARC-SIMCORE</h1>
<p> This is a result of qx_client_outdir fixture </p>
</body>
</html>
- """)) + """ + ) + ) # patch get_client_outdir mocker.patch.object(simcore_service_webserver.statics, "get_client_outdir") @@ -60,14 +68,16 @@ def qx_client_outdir(tmpdir, mocker): @pytest.fixture -def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, monkeypatch): -#def client(loop, aiohttp_client, app_cfg, qx_client_outdir, monkeypatch): # <<<< FOR DEVELOPMENT. DO NOT REMOVE. +def client( + loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, monkeypatch +): + # def client(loop, aiohttp_client, app_cfg, qx_client_outdir, monkeypatch): # <<<< FOR DEVELOPMENT. DO NOT REMOVE. cfg = deepcopy(app_cfg) - cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup - cfg['projects']['enabled'] = True - cfg['storage']['enabled'] = False - cfg['rabbit']['enabled'] = False + cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup + cfg["projects"]["enabled"] = True + cfg["storage"]["enabled"] = False + cfg["rabbit"]["enabled"] = False app = create_safe_application(cfg) @@ -75,35 +85,36 @@ def client(loop, aiohttp_client, app_cfg, postgres_service, qx_client_outdir, mo setup_db(app) setup_session(app) setup_security(app) - setup_rest(app) # TODO: why should we need this?? + setup_rest(app) # TODO: why should we need this?? setup_login(app) setup_users(app) assert setup_projects(app), "Shall not skip this setup" assert setup_studies_access(app), "Shall not skip this setup" # server and client - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': cfg["main"]["port"], - 'host': cfg['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={"port": cfg["main"]["port"], "host": cfg["main"]["host"]}, + ) + ) @pytest.fixture -async def logged_user(client): #, role: UserRole): +async def logged_user(client): # , role: UserRole): """ adds a user in db and logs in with client NOTE: role fixture is defined as a parametrization below """ - role = UserRole.USER # TODO: parameterize roles + role = UserRole.USER # TODO: parameterize roles async with LoggedUser( - client, - {"role": role.name}, - check_if_succeeds = role!=UserRole.ANONYMOUS + client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS ) as user: yield user await delete_all_projects(client.app) + @pytest.fixture async def published_project(client, fake_project): project_data = deepcopy(fake_project) @@ -112,13 +123,11 @@ async def published_project(client, fake_project): project_data["published"] = True async with NewProject( - project_data, - client.app, - user_id=None, - clear_all=True + project_data, client.app, user_id=None, clear_all=True ) as template_project: yield template_project + @pytest.fixture async def unpublished_project(client, fake_project): project_data = deepcopy(fake_project) @@ -127,10 +136,7 @@ async def unpublished_project(client, fake_project): project_data["published"] = False async with NewProject( - project_data, - client.app, - user_id=None, - clear_all=True + project_data, client.app, user_id=None, clear_all=True ) as template_project: yield template_project @@ -146,6 +152,7 @@ async def _get_user_projects(client): return projects + def _assert_same_projects(got: Dict, expected: Dict): # TODO: validate using api/specs/webserver/v0/components/schemas/project-v0.0.1.json # TODO: validate workbench! 
@@ -171,15 +178,15 @@ async def test_access_to_forbidden_study(client, unpublished_project): resp = await client.get("/study/%s" % valid_but_not_sharable) content = await resp.text() - assert resp.status == web.HTTPNotFound.status_code, \ + assert resp.status == web.HTTPNotFound.status_code, ( "STANDARD studies are NOT sharable: %s" % content + ) -async def test_access_study_anonymously(client, qx_client_outdir, published_project, storage_subsystem_mock): - params = { - "uuid":SHARED_STUDY_UUID, - "name":"some-template" - } +async def test_access_study_anonymously( + client, qx_client_outdir, published_project, storage_subsystem_mock +): + params = {"uuid": SHARED_STUDY_UUID, "name": "some-template"} url_path = "/study/%s" % SHARED_STUDY_UUID resp = await client.get(url_path) @@ -188,17 +195,18 @@ async def test_access_study_anonymously(client, qx_client_outdir, published_proj # index assert resp.status == web.HTTPOk.status_code, "Got %s" % str(content) assert str(resp.url.path) == "/" - assert "OSPARC-SIMCORE" in content, \ - "Expected front-end rendering workbench's study, got %s" % str(content) + assert ( + "OSPARC-SIMCORE" in content + ), "Expected front-end rendering workbench's study, got %s" % str(content) real_url = str(resp.real_url) # has auto logged in as guest? resp = await client.get("/v0/me") data, _ = await assert_status(resp, web.HTTPOk) - assert data['login'].endswith("guest-at-osparc.io") - assert data['gravatar_id'] - assert data['role'].upper() == UserRole.GUEST.name + assert data["login"].endswith("guest-at-osparc.io") + assert data["gravatar_id"] + assert data["role"].upper() == UserRole.GUEST.name # guest user only a copy of the template project projects = await _get_user_projects(client) @@ -208,14 +216,13 @@ async def test_access_study_anonymously(client, qx_client_outdir, published_proj assert real_url.endswith("#/study/%s" % guest_project["uuid"]) _assert_same_projects(guest_project, published_project) - assert guest_project['prjOwner'] == data['login'] + assert guest_project["prjOwner"] == data["login"] -async def test_access_study_by_logged_user(client, logged_user, qx_client_outdir, published_project, storage_subsystem_mock): - params = { - "uuid":SHARED_STUDY_UUID, - "name":"some-template" - } +async def test_access_study_by_logged_user( + client, logged_user, qx_client_outdir, published_project, storage_subsystem_mock +): + params = {"uuid": SHARED_STUDY_UUID, "name": "some-template"} url_path = "/study/%s" % SHARED_STUDY_UUID resp = await client.get(url_path) @@ -226,8 +233,9 @@ async def test_access_study_by_logged_user(client, logged_user, qx_client_outdir assert str(resp.url.path) == "/" real_url = str(resp.real_url) - assert "OSPARC-SIMCORE" in content, \ - "Expected front-end rendering workbench's study, got %s" % str(content) + assert ( + "OSPARC-SIMCORE" in content + ), "Expected front-end rendering workbench's study, got %s" % str(content) # user has a copy of the template project projects = await _get_user_projects(client) @@ -239,4 +247,4 @@ async def test_access_study_by_logged_user(client, logged_user, qx_client_outdir _assert_same_projects(user_project, published_project) - assert user_project['prjOwner'] == logged_user['email'] + assert user_project["prjOwner"] == logged_user["email"] diff --git a/services/web/server/tests/unit/with_dbs/test_change_email.py b/services/web/server/tests/unit/with_dbs/test_change_email.py index 375022f247e..373bcf23328 100644 --- a/services/web/server/tests/unit/with_dbs/test_change_email.py +++ 
b/services/web/server/tests/unit/with_dbs/test_change_email.py @@ -13,44 +13,40 @@ from utils_assert import assert_status from utils_login import LoggedUser, NewUser, parse_link -NEW_EMAIL = 'new@mail.com' +NEW_EMAIL = "new@mail.com" async def test_unauthorized(client): - url = client.app.router['auth_change_email'].url_for() - rsp = await client.post(url, json={ - 'email': NEW_EMAIL, - }) + url = client.app.router["auth_change_email"].url_for() + rsp = await client.post(url, json={"email": NEW_EMAIL,}) assert rsp.status == 401 await assert_status(rsp, web.HTTPUnauthorized) async def test_change_to_existing_email(client): - url = client.app.router['auth_change_email'].url_for() + url = client.app.router["auth_change_email"].url_for() async with LoggedUser(client) as user: async with NewUser() as other: - rsp = await client.post(url, json={ - 'email': other['email'], - }) - await assert_status(rsp, web.HTTPUnprocessableEntity, "This email cannot be used") + rsp = await client.post(url, json={"email": other["email"],}) + await assert_status( + rsp, web.HTTPUnprocessableEntity, "This email cannot be used" + ) async def test_change_and_confirm(client, capsys): cfg = client.app[APP_LOGIN_CONFIG] - url = client.app.router['auth_change_email'].url_for() + url = client.app.router["auth_change_email"].url_for() index_url = client.app.router[INDEX_RESOURCE_NAME].url_for() - login_url = client.app.router['auth_login'].url_for() - logout_url = client.app.router['auth_logout'].url_for() + login_url = client.app.router["auth_login"].url_for() + logout_url = client.app.router["auth_logout"].url_for() assert index_url.path == URL(cfg.LOGIN_REDIRECT).path async with LoggedUser(client) as user: # request change email - rsp = await client.post(url, json={ - 'email': NEW_EMAIL, - }) + rsp = await client.post(url, json={"email": NEW_EMAIL,}) assert rsp.url_obj.path == url.path await assert_status(rsp, web.HTTPOk, cfg.MSG_CHANGE_EMAIL_REQUESTED) @@ -70,16 +66,15 @@ async def test_change_and_confirm(client, capsys): assert rsp.url_obj.path == index_url.path assert "welcome to fake web front-end" in txt - rsp = await client.post(login_url, json={ - 'email': NEW_EMAIL, - 'password': user['raw_password'], - }) + rsp = await client.post( + login_url, json={"email": NEW_EMAIL, "password": user["raw_password"],} + ) payload = await rsp.json() assert rsp.url_obj.path == login_url.path await assert_status(rsp, web.HTTPOk, cfg.MSG_LOGGED_IN) - -if __name__ == '__main__': +if __name__ == "__main__": import pytest - pytest.main([__file__, '--maxfail=1']) + + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_change_password.py b/services/web/server/tests/unit/with_dbs/test_change_password.py index a508b6bdd57..0de8f038a14 100644 --- a/services/web/server/tests/unit/with_dbs/test_change_password.py +++ b/services/web/server/tests/unit/with_dbs/test_change_password.py @@ -12,30 +12,31 @@ from utils_assert import assert_status from utils_login import LoggedUser, parse_link -NEW_PASSWORD = 'NewPassword1*&^' +NEW_PASSWORD = "NewPassword1*&^" async def test_unauthorized(client): - url = client.app.router['auth_change_password'].url_for() - rsp = await client.post(url, json={ - 'current':' fake', - 'new': NEW_PASSWORD, - 'confirm': NEW_PASSWORD, - }) + url = client.app.router["auth_change_password"].url_for() + rsp = await client.post( + url, json={"current": " fake", "new": NEW_PASSWORD, "confirm": NEW_PASSWORD,} + ) assert rsp.status == 401 await assert_status(rsp, 
web.HTTPUnauthorized) async def test_wrong_current_password(client): cfg = client.app[APP_LOGIN_CONFIG] - url = client.app.router['auth_change_password'].url_for() + url = client.app.router["auth_change_password"].url_for() async with LoggedUser(client): - rsp = await client.post(url, json={ - 'current': 'wrongpassword', - 'new': NEW_PASSWORD, - 'confirm': NEW_PASSWORD, - }) + rsp = await client.post( + url, + json={ + "current": "wrongpassword", + "new": NEW_PASSWORD, + "confirm": NEW_PASSWORD, + }, + ) assert rsp.url_obj.path == url.path assert rsp.status == 422 assert cfg.MSG_WRONG_PASSWORD in await rsp.text() @@ -44,14 +45,17 @@ async def test_wrong_current_password(client): async def test_wrong_confirm_pass(client): cfg = client.app[APP_LOGIN_CONFIG] - url = client.app.router['auth_change_password'].url_for() + url = client.app.router["auth_change_password"].url_for() async with LoggedUser(client) as user: - rsp = await client.post(url, json={ - 'current': user['raw_password'], - 'new': NEW_PASSWORD, - 'confirm': NEW_PASSWORD.upper(), - }) + rsp = await client.post( + url, + json={ + "current": user["raw_password"], + "new": NEW_PASSWORD, + "confirm": NEW_PASSWORD.upper(), + }, + ) assert rsp.url_obj.path == url.path assert rsp.status == 409 await assert_status(rsp, web.HTTPConflict, cfg.MSG_PASSWORD_MISMATCH) @@ -60,16 +64,19 @@ async def test_wrong_confirm_pass(client): async def test_success(client): cfg = client.app[APP_LOGIN_CONFIG] - url = client.app.router['auth_change_password'].url_for() - login_url = client.app.router['auth_login'].url_for() - logout_url = client.app.router['auth_logout'].url_for() + url = client.app.router["auth_change_password"].url_for() + login_url = client.app.router["auth_login"].url_for() + logout_url = client.app.router["auth_logout"].url_for() async with LoggedUser(client) as user: - rsp = await client.post(url, json={ - 'current': user['raw_password'], - 'new': NEW_PASSWORD, - 'confirm': NEW_PASSWORD, - }) + rsp = await client.post( + url, + json={ + "current": user["raw_password"], + "new": NEW_PASSWORD, + "confirm": NEW_PASSWORD, + }, + ) assert rsp.url_obj.path == url.path assert rsp.status == 200 assert cfg.MSG_PASSWORD_CHANGED in await rsp.text() @@ -79,16 +86,15 @@ async def test_success(client): assert rsp.status == 200 assert rsp.url_obj.path == logout_url.path - rsp = await client.post(login_url, json={ - 'email': user['email'], - 'password': NEW_PASSWORD, - }) + rsp = await client.post( + login_url, json={"email": user["email"], "password": NEW_PASSWORD,} + ) assert rsp.status == 200 assert rsp.url_obj.path == login_url.path await assert_status(rsp, web.HTTPOk, cfg.MSG_LOGGED_IN) - -if __name__ == '__main__': +if __name__ == "__main__": import pytest - pytest.main([__file__, '--maxfail=1']) + + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_db.py b/services/web/server/tests/unit/with_dbs/test_db.py index a7a216acaaf..cea0674a231 100644 --- a/services/web/server/tests/unit/with_dbs/test_db.py +++ b/services/web/server/tests/unit/with_dbs/test_db.py @@ -2,8 +2,8 @@ import yaml -from simcore_service_webserver.db import (is_service_enabled, - is_service_responsive) +from simcore_service_webserver.db import is_service_enabled, is_service_responsive + def test_uses_same_postgres_version(docker_compose_file, osparc_simcore_root_dir): with io.open(docker_compose_file) as fh: @@ -12,7 +12,10 @@ def test_uses_same_postgres_version(docker_compose_file, osparc_simcore_root_dir with 
io.open(osparc_simcore_root_dir / "services" / "docker-compose.yml") as fh: expected = yaml.safe_load(fh) - assert fixture['services']['postgres']['image'] == expected['services']['postgres']['image'] + assert ( + fixture["services"]["postgres"]["image"] + == expected["services"]["postgres"]["image"] + ) async def test_responsive(web_server): diff --git a/services/web/server/tests/unit/with_dbs/test_guests_management.py b/services/web/server/tests/unit/with_dbs/test_guests_management.py index 218f51e7dd6..f75ecaf9050 100644 --- a/services/web/server/tests/unit/with_dbs/test_guests_management.py +++ b/services/web/server/tests/unit/with_dbs/test_guests_management.py @@ -20,16 +20,15 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): # config app cfg = deepcopy(app_cfg) port = cfg["main"]["port"] - cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup + cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup cfg["projects"]["enabled"] = True app = application.create_application(cfg) # server and client - return loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': port, - 'host': 'localhost' - })) + return loop.run_until_complete( + aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"}) + ) @pytest.mark.skip(reason="Under dev") @@ -43,7 +42,6 @@ def test_users_projects_db(client): pass - @pytest.mark.skip(reason="Under dev") def test_cleanup_expired_guest_users(client): pass diff --git a/services/web/server/tests/unit/with_dbs/test_login.py b/services/web/server/tests/unit/with_dbs/test_login.py index 128d58dfa03..8a5c3e96c7b 100644 --- a/services/web/server/tests/unit/with_dbs/test_login.py +++ b/services/web/server/tests/unit/with_dbs/test_login.py @@ -11,15 +11,14 @@ from simcore_service_webserver.login.cfg import cfg from utils_login import NewUser -EMAIL, PASSWORD = 'tester@test.com', 'password' +EMAIL, PASSWORD = "tester@test.com", "password" async def test_login_with_unknown_email(client): - url = client.app.router['auth_login'].url_for() - r = await client.post(url, json={ - 'email': 'unknown@email.com', - 'password': 'wrong.' 
- }) + url = client.app.router["auth_login"].url_for() + r = await client.post( + url, json={"email": "unknown@email.com", "password": "wrong."} + ) payload = await r.json() assert r.status == web.HTTPUnauthorized.status_code, str(payload) @@ -28,17 +27,14 @@ async def test_login_with_unknown_email(client): async def test_login_with_wrong_password(client): - url = client.app.router['auth_login'].url_for() + url = client.app.router["auth_login"].url_for() r = await client.get(url) payload = await r.json() assert cfg.MSG_WRONG_PASSWORD not in await r.text(), str(payload) async with NewUser() as user: - r = await client.post(url, json={ - 'email': user['email'], - 'password': 'wrong.', - }) + r = await client.post(url, json={"email": user["email"], "password": "wrong.",}) payload = await r.json() assert r.status == web.HTTPUnauthorized.status_code, str(payload) assert r.url_obj.path == url.path @@ -46,48 +42,45 @@ async def test_login_with_wrong_password(client): async def test_login_banned_user(client): - url = client.app.router['auth_login'].url_for() + url = client.app.router["auth_login"].url_for() r = await client.get(url) assert cfg.MSG_USER_BANNED not in await r.text() - async with NewUser({'status': UserStatus.BANNED.name}) as user: - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'] - }) + async with NewUser({"status": UserStatus.BANNED.name}) as user: + r = await client.post( + url, json={"email": user["email"], "password": user["raw_password"]} + ) payload = await r.json() assert r.status == web.HTTPUnauthorized.status_code, str(payload) assert r.url_obj.path == url.path - assert cfg.MSG_USER_BANNED in payload['error']['errors'][0]['message'] + assert cfg.MSG_USER_BANNED in payload["error"]["errors"][0]["message"] async def test_login_inactive_user(client): - url = client.app.router['auth_login'].url_for() + url = client.app.router["auth_login"].url_for() r = await client.get(url) assert cfg.MSG_ACTIVATION_REQUIRED not in await r.text() - async with NewUser({'status': UserStatus.CONFIRMATION_PENDING.name}) as user: - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'] - }) + async with NewUser({"status": UserStatus.CONFIRMATION_PENDING.name}) as user: + r = await client.post( + url, json={"email": user["email"], "password": user["raw_password"]} + ) assert r.status == web.HTTPUnauthorized.status_code assert r.url_obj.path == url.path assert cfg.MSG_ACTIVATION_REQUIRED in await r.text() async def test_login_successfully(client): - url = client.app.router['auth_login'].url_for() + url = client.app.router["auth_login"].url_for() async with NewUser() as user: - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'] - }) + r = await client.post( + url, json={"email": user["email"], "password": user["raw_password"]} + ) assert r.status == 200 data, error = unwrap_envelope(await r.json()) assert not error assert data - assert cfg.MSG_LOGGED_IN in data['message'] + assert cfg.MSG_LOGGED_IN in data["message"] diff --git a/services/web/server/tests/unit/with_dbs/test_logout.py b/services/web/server/tests/unit/with_dbs/test_logout.py index 98f908d936d..e8ed4e945bf 100644 --- a/services/web/server/tests/unit/with_dbs/test_logout.py +++ b/services/web/server/tests/unit/with_dbs/test_logout.py @@ -8,13 +8,13 @@ async def test_logout(client): db = get_storage(client.app) - logout_url = client.app.router['auth_logout'].url_for() - protected_url = 
client.app.router['auth_change_email'].url_for() + logout_url = client.app.router["auth_logout"].url_for() + protected_url = client.app.router["auth_change_email"].url_for() async with LoggedUser(client) as user: # try to access protected page - r = await client.post(protected_url, json={'email': user['email']}) + r = await client.post(protected_url, json={"email": user["email"]}) assert r.url_obj.path == protected_url.path await assert_status(r, web.HTTPOk) @@ -28,10 +28,10 @@ async def test_logout(client): assert r.url_obj.path == protected_url.path await assert_status(r, web.HTTPUnauthorized) - await db.delete_user(user) -if __name__ == '__main__': +if __name__ == "__main__": import pytest - pytest.main([__file__, '--maxfail=1']) + + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_projects.py b/services/web/server/tests/unit/with_dbs/test_projects.py index 0b17f22d06c..f482783f7e1 100644 --- a/services/web/server/tests/unit/with_dbs/test_projects.py +++ b/services/web/server/tests/unit/with_dbs/test_projects.py @@ -37,22 +37,26 @@ from utils_projects import NewProject, delete_all_projects API_VERSION = "v0" -RESOURCE_NAME = 'projects' +RESOURCE_NAME = "projects" API_PREFIX = "/" + API_VERSION @pytest.fixture def client(loop, aiohttp_client, app_cfg, postgres_service): -#def client(loop, aiohttp_client, app_cfg): # <<<< FOR DEVELOPMENT. DO NOT REMOVE. + # def client(loop, aiohttp_client, app_cfg): # <<<< FOR DEVELOPMENT. DO NOT REMOVE. # config app cfg = deepcopy(app_cfg) port = cfg["main"]["port"] - cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup + cfg["db"]["init_tables"] = True # inits tables of postgres_service upon startup cfg["projects"]["enabled"] = True cfg["director"]["enabled"] = True - cfg["resource_manager"]["garbage_collection_interval_seconds"] = 3 # increase speed of garbage collection - cfg["resource_manager"]["resource_deletion_timeout_seconds"] = 3 # reduce deletion delay + cfg["resource_manager"][ + "garbage_collection_interval_seconds" + ] = 3 # increase speed of garbage collection + cfg["resource_manager"][ + "resource_deletion_timeout_seconds" + ] = 3 # reduce deletion delay app = create_safe_application(cfg) # setup app @@ -60,7 +64,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): setup_session(app) setup_security(app) setup_rest(app) - setup_login(app) # needed for login_utils fixtures + setup_login(app) # needed for login_utils fixtures setup_resource_manager(app) setup_sockets(app) setup_director(app) @@ -68,13 +72,13 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): assert setup_projects(app) # server and client - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': port, - 'host': 'localhost' - })) + yield loop.run_until_complete( + aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"}) + ) # teardown here ... 
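# NOTE: computational_system_mock below (like storage_subsystem_mock and
# mocked_director_api in conftest.py) mocks an *async* callable by patching it
# to return an asyncio.Future that is already resolved, so awaiting the call
# yields immediately. The bare pattern, with hypothetical names:

from asyncio import Future


def make_resolved_future(result=""):
    fut = Future()
    fut.set_result(result)  # marks the future done; awaiting it yields `result`
    return fut

# e.g. mocker.patch("pkg.module.async_fn", return_value=make_resolved_future())
# Caveat: a done Future cannot be resolved twice -- a second set_result() raises
# InvalidStateError, which is why mocked_dynamic_service in conftest.py
# re-creates the future before every set_result() call.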
+ @pytest.fixture() async def logged_user(client, user_role: UserRole): """ adds a user in db and logs in with client @@ -84,23 +88,23 @@ async def logged_user(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds = user_role!=UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: print("-----> logged in user", user_role) yield user print("<----- logged out user", user_role) + @pytest.fixture async def user_project(client, fake_project, logged_user): async with NewProject( - fake_project, - client.app, - user_id=logged_user["id"] + fake_project, client.app, user_id=logged_user["id"] ) as project: print("-----> added project", project["name"]) yield project print("<----- removed project", project["name"]) + @pytest.fixture async def template_project(client, fake_project): project_data = deepcopy(fake_project) @@ -108,21 +112,23 @@ async def template_project(client, fake_project): project_data["uuid"] = "d4d0eca3-d210-4db6-84f9-63670b07176b" async with NewProject( - project_data, - client.app, - user_id=None, - clear_all=True + project_data, client.app, user_id=None, clear_all=True ) as template_project: print("-----> added template project", template_project["name"]) yield template_project print("<----- removed template project", template_project["name"]) + @pytest.fixture def computational_system_mock(mocker): - mock_fun = mocker.patch('simcore_service_webserver.projects.projects_handlers.update_pipeline_db', return_value=Future()) + mock_fun = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", + return_value=Future(), + ) mock_fun.return_value.set_result("") return mock_fun + @pytest.fixture def fake_services(): def create_fakes(number_services: int) -> List[Dict]: @@ -134,9 +140,11 @@ def create_fakes(number_services: int) -> List[Dict]: def assert_replaced(current_project, update_data): def _extract(dikt, keys): - return {k:dikt[k] for k in keys} + return {k: dikt[k] for k in keys} - modified = ["lastChangeDate", ] + modified = [ + "lastChangeDate", + ] keep = [k for k in update_data.keys() if k not in modified] assert _extract(current_project, keep) == _extract(update_data, keep) @@ -145,17 +153,20 @@ def _extract(dikt, keys): assert to_datetime(update_data[k]) < to_datetime(current_project[k]) - - # GET -------- -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_list_projects(client, logged_user, user_project, template_project, expected): - #TODO: GET /v0/projects?start=0&count=3 +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_list_projects( + client, logged_user, user_project, template_project, expected +): + # TODO: GET /v0/projects?start=0&count=3 # GET /v0/projects url = client.app.router["list_projects"].url_for() @@ -169,30 +180,34 @@ async def test_list_projects(client, logged_user, user_project, template_project assert data[0] == template_project assert data[1] == user_project - #GET /v0/projects?type=user - resp = await client.get(url.with_query(type='user')) + # GET /v0/projects?type=user + resp = await client.get(url.with_query(type="user")) data, errors = await assert_status(resp, expected) if not errors: assert len(data) == 1 
assert data[0] == user_project - #GET /v0/projects?type=template + # GET /v0/projects?type=template # instead /v0/projects/templates ?? - resp = await client.get(url.with_query(type='template')) + resp = await client.get(url.with_query(type="template")) data, errors = await assert_status(resp, expected) if not errors: assert len(data) == 1 assert data[0] == template_project - -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_get_project(client, logged_user, user_project, template_project, expected): +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_get_project( + client, logged_user, user_project, template_project, expected +): # GET /v0/projects/{project_id} # with a project owned by user @@ -215,14 +230,18 @@ async def test_get_project(client, logged_user, user_project, template_project, # POST -------- -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPCreated), - (UserRole.TESTER, web.HTTPCreated), -]) -async def test_new_project(client, logged_user, expected, - computational_system_mock, storage_subsystem_mock): +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPCreated), + (UserRole.TESTER, web.HTTPCreated), + ], +) +async def test_new_project( + client, logged_user, expected, computational_system_mock, storage_subsystem_mock +): # POST /v0/projects url = client.app.router["create_projects"].url_for() assert str(url) == API_PREFIX + "/projects" @@ -236,7 +255,7 @@ async def test_new_project(client, logged_user, expected, "creationDate": now_str(), "lastChangeDate": now_str(), "thumbnail": "", - "workbench": {} + "workbench": {}, } resp = await client.post(url, json=default_project) @@ -249,11 +268,13 @@ async def test_new_project(client, logged_user, expected, # updated fields assert default_project["uuid"] != new_project["uuid"] assert default_project["prjOwner"] != logged_user["name"] - assert to_datetime(default_project["creationDate"]) < to_datetime(new_project["creationDate"]) + assert to_datetime(default_project["creationDate"]) < to_datetime( + new_project["creationDate"] + ) # invariant fields for key in new_project.keys(): - if key not in ('uuid', 'prjOwner', 'creationDate', 'lastChangeDate'): + if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate"): assert default_project[key] == new_project[key] # TODO: validate response using OAS? 
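# NOTE: one way to resolve the "validate response using OAS?" TODO above would
# be to reuse the jsonschema approach from test_rest.py, pointed at the project
# schema the earlier TODO names (the resource path here is assumed, combining
# that comment with the "api/{version}/schemas/..." convention used elsewhere):

import json

import jsonschema

from simcore_service_webserver.resources import resources


def validate_project_sketch(project: dict, api_version_prefix: str = "v0"):
    with resources.stream(
        f"api/{api_version_prefix}/schemas/project-v0.0.1.json"
    ) as fh:
        schema = json.load(fh)
    jsonschema.validate(instance=project, schema=schema)  # raises on mismatch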
@@ -264,16 +285,30 @@ async def test_new_project(client, logged_user, expected, # violates foreign key constraint "user_to_projects_user_id_fkey" on table "user_to_projects" await delete_all_projects(client.app) -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPCreated), - (UserRole.TESTER, web.HTTPCreated), -]) -async def test_new_project_from_template(client, logged_user, template_project, expected, - computational_system_mock, storage_subsystem_mock): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPCreated), + (UserRole.TESTER, web.HTTPCreated), + ], +) +async def test_new_project_from_template( + client, + logged_user, + template_project, + expected, + computational_system_mock, + storage_subsystem_mock, +): # POST /v0/projects?from_template={template_uuid} - url = client.app.router["create_projects"].url_for().with_query(from_template=template_project["uuid"]) + url = ( + client.app.router["create_projects"] + .url_for() + .with_query(from_template=template_project["uuid"]) + ) resp = await client.post(url) @@ -288,8 +323,12 @@ async def test_new_project_from_template(client, logged_user, template_project, assert project["prjOwner"] != template_project["prjOwner"] # different timestamps - assert to_datetime(template_project["creationDate"]) < to_datetime(project["creationDate"]) - assert to_datetime(template_project["lastChangeDate"]) < to_datetime(project["lastChangeDate"]) + assert to_datetime(template_project["creationDate"]) < to_datetime( + project["creationDate"] + ) + assert to_datetime(template_project["lastChangeDate"]) < to_datetime( + project["lastChangeDate"] + ) # different uuids for project and nodes!? 
assert project["uuid"] != template_project["uuid"] @@ -301,26 +340,40 @@ async def test_new_project_from_template(client, logged_user, template_project, except ValueError: pytest.fail("Invalid uuid in workbench node {}".format(node_name)) -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPCreated), - (UserRole.TESTER, web.HTTPCreated), -]) -async def test_new_project_from_template_with_body(client, logged_user, template_project, expected, - computational_system_mock, storage_subsystem_mock): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPCreated), + (UserRole.TESTER, web.HTTPCreated), + ], +) +async def test_new_project_from_template_with_body( + client, + logged_user, + template_project, + expected, + computational_system_mock, + storage_subsystem_mock, +): # POST /v0/projects?from_template={template_uuid} - url = client.app.router["create_projects"].url_for().with_query(from_template=template_project["uuid"]) + url = ( + client.app.router["create_projects"] + .url_for() + .with_query(from_template=template_project["uuid"]) + ) predefined = { - "uuid":"", - "name":"Sleepers8", - "description":"Some lines from user", - "thumbnail":"", - "prjOwner":"", - "creationDate":"2019-06-03T09:59:31.987Z", - "lastChangeDate":"2019-06-03T09:59:31.987Z", - "workbench":{} + "uuid": "", + "name": "Sleepers8", + "description": "Some lines from user", + "thumbnail": "", + "prjOwner": "", + "creationDate": "2019-06-03T09:59:31.987Z", + "lastChangeDate": "2019-06-03T09:59:31.987Z", + "workbench": {}, } resp = await client.post(url, json=predefined) @@ -334,7 +387,6 @@ async def test_new_project_from_template_with_body(client, logged_user, template assert project["name"] == predefined["name"] assert project["description"] == predefined["description"] - modified = ["prjOwner", "creationDate", "lastChangeDate", "uuid"] # different ownership @@ -342,8 +394,12 @@ async def test_new_project_from_template_with_body(client, logged_user, template assert project["prjOwner"] != template_project["prjOwner"] # different timestamps - assert to_datetime(template_project["creationDate"]) < to_datetime(project["creationDate"]) - assert to_datetime(template_project["lastChangeDate"]) < to_datetime(project["lastChangeDate"]) + assert to_datetime(template_project["creationDate"]) < to_datetime( + project["creationDate"] + ) + assert to_datetime(template_project["lastChangeDate"]) < to_datetime( + project["lastChangeDate"] + ) # different uuids for project and nodes!? 
assert project["uuid"] != template_project["uuid"] @@ -356,17 +412,29 @@ async def test_new_project_from_template_with_body(client, logged_user, template pytest.fail("Invalid uuid in workbench node {}".format(node_name)) -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPForbidden), - (UserRole.TESTER, web.HTTPCreated), -]) -async def test_new_template_from_project(client, logged_user, user_project, expected, - computational_system_mock, storage_subsystem_mock): +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPForbidden), + (UserRole.TESTER, web.HTTPCreated), + ], +) +async def test_new_template_from_project( + client, + logged_user, + user_project, + expected, + computational_system_mock, + storage_subsystem_mock, +): # POST /v0/projects?as_template={user_uuid} - url = client.app.router["create_projects"].url_for().\ - with_query(as_template=user_project["uuid"]) + url = ( + client.app.router["create_projects"] + .url_for() + .with_query(as_template=user_project["uuid"]) + ) resp = await client.post(url) data, error = await assert_status(resp, expected) @@ -389,15 +457,19 @@ async def test_new_template_from_project(client, logged_user, user_project, expe # TODO: check in detail all fields in a node - # PUT -------- -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_replace_project(client, logged_user, user_project, expected, computational_system_mock): +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_replace_project( + client, logged_user, user_project, expected, computational_system_mock +): # PUT /v0/projects/{project_id} url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"]) @@ -410,19 +482,25 @@ async def test_replace_project(client, logged_user, user_project, expected, comp if not error: assert_replaced(current_project=data, update_data=project_update) -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_replace_project_updated_inputs(client, logged_user, user_project, expected, computational_system_mock): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_replace_project_updated_inputs( + client, logged_user, user_project, expected, computational_system_mock +): # PUT /v0/projects/{project_id} url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"]) project_update = deepcopy(user_project) # - #"inputAccess": { + # "inputAccess": { # "Na": "ReadAndWrite", <-------- # "Kr": "ReadOnly", # "BCL": "ReadAndWrite", @@ -430,7 +508,9 @@ async def test_replace_project_updated_inputs(client, logged_user, user_project, # "Ligand": "Invisible", # "cAMKII": "Invisible" # }, - project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"]["Na"] = 55 + 
project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][ + "Na" + ] = 55 resp = await client.put(url, json=project_update) data, error = await assert_status(resp, expected) @@ -438,19 +518,29 @@ async def test_replace_project_updated_inputs(client, logged_user, user_project, if not error: assert_replaced(current_project=data, update_data=project_update) -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_replace_project_updated_readonly_inputs(client, logged_user, user_project, expected, computational_system_mock): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_replace_project_updated_readonly_inputs( + client, logged_user, user_project, expected, computational_system_mock +): # PUT /v0/projects/{project_id} url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"]) project_update = deepcopy(user_project) - project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"]["Na"] = 55 - project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"]["Kr"] = 5 + project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][ + "Na" + ] = 55 + project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][ + "Kr" + ] = 5 resp = await client.put(url, json=project_update) data, error = await assert_status(resp, expected) @@ -461,20 +551,38 @@ async def test_replace_project_updated_readonly_inputs(client, logged_user, user # DELETE ------- -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPNoContent), -]) -async def test_delete_project(client, logged_user, user_project, expected, storage_subsystem_mock, mocker, fake_services): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPNoContent), + ], +) +async def test_delete_project( + client, + logged_user, + user_project, + expected, + storage_subsystem_mock, + mocker, + fake_services, +): # DELETE /v0/projects/{project_id} fakes = fake_services(5) - mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) + mock_director_api = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) mock_director_api.return_value.set_result(fakes) - mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future()) + mock_director_api_stop_services = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) mock_director_api_stop_services.return_value.set_result("") url = client.app.router["delete_project"].url_for(project_id=user_project["uuid"]) @@ -493,48 +601,94 @@ async def test_delete_project(client, logged_user, user_project, expected, stora resp = await client.get(url) data, error = await assert_status(resp, web.HTTPNotFound) 
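For orientation, the delete test above leans on the mocking idiom used throughout this file: a coroutine function is patched so that calling it returns an asyncio.Future whose result is set in advance, and awaiting that Future resolves immediately. A condensed sketch of the idiom; the helper name make_awaitable_mock is ours for illustration (the tests inline these steps each time), and mocker is the pytest-mock fixture:

from asyncio import Future

def make_awaitable_mock(mocker, target: str, result):
    # patch `target` so that `await target(...)` yields `result`
    mock = mocker.patch(target, return_value=Future())
    mock.return_value.set_result(result)
    return mock

Because a Future's result can be set only once, tests that need a second, different payload assign a fresh Future to mock.return_value before calling set_result again, as the node-lifetime test further down does.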
-@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_open_project(client, logged_user, user_project, client_session_id, expected, mocker): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_open_project( + client, logged_user, user_project, client_session_id, expected, mocker +): # POST /v0/projects/{project_id}:open # open project - mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) + mock_director_api = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) mock_director_api.return_value.set_result("") - mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future()) + mock_director_api_start_service = mocker.patch( + "simcore_service_webserver.director.director_api.start_service", + return_value=Future(), + ) mock_director_api_start_service.return_value.set_result("") url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(url, json=client_session_id()) await assert_status(resp, expected) if resp.status == web.HTTPOk.status_code: - dynamic_services = {service_uuid:service for service_uuid, service in user_project["workbench"].items() if "/dynamic/" in service["key"]} + dynamic_services = { + service_uuid: service + for service_uuid, service in user_project["workbench"].items() + if "/dynamic/" in service["key"] + } calls = [] for service_uuid, service in dynamic_services.items(): - calls.append(call(client.server.app, project_id=user_project["uuid"], service_key=service["key"], service_uuid=service_uuid, service_version=service["version"], user_id=logged_user["id"])) + calls.append( + call( + client.server.app, + project_id=user_project["uuid"], + service_key=service["key"], + service_uuid=service_uuid, + service_version=service["version"], + user_id=logged_user["id"], + ) + ) mock_director_api_start_service.assert_has_calls(calls) -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPNoContent), -]) -async def test_close_project(client, logged_user, user_project, client_session_id, expected, mocker, fake_services): + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPNoContent), + ], +) +async def test_close_project( + client, + logged_user, + user_project, + client_session_id, + expected, + mocker, + fake_services, +): # POST /v0/projects/{project_id}:close fakes = fake_services(5) assert len(fakes) == 5 - mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) + mock_director_api = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) mock_director_api.return_value.set_result(fakes) - mock_director_api_start_service = 
mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future()) + mock_director_api_start_service = mocker.patch( + "simcore_service_webserver.director.director_api.start_service", + return_value=Future(), + ) mock_director_api_start_service.return_value.set_result("") - mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future()) + mock_director_api_stop_services = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) mock_director_api_stop_services.return_value.set_result("") # open project client_id = client_session_id() @@ -556,27 +710,52 @@ async def test_close_project(client, logged_user, user_project, client_session_i else: mock_director_api.assert_not_called() -@pytest.mark.parametrize("user_role, expected", [ - # (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_get_active_project(client, logged_user, user_project, client_session_id, expected, socketio_client, mocker): - mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) + +@pytest.mark.parametrize( + "user_role, expected", + [ + # (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_get_active_project( + client, + logged_user, + user_project, + client_session_id, + expected, + socketio_client, + mocker, +): + mock_director_api = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) mock_director_api.return_value.set_result("") - mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future()) + mock_director_api_start_service = mocker.patch( + "simcore_service_webserver.director.director_api.start_service", + return_value=Future(), + ) mock_director_api_start_service.return_value.set_result("") - mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future()) + mock_director_api_stop_services = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) mock_director_api_stop_services.return_value.set_result("") # login with socket using client session id client_id1 = client_session_id() sio = await socketio_client(client_id1) assert sio.sid # get active projects -> empty - get_active_projects_url = client.app.router["get_active_project"].url_for().with_query(client_session_id=client_id1) + get_active_projects_url = ( + client.app.router["get_active_project"] + .url_for() + .with_query(client_session_id=client_id1) + ) resp = await client.get(get_active_projects_url) data, error = await assert_status(resp, expected) if resp.status == web.HTTPOk.status_code: @@ -584,7 +763,9 @@ async def test_get_active_project(client, logged_user, user_project, client_sess assert not error # open project - open_project_url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) + open_project_url = client.app.router["open_project"].url_for( + project_id=user_project["uuid"] + ) resp = await client.post(open_project_url, json=client_id1) data, error = await assert_status(resp, expected) resp = await 
client.get(get_active_projects_url) @@ -598,7 +779,11 @@ async def test_get_active_project(client, logged_user, user_project, client_sess sio = await socketio_client(client_id2) assert sio.sid # get active projects -> empty - get_active_projects_url = client.app.router["get_active_project"].url_for().with_query(client_session_id=client_id2) + get_active_projects_url = ( + client.app.router["get_active_project"] + .url_for() + .with_query(client_session_id=client_id2) + ) resp = await client.get(get_active_projects_url) data, error = await assert_status(resp, expected) if resp.status == web.HTTPOk.status_code: @@ -606,20 +791,43 @@ async def test_get_active_project(client, logged_user, user_project, client_sess assert not error -@pytest.mark.parametrize("user_role, expected", [ - # (UserRole.ANONYMOUS), - (UserRole.GUEST, web.HTTPForbidden), - (UserRole.USER, web.HTTPForbidden), - (UserRole.TESTER, web.HTTPForbidden), -]) -async def test_delete_shared_project_forbidden(loop, client, logged_user, user_project, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id, expected, mocker): - mock_director_api = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) +@pytest.mark.parametrize( + "user_role, expected", + [ + # (UserRole.ANONYMOUS), + (UserRole.GUEST, web.HTTPForbidden), + (UserRole.USER, web.HTTPForbidden), + (UserRole.TESTER, web.HTTPForbidden), + ], +) +async def test_delete_shared_project_forbidden( + loop, + client, + logged_user, + user_project, + mocked_director_api, + mocked_dynamic_service, + socketio_client, + client_session_id, + expected, + mocker, +): + mock_director_api = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) mock_director_api.return_value.set_result("") - mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future()) + mock_director_api_start_service = mocker.patch( + "simcore_service_webserver.director.director_api.start_service", + return_value=Future(), + ) mock_director_api_start_service.return_value.set_result("") - mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future()) + mock_director_api_stop_services = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) mock_director_api_stop_services.return_value.set_result("") # service in project = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) @@ -637,28 +845,56 @@ async def test_delete_shared_project_forbidden(loop, client, logged_user, user_p resp = await client.delete(url) await assert_status(resp, expected) -@pytest.mark.parametrize("user_role, create_exp, get_exp, deletion_exp", [ - (UserRole.ANONYMOUS, web.HTTPUnauthorized, web.HTTPUnauthorized, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPForbidden, web.HTTPOk, web.HTTPForbidden), - (UserRole.USER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent), - (UserRole.TESTER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent), -]) -async def test_project_node_lifetime(loop, client, logged_user, user_project, create_exp, get_exp, deletion_exp, mocker, storage_subsystem_mock): - mock_director_api_get_running_services = mocker.patch('simcore_service_webserver.director.director_api.get_running_interactive_services', return_value=Future()) - 
mock_director_api_start_service = mocker.patch('simcore_service_webserver.director.director_api.start_service', return_value=Future()) +@pytest.mark.parametrize( + "user_role, create_exp, get_exp, deletion_exp", + [ + ( + UserRole.ANONYMOUS, + web.HTTPUnauthorized, + web.HTTPUnauthorized, + web.HTTPUnauthorized, + ), + (UserRole.GUEST, web.HTTPForbidden, web.HTTPOk, web.HTTPForbidden), + (UserRole.USER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent), + (UserRole.TESTER, web.HTTPCreated, web.HTTPOk, web.HTTPNoContent), + ], +) +async def test_project_node_lifetime( + loop, + client, + logged_user, + user_project, + create_exp, + get_exp, + deletion_exp, + mocker, + storage_subsystem_mock, +): + mock_director_api_get_running_services = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), + ) + + mock_director_api_start_service = mocker.patch( + "simcore_service_webserver.director.director_api.start_service", + return_value=Future(), + ) mock_director_api_start_service.return_value.set_result("") - mock_director_api_stop_services = mocker.patch('simcore_service_webserver.director.director_api.stop_service', return_value=Future()) + mock_director_api_stop_services = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) mock_director_api_stop_services.return_value.set_result("") - mock_storage_api_delete_data_folders_of_project_node = mocker.patch('simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project_node', return_value=Future()) + mock_storage_api_delete_data_folders_of_project_node = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project_node", + return_value=Future(), + ) mock_storage_api_delete_data_folders_of_project_node.return_value.set_result("") # create a new dynamic node... url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) - body = { - "service_key": "some/dynamic/key", - "service_version": "1.3.4" - } + body = {"service_key": "some/dynamic/key", "service_version": "1.3.4"} resp = await client.post(url, json=body) data, errors = await assert_status(resp, create_exp) node_id = "wrong_node_id" @@ -671,10 +907,7 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr # create a new NOT dynamic node... 
mock_director_api_start_service.reset_mock() url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) - body = { - "service_key": "some/notdynamic/key", - "service_version": "1.3.4" - } + body = {"service_key": "some/notdynamic/key", "service_version": "1.3.4"} resp = await client.post(url, json=body) data, errors = await assert_status(resp, create_exp) node_id_2 = "wrong_node_id" @@ -686,8 +919,12 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr mock_director_api_start_service.assert_not_called() # get the node state - mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id, "service_state": "running"}]) - url = client.app.router["get_node"].url_for(project_id=user_project["uuid"], node_id=node_id) + mock_director_api_get_running_services.return_value.set_result( + [{"service_uuid": node_id, "service_state": "running"}] + ) + url = client.app.router["get_node"].url_for( + project_id=user_project["uuid"], node_id=node_id + ) resp = await client.get(url) data, errors = await assert_status(resp, get_exp) if resp.status == web.HTTPOk.status_code: @@ -697,7 +934,9 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr # get the NOT dynamic node state mock_director_api_get_running_services.return_value = Future() mock_director_api_get_running_services.return_value.set_result("") - url = client.app.router["get_node"].url_for(project_id=user_project["uuid"], node_id=node_id_2) + url = client.app.router["get_node"].url_for( + project_id=user_project["uuid"], node_id=node_id_2 + ) resp = await client.get(url) data, errors = await assert_status(resp, get_exp) if resp.status == web.HTTPOk.status_code: @@ -706,8 +945,12 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr # delete the node mock_director_api_get_running_services.return_value = Future() - mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id}]) - url = client.app.router["delete_node"].url_for(project_id=user_project["uuid"], node_id=node_id) + mock_director_api_get_running_services.return_value.set_result( + [{"service_uuid": node_id}] + ) + url = client.app.router["delete_node"].url_for( + project_id=user_project["uuid"], node_id=node_id + ) resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: @@ -721,7 +964,9 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr mock_director_api_stop_services.reset_mock() mock_storage_api_delete_data_folders_of_project_node.reset_mock() # mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id}]) - url = client.app.router["delete_node"].url_for(project_id=user_project["uuid"], node_id=node_id_2) + url = client.app.router["delete_node"].url_for( + project_id=user_project["uuid"], node_id=node_id_2 + ) resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: @@ -732,10 +977,10 @@ async def test_project_node_lifetime(loop, client, logged_user, user_project, cr mock_storage_api_delete_data_folders_of_project_node.assert_not_called() -@pytest.mark.parametrize("user_role,expected", [ - (UserRole.USER, web.HTTPOk) -]) -async def test_tags_to_studies(client, logged_user, user_project, expected, test_tags_data): +@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, web.HTTPOk)]) +async def 
test_tags_to_studies( + client, logged_user, user_project, expected, test_tags_data +): # Add test tags tags = test_tags_data added_tags = [] @@ -745,7 +990,9 @@ async def test_tags_to_studies(client, logged_user, user_project, expected, test added_tag, _ = await assert_status(resp, expected) added_tags.append(added_tag) # Add tag to study - url = client.app.router["add_tag"].url_for(study_uuid=user_project.get("uuid"), tag_id=str(added_tag.get("id"))) + url = client.app.router["add_tag"].url_for( + study_uuid=user_project.get("uuid"), tag_id=str(added_tag.get("id")) + ) resp = await client.put(url) data, _ = await assert_status(resp, expected) # Tag is included in response @@ -756,17 +1003,23 @@ async def test_tags_to_studies(client, logged_user, user_project, expected, test resp = await client.delete(url) await assert_status(resp, web.HTTPNoContent) # Get project and check that tag is no longer there - url = client.app.router["get_project"].url_for(project_id=str(user_project.get("uuid"))) + url = client.app.router["get_project"].url_for( + project_id=str(user_project.get("uuid")) + ) resp = await client.get(url) data, _ = await assert_status(resp, expected) assert added_tags[0].get("id") not in data.get("tags") - #Remove tag1 from project - url = client.app.router["remove_tag"].url_for(study_uuid=user_project.get("uuid"), tag_id=str(added_tags[1].get("id"))) + # Remove tag1 from project + url = client.app.router["remove_tag"].url_for( + study_uuid=user_project.get("uuid"), tag_id=str(added_tags[1].get("id")) + ) resp = await client.delete(url) await assert_status(resp, expected) # Get project and check that tag is no longer there - url = client.app.router["get_project"].url_for(project_id=str(user_project.get("uuid"))) + url = client.app.router["get_project"].url_for( + project_id=str(user_project.get("uuid")) + ) resp = await client.get(url) data, _ = await assert_status(resp, expected) assert added_tags[1].get("id") not in data.get("tags") diff --git a/services/web/server/tests/unit/with_dbs/test_redis.py b/services/web/server/tests/unit/with_dbs/test_redis.py index ad5d801b7dd..90833683ae4 100644 --- a/services/web/server/tests/unit/with_dbs/test_redis.py +++ b/services/web/server/tests/unit/with_dbs/test_redis.py @@ -4,8 +4,8 @@ import aioredis -async def test_aioredis(loop, redis_client): - await redis_client.set('my-key', 'value') - val = await redis_client.get('my-key') - assert val == 'value' +async def test_aioredis(loop, redis_client): + await redis_client.set("my-key", "value") + val = await redis_client.get("my-key") + assert val == "value" diff --git a/services/web/server/tests/unit/with_dbs/test_redis_registry.py b/services/web/server/tests/unit/with_dbs/test_redis_registry.py index 07da37160fb..0eaa7320901 100644 --- a/services/web/server/tests/unit/with_dbs/test_redis_registry.py +++ b/services/web/server/tests/unit/with_dbs/test_redis_registry.py @@ -9,12 +9,19 @@ import pytest from simcore_service_webserver.resource_manager.config import ( - APP_CLIENT_REDIS_CLIENT_KEY, APP_CLIENT_SOCKET_REGISTRY_KEY, - APP_CONFIG_KEY, CONFIG_SECTION_NAME) + APP_CLIENT_REDIS_CLIENT_KEY, + APP_CLIENT_SOCKET_REGISTRY_KEY, + APP_CONFIG_KEY, + CONFIG_SECTION_NAME, +) from simcore_service_webserver.resource_manager.registry import ( - ALIVE_SUFFIX, RESOURCE_SUFFIX, RedisResourceRegistry) -from simcore_service_webserver.resource_manager.websocket_manager import \ - managed_resource + ALIVE_SUFFIX, + RESOURCE_SUFFIX, + RedisResourceRegistry, +) +from 
simcore_service_webserver.resource_manager.websocket_manager import ( + managed_resource, +) @pytest.fixture @@ -22,14 +29,11 @@ def redis_enabled_app(redis_client) -> Dict: app = { APP_CLIENT_REDIS_CLIENT_KEY: redis_client, APP_CLIENT_SOCKET_REGISTRY_KEY: None, - APP_CONFIG_KEY: { - CONFIG_SECTION_NAME: { - "resource_deletion_timeout_seconds": 3 - } - } + APP_CONFIG_KEY: {CONFIG_SECTION_NAME: {"resource_deletion_timeout_seconds": 3}}, } yield app + @pytest.fixture def redis_registry(redis_enabled_app) -> RedisResourceRegistry: registry = RedisResourceRegistry(redis_enabled_app) @@ -37,39 +41,49 @@ def redis_registry(redis_enabled_app) -> RedisResourceRegistry: yield registry - @pytest.fixture def user_ids(): def create_user_id(number: int) -> List[str]: return [f"user id {i}" for i in range(number)] + return create_user_id -@pytest.mark.parametrize("key, hash_key", [ - ({"some_key": "some_value"}, "some_key=some_value"), - ({"some_key": "some_value", "another_key": "another_value"}, "some_key=some_value:another_key=another_value") -]) + +@pytest.mark.parametrize( + "key, hash_key", + [ + ({"some_key": "some_value"}, "some_key=some_value"), + ( + {"some_key": "some_value", "another_key": "another_value"}, + "some_key=some_value:another_key=another_value", + ), + ], +) async def test_redis_registry_hashes(loop, redis_enabled_app, key, hash_key): # pylint: disable=protected-access assert RedisResourceRegistry._hash_key(key) == hash_key - assert RedisResourceRegistry._decode_hash_key(f"{hash_key}:{RESOURCE_SUFFIX}") == key + assert ( + RedisResourceRegistry._decode_hash_key(f"{hash_key}:{RESOURCE_SUFFIX}") == key + ) assert RedisResourceRegistry._decode_hash_key(f"{hash_key}:{ALIVE_SUFFIX}") == key + async def test_redis_registry(loop, redis_registry): random_value = randint(1, 10) - key = {f"key_{x}":f"value_{x}" for x in range(random_value)} - second_key = {f"sec_key_{x}":f"sec_value_{x}" for x in range(random_value)} - invalid_key = {f"invalid_key":f"invalid_value"} + key = {f"key_{x}": f"value_{x}" for x in range(random_value)} + second_key = {f"sec_key_{x}": f"sec_value_{x}" for x in range(random_value)} + invalid_key = {f"invalid_key": f"invalid_value"} NUM_RESOURCES = 7 resources = [(f"res_key{x}", f"res_value{x}") for x in range(NUM_RESOURCES)] - invalid_resource = (f"invalid_res_key",f"invalid_res_value") + invalid_resource = (f"invalid_res_key", f"invalid_res_value") # create resources for res in resources: await redis_registry.set_resource(key, res) - assert len(await redis_registry.get_resources(key)) == resources.index(res)+1 + assert len(await redis_registry.get_resources(key)) == resources.index(res) + 1 # get them - assert await redis_registry.get_resources(key) == {x[0]:x[1] for x in resources} + assert await redis_registry.get_resources(key) == {x[0]: x[1] for x in resources} assert not await redis_registry.get_resources(invalid_key) # find them for res in resources: @@ -81,7 +95,10 @@ async def test_redis_registry(loop, redis_registry): # add second key for res in resources: await redis_registry.set_resource(second_key, res) - assert len(await redis_registry.get_resources(second_key)) == resources.index(res)+1 + assert ( + len(await redis_registry.get_resources(second_key)) + == resources.index(res) + 1 + ) # find them for res in resources: assert await redis_registry.find_resources(key, res[0]) == [res[1]] @@ -115,7 +132,10 @@ async def test_redis_registry(loop, redis_registry): for res in resources: assert await redis_registry.find_keys(res) == [second_key] await 
redis_registry.remove_resource(second_key, res[0]) - assert len(await redis_registry.get_resources(second_key)) == len(resources) - (resources.index(res)+1) + assert len(await redis_registry.get_resources(second_key)) == len(resources) - ( + resources.index(res) + 1 + ) + async def test_websocket_manager(loop, redis_enabled_app, redis_registry, user_ids): @@ -136,25 +156,34 @@ async def test_websocket_manager(loop, redis_enabled_app, redis_registry, user_i assert socket_id not in tabs tabs[socket_id] = client_session_id with managed_resource(user, client_session_id, redis_enabled_app) as rt: - #pylint: disable=protected-access - resource_key = {"user_id":user, "client_session_id": client_session_id} + # pylint: disable=protected-access + resource_key = {"user_id": user, "client_session_id": client_session_id} assert rt._resource_key() == resource_key # set the socket id and check it is rightfully there await rt.set_socket_id(socket_id) assert await rt.get_socket_id() == socket_id - assert await redis_registry.get_resources(resource_key) == {"socket_id": socket_id} + assert await redis_registry.get_resources(resource_key) == { + "socket_id": socket_id + } list_of_sockets_of_user = await rt.find_socket_ids() assert socket_id in list_of_sockets_of_user # resource key shall be empty assert await rt.find(res_key) == [] # add the resource now await rt.add(res_key, res_value) - assert await redis_registry.get_resources(resource_key) == {"socket_id": socket_id, res_key: res_value} + assert await redis_registry.get_resources(resource_key) == { + "socket_id": socket_id, + res_key: res_value, + } # resource key shall be filled assert await rt.find(res_key) == [res_value] - list_of_same_resource_users = await rt.find_users_of_resource(res_key, res_value) - assert list_user_ids[:(list_user_ids.index(user)+1)] == sorted(list_of_same_resource_users) + list_of_same_resource_users = await rt.find_users_of_resource( + res_key, res_value + ) + assert list_user_ids[: (list_user_ids.index(user) + 1)] == sorted( + list_of_same_resource_users + ) # remove sockets for user in list_user_ids: diff --git a/services/web/server/tests/unit/with_dbs/test_registration.py b/services/web/server/tests/unit/with_dbs/test_registration.py index bf8be8b21b0..b301232a16f 100644 --- a/services/web/server/tests/unit/with_dbs/test_registration.py +++ b/services/web/server/tests/unit/with_dbs/test_registration.py @@ -13,117 +13,129 @@ from utils_assert import assert_error, assert_status from utils_login import NewInvitation, NewUser, parse_link -EMAIL, PASSWORD = 'tester@test.com', 'password' +EMAIL, PASSWORD = "tester@test.com", "password" async def test_regitration_availibility(client): - url = client.app.router['auth_register'].url_for() - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD, - }) + url = client.app.router["auth_register"].url_for() + r = await client.post( + url, json={"email": EMAIL, "password": PASSWORD, "confirm": PASSWORD,} + ) await assert_status(r, web.HTTPOk) + async def test_regitration_is_not_get(client): - url = client.app.router['auth_register'].url_for() + url = client.app.router["auth_register"].url_for() r = await client.get(url) await assert_error(r, web.HTTPMethodNotAllowed) + async def test_registration_with_existing_email(client): db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() + url = client.app.router["auth_register"].url_for() async with NewUser() as user: - r = await client.post(url, json={ - 'email': 
user['email'], - 'password': user['raw_password'], - 'confirm': user['raw_password'] - }) + r = await client.post( + url, + json={ + "email": user["email"], + "password": user["raw_password"], + "confirm": user["raw_password"], + }, + ) await assert_error(r, web.HTTPConflict, cfg.MSG_EMAIL_EXISTS) + @pytest.mark.skip("TODO: Feature still not implemented") async def test_registration_with_expired_confirmation(client, monkeypatch): - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_REQUIRED', True) - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_LIFETIME', -1) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_REQUIRED", True) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_LIFETIME", -1) db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() - - async with NewUser({'status': UserStatus.CONFIRMATION_PENDING.name}) as user: - confirmation = await db.create_confirmation(user, ConfirmationAction.REGISTRATION.name) - r = await client.post(url, json={ - 'email': user['email'], - 'password': user['raw_password'], - 'confirm': user['raw_password'], - }) + url = client.app.router["auth_register"].url_for() + + async with NewUser({"status": UserStatus.CONFIRMATION_PENDING.name}) as user: + confirmation = await db.create_confirmation( + user, ConfirmationAction.REGISTRATION.name + ) + r = await client.post( + url, + json={ + "email": user["email"], + "password": user["raw_password"], + "confirm": user["raw_password"], + }, + ) await db.delete_confirmation(confirmation) await assert_error(r, web.HTTPConflict, cfg.MSG_EMAIL_EXISTS) + async def test_registration_without_confirmation(client, monkeypatch): - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_REQUIRED', False) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_REQUIRED", False) db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD - }) + url = client.app.router["auth_register"].url_for() + r = await client.post( + url, json={"email": EMAIL, "password": PASSWORD, "confirm": PASSWORD} + ) data, error = unwrap_envelope(await r.json()) assert r.status == 200, (data, error) assert cfg.MSG_LOGGED_IN in data["message"] - user = await db.get_user({'email': EMAIL}) + user = await db.get_user({"email": EMAIL}) assert user await db.delete_user(user) + async def test_registration_with_confirmation(client, capsys, monkeypatch): - monkeypatch.setitem(cfg, 'REGISTRATION_CONFIRMATION_REQUIRED', True) + monkeypatch.setitem(cfg, "REGISTRATION_CONFIRMATION_REQUIRED", True) db = get_storage(client.app) - url = client.app.router['auth_register'].url_for() - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD - }) + url = client.app.router["auth_register"].url_for() + r = await client.post( + url, json={"email": EMAIL, "password": PASSWORD, "confirm": PASSWORD} + ) data, error = unwrap_envelope(await r.json()) assert r.status == 200, (data, error) - user = await db.get_user({'email': EMAIL}) - assert user['status'] == UserStatus.CONFIRMATION_PENDING.name + user = await db.get_user({"email": EMAIL}) + assert user["status"] == UserStatus.CONFIRMATION_PENDING.name assert "verification link" in data["message"] # retrieves sent link by email (see monkeypatch of email in conftest.py) out, err = capsys.readouterr() link = parse_link(out) - assert '/auth/confirmation/' in str(link) + assert "/auth/confirmation/" in str(link) resp = await 
client.get(link) text = await resp.text() assert "welcome to fake web front-end" in text assert resp.status == 200 - user = await db.get_user({'email': EMAIL}) - assert user['status'] == UserStatus.ACTIVE.name + user = await db.get_user({"email": EMAIL}) + assert user["status"] == UserStatus.ACTIVE.name await db.delete_user(user) -@pytest.mark.parametrize("is_invitation_required,has_valid_invitation,expected_response", [ - (True, True, web.HTTPOk), - (True, False, web.HTTPForbidden), - (False, True, web.HTTPOk), - (False, False, web.HTTPOk), -]) -async def test_registration_with_invitation(client, is_invitation_required, has_valid_invitation, expected_response): +@pytest.mark.parametrize( + "is_invitation_required,has_valid_invitation,expected_response", + [ + (True, True, web.HTTPOk), + (True, False, web.HTTPForbidden), + (False, True, web.HTTPOk), + (False, False, web.HTTPOk), + ], +) +async def test_registration_with_invitation( + client, is_invitation_required, has_valid_invitation, expected_response +): from servicelib.application_keys import APP_CONFIG_KEY from simcore_service_webserver.login.config import CONFIG_SECTION_NAME - client.app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] = { + client.app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] = { "registration_confirmation_required": False, - "registration_invitation_required": is_invitation_required + "registration_invitation_required": is_invitation_required, } # @@ -133,24 +145,28 @@ async def test_registration_with_invitation(client, is_invitation_required, has_ # Front end then creates the following request # async with NewInvitation(client) as confirmation: - print( get_confirmation_info(confirmation) ) - - url = client.app.router['auth_register'].url_for() - - r = await client.post(url, json={ - 'email': EMAIL, - 'password': PASSWORD, - 'confirm': PASSWORD, - 'invitation': confirmation['code'] if has_valid_invitation else "WRONG_CODE" - }) + print(get_confirmation_info(confirmation)) + + url = client.app.router["auth_register"].url_for() + + r = await client.post( + url, + json={ + "email": EMAIL, + "password": PASSWORD, + "confirm": PASSWORD, + "invitation": confirmation["code"] + if has_valid_invitation + else "WRONG_CODE", + }, + ) await assert_status(r, expected_response) # check optional fields in body if not has_valid_invitation or not is_invitation_required: - r = await client.post(url, json={ - 'email': "new-user" + EMAIL, - 'password': PASSWORD - }) + r = await client.post( + url, json={"email": "new-user" + EMAIL, "password": PASSWORD} + ) await assert_status(r, expected_response) if is_invitation_required and has_valid_invitation: @@ -158,5 +174,5 @@ async def test_registration_with_invitation(client, is_invitation_required, has_ assert not await db.get_confirmation(confirmation) -if __name__ == '__main__': - pytest.main([__file__, '--maxfail=1']) +if __name__ == "__main__": + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_reset_password.py b/services/web/server/tests/unit/with_dbs/test_reset_password.py index 954d51a1906..d324374d51f 100644 --- a/services/web/server/tests/unit/with_dbs/test_reset_password.py +++ b/services/web/server/tests/unit/with_dbs/test_reset_password.py @@ -15,7 +15,7 @@ from utils_assert import assert_status from utils_login import NewUser, parse_link, parse_test_marks -EMAIL, PASSWORD = 'tester@test.com', 'password' +EMAIL, PASSWORD = "tester@test.com", "password" @pytest.fixture @@ -24,11 +24,9 @@ def cfg(client): async def test_unknown_email(client, 
capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() - rp = await client.post(reset_url, json={ - 'email': EMAIL, - }) + rp = await client.post(reset_url, json={"email": EMAIL,}) payload = await rp.text() assert rp.url_obj.path == reset_url.path @@ -39,12 +37,10 @@ async def test_unknown_email(client, capsys, cfg): async def test_banned_user(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() - async with NewUser({'status': UserStatus.BANNED.name}) as user: - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + async with NewUser({"status": UserStatus.BANNED.name}) as user: + rp = await client.post(reset_url, json={"email": user["email"],}) assert rp.url_obj.path == reset_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_EMAIL_SENT.format(**user)) @@ -54,12 +50,10 @@ async def test_banned_user(client, capsys, cfg): async def test_inactive_user(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() - async with NewUser({'status': UserStatus.CONFIRMATION_PENDING.name}) as user: - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + async with NewUser({"status": UserStatus.CONFIRMATION_PENDING.name}) as user: + rp = await client.post(reset_url, json={"email": user["email"],}) assert rp.url_obj.path == reset_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_EMAIL_SENT.format(**user)) @@ -69,16 +63,16 @@ async def test_inactive_user(client, capsys, cfg): async def test_too_often(client, capsys, cfg): - reset_url = client.app.router['auth_reset_password'].url_for() + reset_url = client.app.router["auth_reset_password"].url_for() cfg = client.app[APP_LOGIN_CONFIG] db = cfg.STORAGE async with NewUser() as user: - confirmation = await db.create_confirmation(user, ConfirmationAction.RESET_PASSWORD.name) - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + confirmation = await db.create_confirmation( + user, ConfirmationAction.RESET_PASSWORD.name + ) + rp = await client.post(reset_url, json={"email": user["email"],}) await db.delete_confirmation(confirmation) assert rp.url_obj.path == reset_url.path @@ -88,13 +82,10 @@ async def test_too_often(client, capsys, cfg): assert parse_test_marks(out)["reason"] == cfg.MSG_OFTEN_RESET_PASSWORD - async def test_reset_and_confirm(client, capsys, cfg): async with NewUser() as user: - reset_url = client.app.router['auth_reset_password'].url_for() - rp = await client.post(reset_url, json={ - 'email': user['email'], - }) + reset_url = client.app.router["auth_reset_password"].url_for() + rp = await client.post(reset_url, json={"email": user["email"],}) assert rp.url_obj.path == reset_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_EMAIL_SENT.format(**user)) @@ -105,15 +96,21 @@ async def test_reset_and_confirm(client, capsys, cfg): # emulates user click on email url rp = await client.get(confirmation_url) assert rp.status == 200 - assert rp.url_obj.path_qs == URL(cfg.LOGIN_REDIRECT).with_fragment("reset-password?code=%s" % code ).path_qs + assert ( + rp.url_obj.path_qs + == URL(cfg.LOGIN_REDIRECT) + .with_fragment("reset-password?code=%s" % code) + .path_qs + ) # api/specs/webserver/v0/components/schemas/auth.yaml#/ResetPasswordForm - reset_allowed_url = 
client.app.router['auth_reset_password_allowed'].url_for(code=code) - new_password = get_random_string(5,10) - rp = await client.post(reset_allowed_url, json={ - 'password': new_password, - 'confirm': new_password, - }) + reset_allowed_url = client.app.router["auth_reset_password_allowed"].url_for( + code=code + ) + new_password = get_random_string(5, 10) + rp = await client.post( + reset_allowed_url, json={"password": new_password, "confirm": new_password,} + ) payload = await rp.json() assert rp.status == 200, payload assert rp.url_obj.path == reset_allowed_url.path @@ -121,19 +118,18 @@ async def test_reset_and_confirm(client, capsys, cfg): # TODO: multiple flash messages # Try new password - logout_url = client.app.router['auth_logout'].url_for() + logout_url = client.app.router["auth_logout"].url_for() rp = await client.post(logout_url) assert rp.url_obj.path == logout_url.path await assert_status(rp, web.HTTPUnauthorized, "Unauthorized") - login_url = client.app.router['auth_login'].url_for() - rp = await client.post(login_url, json={ - 'email': user['email'], - 'password': new_password, - }) + login_url = client.app.router["auth_login"].url_for() + rp = await client.post( + login_url, json={"email": user["email"], "password": new_password,} + ) assert rp.url_obj.path == login_url.path await assert_status(rp, web.HTTPOk, cfg.MSG_LOGGED_IN) -if __name__ == '__main__': - pytest.main([__file__, '--maxfail=1']) +if __name__ == "__main__": + pytest.main([__file__, "--maxfail=1"]) diff --git a/services/web/server/tests/unit/with_dbs/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/test_resource_manager.py index fd3f25917b2..eb2551e52d2 100644 --- a/services/web/server/tests/unit/with_dbs/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/test_resource_manager.py @@ -22,8 +22,11 @@ from simcore_service_webserver.director import setup_director from simcore_service_webserver.login import setup_login from simcore_service_webserver.projects import setup_projects -from simcore_service_webserver.resource_manager import (config, registry, - setup_resource_manager) +from simcore_service_webserver.resource_manager import ( + config, + registry, + setup_resource_manager, +) from simcore_service_webserver.resource_manager.registry import get_registry from simcore_service_webserver.rest import setup_rest from simcore_service_webserver.security import setup_security @@ -40,6 +43,7 @@ GARBAGE_COLLECTOR_INTERVAL = 1 SERVICE_DELETION_DELAY = 1 + @pytest.fixture def client(loop, aiohttp_client, app_cfg, postgres_service): cfg = deepcopy(app_cfg) @@ -49,7 +53,9 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): cfg["db"]["init_tables"] = True # inits postgres_service cfg["projects"]["enabled"] = True cfg["director"]["enabled"] = True - cfg[config.CONFIG_SECTION_NAME]["garbage_collection_interval_seconds"] = GARBAGE_COLLECTOR_INTERVAL # increase speed of garbage collection + cfg[config.CONFIG_SECTION_NAME][ + "garbage_collection_interval_seconds" + ] = GARBAGE_COLLECTOR_INTERVAL # increase speed of garbage collection # fake config app = create_safe_application(cfg) @@ -66,10 +72,12 @@ def client(loop, aiohttp_client, app_cfg, postgres_service): setup_director(app) assert setup_resource_manager(app) - yield loop.run_until_complete(aiohttp_client(app, server_kwargs={ - 'port': cfg["main"]["port"], - 'host': cfg['main']['host'] - })) + yield loop.run_until_complete( + aiohttp_client( + app, + server_kwargs={"port": cfg["main"]["port"], "host": 
cfg["main"]["host"]}, + ) + ) @pytest.fixture() @@ -81,12 +89,13 @@ async def logged_user(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds=user_role != UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: print("-----> logged in user", user_role) yield user print("<----- logged out user", user_role) + @pytest.fixture() async def logged_user2(client, user_role: UserRole): """ adds a user in db and logs in with client @@ -96,74 +105,77 @@ async def logged_user2(client, user_role: UserRole): async with LoggedUser( client, {"role": user_role.name}, - check_if_succeeds=user_role != UserRole.ANONYMOUS + check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: print("-----> logged in user", user_role) yield user print("<----- logged out user", user_role) - - @pytest.fixture async def empty_user_project(client, empty_project, logged_user): project = empty_project() - async with NewProject( - project, - client.app, - user_id=logged_user["id"] - ) as project: + async with NewProject(project, client.app, user_id=logged_user["id"]) as project: print("-----> added project", project["name"]) yield project print("<----- removed project", project["name"]) + @pytest.fixture async def empty_user_project2(client, empty_project, logged_user): project = empty_project() - async with NewProject( - project, - client.app, - user_id=logged_user["id"] - ) as project: + async with NewProject(project, client.app, user_id=logged_user["id"]) as project: print("-----> added project", project["name"]) yield project print("<----- removed project", project["name"]) - - # ------------------------ UTILS ---------------------------------- def set_service_deletion_delay(delay: int, app: web.Application): - app[config.APP_CONFIG_KEY][config.CONFIG_SECTION_NAME]["resource_deletion_timeout_seconds"] = delay + app[config.APP_CONFIG_KEY][config.CONFIG_SECTION_NAME][ + "resource_deletion_timeout_seconds" + ] = delay + async def open_project(client, project_uuid: str, client_session_id: str) -> None: url = client.app.router["open_project"].url_for(project_id=project_uuid) resp = await client.post(url, json=client_session_id) await assert_status(resp, web.HTTPOk) + async def close_project(client, project_uuid: str, client_session_id: str) -> None: url = client.app.router["close_project"].url_for(project_id=project_uuid) resp = await client.post(url, json=client_session_id) await assert_status(resp, web.HTTPNoContent) + # ------------------------ TESTS ------------------------------- async def test_anonymous_websocket_connection(socketio_client, client_session_id): with pytest.raises(socketio.exceptions.ConnectionError): await socketio_client(client_session_id()) -@pytest.mark.parametrize("user_role", [ - # (UserRole.ANONYMOUS), - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_websocket_resource_management(client, logged_user, socketio_client, client_session_id): + +@pytest.mark.parametrize( + "user_role", + [ + # (UserRole.ANONYMOUS), + (UserRole.GUEST), + (UserRole.USER), + (UserRole.TESTER), + ], +) +async def test_websocket_resource_management( + client, logged_user, socketio_client, client_session_id +): app = client.server.app socket_registry = get_registry(app) cur_client_session_id = client_session_id() sio = await socketio_client(cur_client_session_id) sid = sio.sid - resource_key = {"user_id":str(logged_user["id"]), "client_session_id": cur_client_session_id} + resource_key = { + "user_id": 
str(logged_user["id"]), + "client_session_id": cur_client_session_id, + } assert await socket_registry.find_keys(("socket_id", sio.sid)) == [resource_key] assert sio.sid in await socket_registry.find_resources(resource_key, "socket_id") assert len(await socket_registry.find_resources(resource_key, "socket_id")) == 1 @@ -174,13 +186,19 @@ async def test_websocket_resource_management(client, logged_user, socketio_clien assert not sid in await socket_registry.find_resources(resource_key, "socket_id") assert not await socket_registry.find_resources(resource_key, "socket_id") -@pytest.mark.parametrize("user_role", [ - # (UserRole.ANONYMOUS), - (UserRole.GUEST), - (UserRole.USER), - (UserRole.TESTER), -]) -async def test_websocket_multiple_connections(client, logged_user, socketio_client, client_session_id): + +@pytest.mark.parametrize( + "user_role", + [ + # (UserRole.ANONYMOUS), + (UserRole.GUEST), + (UserRole.USER), + (UserRole.TESTER), + ], +) +async def test_websocket_multiple_connections( + client, logged_user, socketio_client, client_session_id +): app = client.server.app socket_registry = get_registry(app) NUMBER_OF_SOCKETS = 5 @@ -189,10 +207,20 @@ async def test_websocket_multiple_connections(client, logged_user, socketio_clie for socket in range(NUMBER_OF_SOCKETS): cur_client_session_id = client_session_id() sio = await socketio_client(cur_client_session_id) - resource_key = {"user_id": str(logged_user["id"]), "client_session_id": cur_client_session_id} + resource_key = { + "user_id": str(logged_user["id"]), + "client_session_id": cur_client_session_id, + } assert await socket_registry.find_keys(("socket_id", sio.sid)) == [resource_key] - assert [sio.sid] == await socket_registry.find_resources(resource_key, "socket_id") - assert len(await socket_registry.find_resources({"user_id": str(logged_user["id"]), "client_session_id": "*"}, "socket_id")) == (socket+1) + assert [sio.sid] == await socket_registry.find_resources( + resource_key, "socket_id" + ) + assert len( + await socket_registry.find_resources( + {"user_id": str(logged_user["id"]), "client_session_id": "*"}, + "socket_id", + ) + ) == (socket + 1) clients.append(sio) # NOTE: the socket.io client needs the websockets package in order to upgrade to websocket transport @@ -202,18 +230,25 @@ async def test_websocket_multiple_connections(client, logged_user, socketio_clie await sio.disconnect() assert not sio.sid assert not await socket_registry.find_keys(("socket_id", sio.sid)) - assert not sid in await socket_registry.find_resources(resource_key, "socket_id") + assert not sid in await socket_registry.find_resources( + resource_key, "socket_id" + ) assert not await socket_registry.find_resources(resource_key, "socket_id") -@pytest.mark.parametrize("user_role,expected", [ - # (UserRole.ANONYMOUS, web.HTTPUnauthorized), - (UserRole.GUEST, web.HTTPOk), - (UserRole.USER, web.HTTPOk), - (UserRole.TESTER, web.HTTPOk), -]) -async def test_websocket_disconnected_after_logout(client, logged_user, socketio_client, client_session_id, expected, mocker): +@pytest.mark.parametrize( + "user_role,expected", + [ + # (UserRole.ANONYMOUS, web.HTTPUnauthorized), + (UserRole.GUEST, web.HTTPOk), + (UserRole.USER, web.HTTPOk), + (UserRole.TESTER, web.HTTPOk), + ], +) +async def test_websocket_disconnected_after_logout( + client, logged_user, socketio_client, client_session_id, expected, mocker +): app = client.server.app socket_registry = get_registry(app) @@ -221,24 +256,25 @@ async def test_websocket_disconnected_after_logout(client, logged_user, 
     cur_client_session_id1 = client_session_id()
     sio = await socketio_client(cur_client_session_id1)
     socket_logout_mock_callable = mocker.Mock()
-    sio.on('logout', handler=socket_logout_mock_callable)
+    sio.on("logout", handler=socket_logout_mock_callable)
     # connect second socket
     cur_client_session_id2 = client_session_id()
     sio2 = await socketio_client(cur_client_session_id2)
     socket_logout_mock_callable2 = mocker.Mock()
-    sio2.on('logout', handler=socket_logout_mock_callable2)
+    sio2.on("logout", handler=socket_logout_mock_callable2)
     # connect third socket
     cur_client_session_id3 = client_session_id()
     sio3 = await socketio_client(cur_client_session_id3)
     socket_logout_mock_callable3 = mocker.Mock()
-    sio3.on('logout', handler=socket_logout_mock_callable3)
-
+    sio3.on("logout", handler=socket_logout_mock_callable3)
     # logout client with socket 2
-    logout_url = client.app.router['auth_logout'].url_for()
-    r = await client.post(logout_url, json={"client_session_id": cur_client_session_id2})
+    logout_url = client.app.router["auth_logout"].url_for()
+    r = await client.post(
+        logout_url, json={"client_session_id": cur_client_session_id2}
+    )
     assert r.url_obj.path == logout_url.path
     await assert_status(r, expected)
@@ -250,7 +286,7 @@ async def test_websocket_disconnected_after_logout(client, logged_user, socketio
     # the others should receive a logout message through their respective sockets
     await sleep(3)
     socket_logout_mock_callable.assert_called_once()
-    socket_logout_mock_callable2.assert_not_called() # note 2 should be not called ever
+    socket_logout_mock_callable2.assert_not_called()  # note: 2 should never be called
     socket_logout_mock_callable3.assert_called_once()
     await sleep(3)
@@ -260,47 +296,71 @@ async def test_websocket_disconnected_after_logout(client, logged_user, socketio
     assert not sio3.sid
-@pytest.mark.parametrize("user_role", [
-    (UserRole.GUEST),
-    (UserRole.USER),
-    (UserRole.TESTER),
-])
-async def test_interactive_services_removed_after_logout(loop, client, logged_user, empty_user_project, mocked_director_api, mocked_dynamic_service, client_session_id, socketio_client):
+@pytest.mark.parametrize(
+    "user_role", [(UserRole.GUEST), (UserRole.USER), (UserRole.TESTER),]
+)
+async def test_interactive_services_removed_after_logout(
+    loop,
+    client,
+    logged_user,
+    empty_user_project,
+    mocked_director_api,
+    mocked_dynamic_service,
+    client_session_id,
+    socketio_client,
+):
     set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app)
     # login - logged_user fixture
     # create empty study - empty_user_project fixture
     # create dynamic service - mocked_dynamic_service fixture
-    service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
+    service = await mocked_dynamic_service(
+        logged_user["id"], empty_user_project["uuid"]
+    )
    # create websocket
     client_session_id1 = client_session_id()
     sio = await socketio_client(client_session_id1)
     # open project in client 1
     await open_project(client, empty_user_project["uuid"], client_session_id1)
     # logout
-    logout_url = client.app.router['auth_logout'].url_for()
+    logout_url = client.app.router["auth_logout"].url_for()
     r = await client.post(logout_url, json={"client_session_id": client_session_id1})
     assert r.url_obj.path == logout_url.path
     await assert_status(r, web.HTTPOk)
     # ensure sufficient time is wasted here
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL+1)
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL + 1)
     # assert dynamic service is removed
     calls = [call(client.server.app, service["service_uuid"])]
     mocked_director_api["stop_service"].assert_has_calls(calls)
-@pytest.mark.parametrize("user_role, expected", [
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs(loop, client, logged_user, expected, empty_user_project, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id):
+@pytest.mark.parametrize(
+    "user_role, expected",
+    [
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs(
+    loop,
+    client,
+    logged_user,
+    expected,
+    empty_user_project,
+    mocked_director_api,
+    mocked_dynamic_service,
+    socketio_client,
+    client_session_id,
+):
     set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app)
     # login - logged_user fixture
     # create empty study - empty_user_project fixture
     # create dynamic service - mocked_dynamic_service fixture
-    service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
+    service = await mocked_dynamic_service(
+        logged_user["id"], empty_user_project["uuid"]
+    )
     # create first websocket
     client_session_id1 = client_session_id()
     sio = await socketio_client(client_session_id1)
@@ -317,7 +377,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t
     await sio.disconnect()
     assert not sio.sid
     # ensure sufficient time is wasted here
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL)
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL)
     # assert dynamic service is still around
     mocked_director_api["stop_service"].assert_not_called()
     # disconnect second websocket
@@ -330,24 +390,38 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t
     # assert dynamic service is still around
     mocked_director_api["stop_service"].assert_not_called()
     # even after waiting some time
-    await sleep(SERVICE_DELETION_DELAY+1)
+    await sleep(SERVICE_DELETION_DELAY + 1)
     mocked_director_api["stop_service"].assert_not_called()
     # now really disconnect
     await sio2.disconnect()
     assert not sio2.sid
     # we need to wait for the service deletion delay
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL+1)
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL + 1)
     # assert dynamic service is gone
     calls = [call(client.server.app, service["service_uuid"])]
     mocked_director_api["stop_service"].assert_has_calls(calls)
-@pytest.mark.parametrize("user_role", [
-    # (UserRole.ANONYMOUS),
-    (UserRole.GUEST),
-    (UserRole.USER),
-    (UserRole.TESTER),
-])
-async def test_interactive_services_removed_per_project(loop, client, logged_user, empty_user_project, empty_user_project2, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id):
+
+@pytest.mark.parametrize(
+    "user_role",
+    [
+        # (UserRole.ANONYMOUS),
+        (UserRole.GUEST),
+        (UserRole.USER),
+        (UserRole.TESTER),
+    ],
+)
+async def test_interactive_services_removed_per_project(
+    loop,
+    client,
+    logged_user,
+    empty_user_project,
+    empty_user_project2,
+    mocked_director_api,
+    mocked_dynamic_service,
+    socketio_client,
+    client_session_id,
+):
     set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app)
     # create server with delay set to DELAY
     # login - logged_user fixture
@@ -356,9 +430,15 @@ async def test_interactive_services_removed_per_project(loop, client, logged_use
     # service1 in project1 = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
     # service2 in project2 = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
     # service3 in project2 = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
-    service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
-    service2 = await mocked_dynamic_service(logged_user["id"], empty_user_project2["uuid"])
-    service3 = await mocked_dynamic_service(logged_user["id"], empty_user_project2["uuid"])
+    service = await mocked_dynamic_service(
+        logged_user["id"], empty_user_project["uuid"]
+    )
+    service2 = await mocked_dynamic_service(
+        logged_user["id"], empty_user_project2["uuid"]
+    )
+    service3 = await mocked_dynamic_service(
+        logged_user["id"], empty_user_project2["uuid"]
+    )
     # create websocket1 from tab1
     client_session_id1 = client_session_id()
     sio1 = await socketio_client(client_session_id1)
@@ -373,7 +453,7 @@ async def test_interactive_services_removed_per_project(loop, client, logged_use
     # assert dynamic service is still around
     mocked_director_api["stop_service"].assert_not_called()
     # wait the defined delay
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL)
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL)
     # assert dynamic service 1 is removed
     calls = [call(client.server.app, service["service_uuid"])]
     mocked_director_api["stop_service"].assert_has_calls(calls)
@@ -385,26 +465,44 @@ async def test_interactive_services_removed_per_project(loop, client, logged_use
     # assert dynamic services are still around
     mocked_director_api["stop_service"].assert_not_called()
     # wait the defined delay
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL)
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL)
     # assert dynamic service 2,3 is removed
-    calls = [call(client.server.app, service2["service_uuid"]),
-            call(client.server.app, service3["service_uuid"])]
+    calls = [
+        call(client.server.app, service2["service_uuid"]),
+        call(client.server.app, service3["service_uuid"]),
+    ]
     mocked_director_api["stop_service"].assert_has_calls(calls)
     mocked_director_api["stop_service"].reset_mock()
-@pytest.mark.parametrize("user_role", [
-    # (UserRole.ANONYMOUS),
-    # (UserRole.GUEST),
-    (UserRole.USER),
-    (UserRole.TESTER),
-])
-async def test_services_remain_after_closing_one_out_of_two_tabs(loop, client, logged_user, empty_user_project, empty_user_project2, mocked_director_api, mocked_dynamic_service, socketio_client, client_session_id):
+
+@pytest.mark.parametrize(
+    "user_role",
+    [
+        # (UserRole.ANONYMOUS),
+        # (UserRole.GUEST),
+        (UserRole.USER),
+        (UserRole.TESTER),
+    ],
+)
+async def test_services_remain_after_closing_one_out_of_two_tabs(
+    loop,
+    client,
+    logged_user,
+    empty_user_project,
+    empty_user_project2,
+    mocked_director_api,
+    mocked_dynamic_service,
+    socketio_client,
+    client_session_id,
+):
     set_service_deletion_delay(SERVICE_DELETION_DELAY, client.server.app)
     # create server with delay set to DELAY
     # login - logged_user fixture
     # create empty study in project - empty_user_project fixture
     # service in project = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
-    service = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
+    service = await mocked_dynamic_service(
+        logged_user["id"], empty_user_project["uuid"]
+    )
     # open project in tab1
     client_session_id1 = client_session_id()
     sio1 = await socketio_client(client_session_id1)
@@ -416,13 +514,13 @@ async def test_services_remain_after_closing_one_out_of_two_tabs(loop, client, l
     # close project in tab1
     await close_project(client, empty_user_project["uuid"], client_session_id1)
     # wait the defined delay
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL)
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL)
     # assert dynamic service is still around
     mocked_director_api["stop_service"].assert_not_called()
     # close project in tab2
     await close_project(client, empty_user_project["uuid"], client_session_id2)
     # wait the defined delay
-    await sleep(SERVICE_DELETION_DELAY+GARBAGE_COLLECTOR_INTERVAL)
-    mocked_director_api["stop_service"].assert_has_calls([
-        call(client.server.app, service["service_uuid"])
-    ])
+    await sleep(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL)
+    mocked_director_api["stop_service"].assert_has_calls(
+        [call(client.server.app, service["service_uuid"])]
+    )
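The resource-manager tests above consume the socket registry only through two calls, find_keys and find_resources, over composite keys such as {"user_id": ..., "client_session_id": ...}. For readers following along, here is a minimal in-memory stand-in for that interface; everything in it is an illustrative assumption (the class name, the storage layout, and the "*" wildcard rule are inferred from how the tests query the registry, not taken from the webserver's actual implementation):

from typing import Dict, List, Tuple


class InMemoryRegistry:
    """Hypothetical stand-in for the registry returned by get_registry(app)."""

    def __init__(self) -> None:
        # maps a frozen resource key to its resources, e.g. {"socket_id": "abc"}
        self._entries: Dict[Tuple[Tuple[str, str], ...], Dict[str, str]] = {}

    @staticmethod
    def _freeze(key: Dict[str, str]) -> Tuple[Tuple[str, str], ...]:
        return tuple(sorted(key.items()))

    async def set_resource(self, key: Dict[str, str], resource: Tuple[str, str]) -> None:
        self._entries.setdefault(self._freeze(key), {})[resource[0]] = resource[1]

    async def find_keys(self, resource: Tuple[str, str]) -> List[Dict[str, str]]:
        name, value = resource
        return [dict(k) for k, res in self._entries.items() if res.get(name) == value]

    async def find_resources(self, key: Dict[str, str], resource_name: str) -> List[str]:
        # a "*" value in the lookup key matches anything, mirroring the wildcard
        # query {"client_session_id": "*"} used in test_websocket_multiple_connections
        found = []
        for frozen, resources in self._entries.items():
            entry = dict(frozen)
            if all(v == "*" or entry.get(k) == v for k, v in key.items()):
                if resource_name in resources:
                    found.append(resources[resource_name])
        return found

A stand-in like this is enough to replay the assertions of test_websocket_resource_management by hand, without a running backend.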
diff --git a/services/web/server/tests/unit/with_dbs/test_storage.py b/services/web/server/tests/unit/with_dbs/test_storage.py
index fe1a1d132ad..50652df1aff 100644
--- a/services/web/server/tests/unit/with_dbs/test_storage.py
+++ b/services/web/server/tests/unit/with_dbs/test_storage.py
@@ -33,9 +33,7 @@ async def _get_locs(request: web.Request):
         assert "user_id" in query
         assert query["user_id"], "Expected user id"
-        return web.json_response({
-            'data': [{"user_id": int(query["user_id"])}, ]
-        })
+        return web.json_response({"data": [{"user_id": int(query["user_id"])},]})
     async def _get_filemeta(request: web.Request):
         assert not request.has_body
@@ -46,9 +44,7 @@ async def _get_filemeta(request: web.Request):
         assert query["user_id"], "Expected user id"
-        return web.json_response({
-            'data': [{"filemeta": 42}, ]
-        })
+        return web.json_response({"data": [{"filemeta": 42},]})
     async def _get_filtered_list(request: web.Request):
         assert not request.has_body
@@ -59,9 +55,7 @@ async def _get_filtered_list(request: web.Request):
         assert query["user_id"], "Expected user id"
         assert query["uuid_filter"], "expected a filter"
-        return web.json_response({
-            'data': [{"uuid_filter": query["uuid_filter"]}, ]
-        })
+        return web.json_response({"data": [{"uuid_filter": query["uuid_filter"]},]})
     async def _get_datasets(request: web.Request):
         assert not request.has_body
@@ -72,9 +66,9 @@ async def _get_datasets(request: web.Request):
         assert query["user_id"], "Expected user id"
-        return web.json_response({
-            'data': [{"dataset_id": "asdf", "display_name" : "bbb"}, ]
-        })
+        return web.json_response(
+            {"data": [{"dataset_id": "asdf", "display_name": "bbb"},]}
+        )
     async def _get_datasets_meta(request: web.Request):
         assert not request.has_body
@@ -85,23 +79,31 @@ async def _get_datasets_meta(request: web.Request):
         assert query["user_id"], "Expected user id"
-        return web.json_response({
-            'data': [{"dataset_id": "asdf", "display_name" : "bbb"}, ]
-        })
-
-    storage_api_version = cfg['version']
-    assert storage_api_version != API_VERSION, "backend service w/ different version as webserver entrypoint"
-
-    app.router.add_get(f"/{storage_api_version}/locations" , _get_locs)
-    app.router.add_get(f"/{storage_api_version}/locations/0/files/{{file_id}}/metadata", _get_filemeta)
-    app.router.add_get(f"/{storage_api_version}/locations/0/files/metadata", _get_filtered_list)
+        return web.json_response(
+            {"data": [{"dataset_id": "asdf", "display_name": "bbb"},]}
+        )
+
+    storage_api_version = cfg["version"]
+    assert (
+        storage_api_version != API_VERSION
+    ), "backend service w/ a different version than the webserver entrypoint"
+
+    app.router.add_get(f"/{storage_api_version}/locations", _get_locs)
+    app.router.add_get(
+        f"/{storage_api_version}/locations/0/files/{{file_id}}/metadata", _get_filemeta
+    )
+    app.router.add_get(
+        f"/{storage_api_version}/locations/0/files/metadata", _get_filtered_list
+    )
     app.router.add_get(f"/{storage_api_version}/locations/0/datasets", _get_datasets)
-    app.router.add_get(f"/{storage_api_version}/locations/0/datasets/{{dataset_id}}/metadata", _get_datasets_meta)
+    app.router.add_get(
+        f"/{storage_api_version}/locations/0/datasets/{{dataset_id}}/metadata",
+        _get_datasets_meta,
+    )
-    assert cfg['host']=='localhost'
+    assert cfg["host"] == "localhost"
-
-    server = loop.run_until_complete(aiohttp_server(app, port= cfg['port']))
+    server = loop.run_until_complete(aiohttp_server(app, port=cfg["port"]))
     return server
@@ -112,23 +114,27 @@ async def logged_user(client, role: UserRole):
     NOTE: role fixture is defined as a parametrization below
     """
     async with LoggedUser(
-        client,
-        {"role": role.name},
-        check_if_succeeds = role!=UserRole.ANONYMOUS
+        client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS
     ) as user:
         yield user
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 PREFIX = "/" + API_VERSION + "/storage"
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_get_storage_locations(client, storage_server, logged_user, role, expected):
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_get_storage_locations(
+    client, storage_server, logged_user, role, expected
+):
     url = "/v0/storage/locations"
     assert url.startswith(PREFIX)
@@ -137,21 +143,27 @@ async def test_get_storage_locations(client, storage_server, logged_user, role,
     if not error:
         assert len(data) == 1
-        assert data[0]['user_id'] == logged_user['id']
-
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_get_datasets_metadata(client, storage_server, logged_user, role, expected):
+        assert data[0]["user_id"] == logged_user["id"]
+
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_get_datasets_metadata(
+    client, storage_server, logged_user, role, expected
+):
     url = "/v0/storage/locations/0/datasets"
     assert url.startswith(PREFIX)
     _url = client.app.router["get_datasets_metadata"].url_for(location_id="0")
-    assert url==str(_url)
+    assert url == str(_url)
     resp = await client.get(url)
     data, error = await assert_status(resp, expected)
@@ -161,19 +173,26 @@ async def test_get_datasets_metadata(client, storage_server, logged_user, role,
     assert data[0]["dataset_id"] == "asdf"
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
-async def test_get_files_metadata_dataset(client, storage_server, logged_user, role, expected):
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
+async def test_get_files_metadata_dataset(
+    client, storage_server, logged_user, role, expected
+):
     url = "/v0/storage/locations/0/datasets/N:asdfsdf/metadata"
     assert url.startswith(PREFIX)
-    _url = client.app.router["get_files_metadata_dataset"].url_for(location_id="0", dataset_id="N:asdfsdf")
+    _url = client.app.router["get_files_metadata_dataset"].url_for(
+        location_id="0", dataset_id="N:asdfsdf"
+    )
-    assert url==str(_url)
+    assert url == str(_url)
     resp = await client.get(url)
     data, error = await assert_status(resp, expected)
@@ -182,16 +201,20 @@ async def test_get_files_metadata_dataset(client, storage_server, logged_user, r
         assert len(data) == 1
         assert data[0]["dataset_id"] == "asdf"
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
 async def test_storage_file_meta(client, storage_server, logged_user, role, expected):
     # tests redirect of path with quotes in path
     file_id = "a/b/c/d/e/dat"
-    url = "/v0/storage/locations/0/files/{}/metadata".format(quote(file_id, safe=''))
+    url = "/v0/storage/locations/0/files/{}/metadata".format(quote(file_id, safe=""))
     assert url.startswith(PREFIX)
@@ -200,19 +223,24 @@ async def test_storage_file_meta(client, storage_server, logged_user, role, expe
     if not error:
         assert len(data) == 1
-        assert data[0]['filemeta'] == 42
-
-
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
+        assert data[0]["filemeta"] == 42
+
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
 async def test_storage_list_filter(client, storage_server, logged_user, role, expected):
     # tests composition of 2 queries
     file_id = "a/b/c/d/e/dat"
-    url = "/v0/storage/locations/0/files/metadata?uuid_filter={}".format(quote(file_id, safe=''))
+    url = "/v0/storage/locations/0/files/metadata?uuid_filter={}".format(
+        quote(file_id, safe="")
+    )
     assert url.startswith(PREFIX)
@@ -221,4 +249,4 @@ async def test_storage_list_filter(client, storage_server, logged_user, role, ex
     if not error:
         assert len(data) == 1
-        assert data[0]['uuid_filter'] == file_id
+        assert data[0]["uuid_filter"] == file_id
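Every fixture in the storage suite above follows the same recipe: build a throwaway aiohttp application whose handlers echo back the query they received, serve it with aiohttp_server, and let the webserver proxy /v0/storage/... requests to it. A stripped-down, self-contained sketch of that recipe (the fixture name, route, and handler here are hypothetical, trimmed from the battery of _get_* handlers above):

import pytest
from aiohttp import web


@pytest.fixture
def fake_storage_server(loop, aiohttp_server):
    # one echo handler stands in for the several _get_* handlers in the real fixture
    async def _get_locations(request: web.Request) -> web.Response:
        assert request.query["user_id"], "Expected user id"
        return web.json_response(
            {"data": [{"user_id": int(request.query["user_id"])}]}
        )

    app = web.Application()
    app.router.add_get("/v0/locations", _get_locations)
    return loop.run_until_complete(aiohttp_server(app))

With a backend like this in place, each test only has to assert two things: the proxied response carries the status expected for the role, and the echoed payload proves the webserver forwarded the caller's identity.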
diff --git a/services/web/server/tests/unit/with_dbs/test_users.py b/services/web/server/tests/unit/with_dbs/test_users.py
index 4a57675655c..738092f756e 100644
--- a/services/web/server/tests/unit/with_dbs/test_users.py
+++ b/services/web/server/tests/unit/with_dbs/test_users.py
@@ -24,8 +24,11 @@ from simcore_service_webserver.users import setup_users
 from utils_assert import assert_status
 from utils_login import LoggedUser
-from utils_tokens import (create_token_in_db, delete_all_tokens_from_db,
-                          get_token_from_db)
+from utils_tokens import (
+    create_token_in_db,
+    delete_all_tokens_from_db,
+    get_token_from_db,
+)
 API_VERSION = "v0"
@@ -38,7 +41,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
     assert cfg["rest"]["version"] == API_VERSION
-    cfg["db"]["init_tables"] = True # inits postgres_service
+    cfg["db"]["init_tables"] = True  # inits postgres_service
     # fake config
     app = create_safe_application(cfg)
@@ -50,10 +53,9 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
     setup_login(app)
     setup_users(app)
-    client = loop.run_until_complete(aiohttp_client(app, server_kwargs={
-        'port': port,
-        'host': 'localhost'
-    }))
+    client = loop.run_until_complete(
+        aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
+    )
     return client
@@ -63,6 +65,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
 # https://github.com/pytest-dev/pytest-asyncio/issues/76
 #
+
 @pytest.fixture
 async def logged_user(client, role: UserRole):
     """ adds a user in db and logs in with client
@@ -70,9 +73,7 @@ async def logged_user(client, role: UserRole):
     NOTE: role fixture is defined as a parametrization below
     """
     async with LoggedUser(
-        client,
-        {"role": role.name},
-        check_if_succeeds = role!=UserRole.ANONYMOUS
+        client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS
     ) as user:
         yield user
@@ -90,7 +91,7 @@ async def fake_tokens(logged_user, tokens_db):
     from faker.providers import lorem
     fake = faker.Factory.create()
-    fake.seed(4567) # Always the same fakes
+    fake.seed(4567)  # Always the same fakes
     fake.add_provider(lorem)
     all_tokens = []
@@ -100,28 +101,33 @@ async def fake_tokens(logged_user, tokens_db):
     for _ in repeat(None, 5):
         # TODO: add tokens from other users
         data = {
-            'service': fake.word(ext_word_list=None),
-            'token_key': fake.md5(raw_output=False),
-            'token_secret': fake.md5(raw_output=False)
+            "service": fake.word(ext_word_list=None),
+            "token_key": fake.md5(raw_output=False),
+            "token_secret": fake.md5(raw_output=False),
         }
-        row = await create_token_in_db( tokens_db,
-            user_id = logged_user['id'],
-            token_service = data['service'],
-            token_data = data
+        row = await create_token_in_db(
+            tokens_db,
+            user_id=logged_user["id"],
+            token_service=data["service"],
+            token_data=data,
         )
         all_tokens.append(data)
     return all_tokens
-#--------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 PREFIX = "/" + API_VERSION + "/me"
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPOk),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPOk),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
 async def test_get_profile(logged_user, client, role, expected):
     url = client.app.router["get_my_profile"].url_for()
     assert str(url) == "/v0/me"
@@ -130,18 +136,22 @@ async def test_get_profile(logged_user, client, role, expected):
     data, error = await assert_status(resp, expected)
     if not error:
-        assert data['login'] == logged_user["email"]
-        assert data['gravatar_id']
-        assert data['first_name'] == logged_user["name"]
-        assert data['last_name'] == ""
-        assert data['role'] == role.name.capitalize()
-
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPNoContent),
-])
+        assert data["login"] == logged_user["email"]
+        assert data["gravatar_id"]
+        assert data["first_name"] == logged_user["name"]
+        assert data["last_name"] == ""
+        assert data["role"] == role.name.capitalize()
+
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPNoContent),
+        (UserRole.TESTER, web.HTTPNoContent),
+    ],
+)
 async def test_update_profile(logged_user, client, role, expected):
     url = client.app.router["update_my_profile"].url_for()
     assert str(url) == "/v0/me"
@@ -153,51 +163,55 @@ async def test_update_profile(logged_user, client, role, expected):
     resp = await client.get(url)
     data, _ = await assert_status(resp, web.HTTPOk)
-    assert data['first_name'] == logged_user["name"]
-    assert data['last_name'] == "Foo"
-    assert data['role'] == role.name.capitalize()
-
+    assert data["first_name"] == logged_user["name"]
+    assert data["last_name"] == "Foo"
+    assert data["role"] == role.name.capitalize()
 # Test CRUD on tokens --------------------------------------------
 # TODO: template for CRUD testing?
 # TODO: create parametrize fixture with resource_name
-RESOURCE_NAME = 'tokens'
+RESOURCE_NAME = "tokens"
 PREFIX = "/" + API_VERSION + "/me/" + RESOURCE_NAME
-
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPCreated),
-    (UserRole.TESTER, web.HTTPCreated),
-])
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPCreated),
+        (UserRole.TESTER, web.HTTPCreated),
+    ],
+)
 async def test_create_token(client, logged_user, tokens_db, role, expected):
     url = client.app.router["create_tokens"].url_for()
-    assert '/v0/me/tokens' == str(url)
+    assert "/v0/me/tokens" == str(url)
     token = {
-        'service': "blackfynn",
-        'token_key': '4k9lyzBTS',
-        'token_secret': 'my secret'
+        "service": "blackfynn",
+        "token_key": "4k9lyzBTS",
+        "token_secret": "my secret",
     }
     resp = await client.post(url, json=token)
     data, error = await assert_status(resp, expected)
     if not error:
         db_token = await get_token_from_db(tokens_db, token_data=token)
-        assert db_token['token_data'] == token
-        assert db_token['user_id'] == logged_user["id"]
-
-
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPOk),
-    (UserRole.TESTER, web.HTTPOk),
-])
+        assert db_token["token_data"] == token
+        assert db_token["user_id"] == logged_user["id"]
+
+
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPOk),
+        (UserRole.TESTER, web.HTTPOk),
+    ],
+)
 async def test_read_token(client, logged_user, tokens_db, fake_tokens, role, expected):
     # list all
     url = client.app.router["list_tokens"].url_for()
@@ -208,7 +222,7 @@ async def test_read_token(client, logged_user, tokens_db, fake_tokens, role, exp
     if not error:
         expected_token = random.choice(fake_tokens)
-        sid = expected_token['service']
+        sid = expected_token["service"]
         # get one
         url = client.app.router["get_token"].url_for(service=sid)
@@ -220,44 +234,52 @@ async def test_read_token(client, logged_user, tokens_db, fake_tokens, role, exp
     assert data == expected_token, "list and read item are both read operations"
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPNoContent),
-])
-async def test_update_token(client, logged_user, tokens_db, fake_tokens, role, expected):
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPNoContent),
+        (UserRole.TESTER, web.HTTPNoContent),
+    ],
+)
+async def test_update_token(
+    client, logged_user, tokens_db, fake_tokens, role, expected
+):
     selected = random.choice(fake_tokens)
-    sid = selected['service']
+    sid = selected["service"]
     url = client.app.router["get_token"].url_for(service=sid)
     assert "/v0/me/tokens/%s" % sid == str(url)
-    resp = await client.put(url, json={
-        'token_secret': 'some completely new secret'
-    })
+    resp = await client.put(url, json={"token_secret": "some completely new secret"})
     data, error = await assert_status(resp, expected)
     if not error:
         # check in db
         token_in_db = await get_token_from_db(tokens_db, token_service=sid)
-        assert token_in_db['token_data']['token_secret'] == 'some completely new secret'
-        assert token_in_db['token_data']['token_secret'] != selected['token_secret']
+        assert token_in_db["token_data"]["token_secret"] == "some completely new secret"
+        assert token_in_db["token_data"]["token_secret"] != selected["token_secret"]
-        selected['token_secret'] = 'some completely new secret'
-        assert token_in_db['token_data'] == selected
+        selected["token_secret"] = "some completely new secret"
+        assert token_in_db["token_data"] == selected
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.ANONYMOUS, web.HTTPUnauthorized),
-    (UserRole.GUEST, web.HTTPForbidden),
-    (UserRole.USER, web.HTTPNoContent),
-    (UserRole.TESTER, web.HTTPNoContent),
-])
-async def test_delete_token(client, logged_user, tokens_db, fake_tokens, role, expected):
-    sid = fake_tokens[0]['service']
+@pytest.mark.parametrize(
+    "role,expected",
+    [
+        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+        (UserRole.GUEST, web.HTTPForbidden),
+        (UserRole.USER, web.HTTPNoContent),
+        (UserRole.TESTER, web.HTTPNoContent),
+    ],
+)
+async def test_delete_token(
+    client, logged_user, tokens_db, fake_tokens, role, expected
+):
+    sid = fake_tokens[0]["service"]
     url = client.app.router["delete_token"].url_for(service=sid)
     assert "/v0/me/tokens/%s" % sid == str(url)
@@ -272,6 +294,7 @@ async def test_delete_token(client, logged_user, tokens_db, fake_tokens, role, e
 ## BUG FIXES #######################################################
+
 @pytest.fixture
 def mock_failing_connection(mocker) -> MagicMock:
     """
@@ -280,16 +303,22 @@ def mock_failing_connection(mocker) -> MagicMock:
     """
     # See http://initd.org/psycopg/docs/module.html
     conn_execute = mocker.patch.object(SAConnection, "execute")
-    conn_execute.side_effect=OperationalError("MOCK: server closed the connection unexpectedly")
+    conn_execute.side_effect = OperationalError(
+        "MOCK: server closed the connection unexpectedly"
+    )
     return conn_execute
-@pytest.mark.parametrize("role,expected", [
-    (UserRole.USER, web.HTTPServiceUnavailable),
-])
-async def test_get_profile_with_failing_db_connection(logged_user, client,
+
+@pytest.mark.parametrize(
+    "role,expected", [(UserRole.USER, web.HTTPServiceUnavailable),]
+)
+async def test_get_profile_with_failing_db_connection(
+    logged_user,
+    client,
     mock_failing_connection: MagicMock,
     role: UserRole,
-    expected: web.HTTPException):
+    expected: web.HTTPException,
+):
     """
     Reproduces issue
     https://github.com/ITISFoundation/osparc-simcore/pull/1160
@@ -305,6 +334,8 @@ async def test_get_profile_with_failing_db_connection(logged_user, client,
     resp = await client.get(url)
     NUM_RETRY = 3
-    assert mock_failing_connection.call_count == NUM_RETRY, "Expected mock failure raised in AuthorizationPolicy.authorized_userid after severals"
+    assert (
+        mock_failing_connection.call_count == NUM_RETRY
+    ), "Expected mock failure raised in AuthorizationPolicy.authorized_userid after several retries"
     data, error = await assert_status(resp, expected)
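The closing regression test pins down a retry count: SAConnection.execute is patched to always raise OperationalError, and the profile endpoint must attempt it NUM_RETRY = 3 times before answering 503. The toy below reproduces that contract in isolation; the retry loop itself is an assumption for illustration, since the real policy lives inside the webserver's AuthorizationPolicy:

from unittest.mock import Mock


class OperationalError(Exception):
    """Stand-in for psycopg2's OperationalError."""


def query_with_retries(execute, num_retry: int = 3):
    # try the flaky call a fixed number of times, then degrade gracefully
    for _ in range(num_retry):
        try:
            return execute()
        except OperationalError:
            continue
    return None  # callers map this to web.HTTPServiceUnavailable


execute = Mock(
    side_effect=OperationalError("MOCK: server closed the connection unexpectedly")
)
assert query_with_retries(execute) is None
assert execute.call_count == 3  # the same invariant the test asserts on the mock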
diff --git a/tests/swarm-deploy/Makefile b/tests/swarm-deploy/Makefile
index 4b402d3f865..495e2d0af53 100644
--- a/tests/swarm-deploy/Makefile
+++ b/tests/swarm-deploy/Makefile
@@ -1,4 +1,7 @@
-.DEFAULT_GOAL := help
+#
+# Targets for DEVELOPMENT of system tests
+#
+include ../../scripts/common.Makefile
 
 ROOT_DIR = $(abspath $(CURDIR)/../../)
 VENV_DIR ?= $(abspath $(ROOT_DIR)/.venv)
@@ -19,12 +22,3 @@ install: $(VENV_DIR) requirements.txt ## installs dependencies
 tests: ## runs all tests [DEV]
 	# running unit tests
 	@$(VENV_DIR)/bin/pytest -vv -x --ff --pdb $(CURDIR)
-
-
-.PHONY: help
-# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
-help: ## this colorful help
-	@echo "Recipes for tests/swarm-deploy:"
-	@echo ""
-	@awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
-	@echo ""
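Both Makefiles touched by this diff drop their local boilerplate in favour of include ../../scripts/common.Makefile, a file that is not itself part of the change. Judging purely from the targets deleted here, the shared file presumably centralizes the default goal and the auto-documented help recipe; the sketch below writes that assumption out (the $(notdir $(CURDIR)) echo generalizes the hard-coded "Recipes for tests/swarm-deploy:" line and is not the file's verified content):

# scripts/common.Makefile (assumed sketch, not the actual file)
.DEFAULT_GOAL := help

.PHONY: help
# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
help: ## this colorful help
	@echo "Recipes for $(notdir $(CURDIR)):"
	@echo ""
	@awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
	@echo ""

With that include at the top, make help keeps working in every directory while the awk recipe is maintained in a single place.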