diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index a7428f05750..5be14fb3619 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -332,7 +332,6 @@ jobs: run: ./ci/github/unit-testing/webserver.bash install - name: typecheck run: ./ci/github/unit-testing/webserver.bash typecheck - continue-on-error: true - name: test isolated if: always() run: ./ci/github/unit-testing/webserver.bash test_isolated diff --git a/packages/models-library/src/models_library/api_schemas_webserver/wallets.py b/packages/models-library/src/models_library/api_schemas_webserver/wallets.py index 203f009b5cb..af0aa61ac80 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/wallets.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/wallets.py @@ -13,7 +13,7 @@ class WalletGet(OutputSchema): wallet_id: WalletID - name: str + name: IDStr description: str | None owner: GroupID thumbnail: str | None diff --git a/packages/models-library/src/models_library/basic_types.py b/packages/models-library/src/models_library/basic_types.py index a70709ca802..2f186b31fe2 100644 --- a/packages/models-library/src/models_library/basic_types.py +++ b/packages/models-library/src/models_library/basic_types.py @@ -1,6 +1,6 @@ import re from enum import Enum -from typing import TypeAlias +from typing import Final, TypeAlias from pydantic import ( ConstrainedDecimal, @@ -83,11 +83,32 @@ class UUIDStr(ConstrainedStr): # non-empty bounded string used as identifier # e.g. "123" or "name_123" or "fa327c73-52d8-462a-9267-84eeaf0f90e3" but NOT "" +_ELLIPSIS_CHAR: Final[str] = "..." + + class IDStr(ConstrainedStr): strip_whitespace = True min_length = 1 max_length = 100 + @staticmethod + def concatenate(*args: "IDStr", link_char: str = " ") -> "IDStr": + result = link_char.join(args).strip() + assert IDStr.min_length # nosec + assert IDStr.max_length # nosec + if len(result) > IDStr.max_length: + if IDStr.max_length > len(_ELLIPSIS_CHAR): + result = ( + result[: IDStr.max_length - len(_ELLIPSIS_CHAR)].rstrip() + + _ELLIPSIS_CHAR + ) + else: + result = _ELLIPSIS_CHAR[0] * IDStr.max_length + if len(result) < IDStr.min_length: + msg = f"IDStr.concatenate: result is too short: {result}" + raise ValueError(msg) + return IDStr(result) + class ShortTruncatedStr(ConstrainedStr): # NOTE: Use to input e.g. 
titles or display names diff --git a/packages/service-library/src/servicelib/redis_utils.py b/packages/service-library/src/servicelib/redis_utils.py index 32753ec16ed..10f32ae5944 100644 --- a/packages/service-library/src/servicelib/redis_utils.py +++ b/packages/service-library/src/servicelib/redis_utils.py @@ -3,6 +3,7 @@ import logging from collections.abc import Awaitable, Callable from datetime import timedelta +from typing import Any import arrow @@ -100,3 +101,10 @@ def start_exclusive_periodic_task( usr_tsk_task_name=task_name, **kwargs, ) + + +async def handle_redis_returns_union_types(result: Any | Awaitable[Any]) -> Any: + """Used to handle mypy issues with redis 5.x return types""" + if isinstance(result, Awaitable): + return await result + return result diff --git a/services/web/server/src/simcore_service_webserver/catalog/_api.py b/services/web/server/src/simcore_service_webserver/catalog/_api.py index 9441af6e30e..02dc0b8c151 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_api.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_api.py @@ -1,6 +1,6 @@ import logging from collections.abc import Iterator -from typing import Any +from typing import Any, cast from aiohttp import web from aiohttp.web import Request @@ -319,8 +319,11 @@ async def get_service_output( service = await client.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) - return await ServiceOutputGetFactory.from_catalog_service_api_model( - service=service, output_key=output_key + return cast( # mypy -> aiocache is not typed. + ServiceOutputGet, + await ServiceOutputGetFactory.from_catalog_service_api_model( + service=service, output_key=output_key + ), ) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_models.py b/services/web/server/src/simcore_service_webserver/catalog/_models.py index a2872e57f42..a3803b04d08 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_models.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_models.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Any, Final -from aiocache import cached +from aiocache import cached # type: ignore[import-untyped] from models_library.api_schemas_webserver.catalog import ( ServiceInputGet, ServiceInputKey, diff --git a/services/web/server/src/simcore_service_webserver/db/plugin.py b/services/web/server/src/simcore_service_webserver/db/plugin.py index b7ad4a96284..720637bf993 100644 --- a/services/web/server/src/simcore_service_webserver/db/plugin.py +++ b/services/web/server/src/simcore_service_webserver/db/plugin.py @@ -4,7 +4,7 @@ import logging from collections.abc import AsyncIterator -from typing import Any +from typing import Any, cast from aiohttp import web from aiopg.sa import Engine, create_engine @@ -31,7 +31,7 @@ async def _ensure_pg_ready(settings: PostgresSettings) -> Engine: _logger.info("Connecting to postgres with %s", f"{settings=}") - engine = await create_engine( + engine: Engine = await create_engine( settings.dsn, application_name=settings.POSTGRES_CLIENT_NAME, minsize=settings.POSTGRES_MINSIZE, @@ -91,7 +91,7 @@ def get_engine_state(app: web.Application) -> dict[str, Any]: def get_database_engine(app: web.Application) -> Engine: - return app[APP_DB_ENGINE_KEY] + return cast(Engine, app[APP_DB_ENGINE_KEY]) @app_module_setup( diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py 
b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py index 637d308c56e..3c5509e449e 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py @@ -13,6 +13,7 @@ NodeGetIdle, NodeGetUnknown, ) +from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -108,7 +109,7 @@ async def stop_dynamic_services_in_project( user_id, project_id, ), - description="stopping services", + description=IDStr("stopping services"), ) ) @@ -123,7 +124,7 @@ async def stop_dynamic_services_in_project( save_state=save_state, ), progress=progress_bar.sub_progress( - 1, description=f"{service.node_uuid}" + 1, description=IDStr(f"{service.node_uuid}") ), ) for service in running_dynamic_services diff --git a/services/web/server/src/simcore_service_webserver/email/plugin.py b/services/web/server/src/simcore_service_webserver/email/plugin.py index 3ccf636c64c..cb72ea8135f 100644 --- a/services/web/server/src/simcore_service_webserver/email/plugin.py +++ b/services/web/server/src/simcore_service_webserver/email/plugin.py @@ -5,10 +5,11 @@ MIME: Multipurpose Internet Mail Extensions """ + import logging import aiohttp_jinja2 -import jinja_app_loader +import jinja_app_loader # type: ignore[import-untyped] from aiohttp import web from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup diff --git a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py index 9b93e076e6f..cdb075638bd 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py @@ -75,7 +75,7 @@ async def export_project(request: web.Request): ) headers = {"Content-Disposition": f'attachment; filename="{file_to_download.name}"'} - + assert delete_tmp_dir # nosec return CleanupFileResponse( remove_tmp_dir_cb=delete_tmp_dir, path=file_to_download, headers=headers ) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py index 8d42ec6dde3..ed8f6108df6 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py @@ -8,6 +8,7 @@ FolderGetPage, PutFolderBodyParams, ) +from models_library.basic_types import IDStr from models_library.folders import FolderID from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import Page, PageQueryParameters @@ -70,8 +71,9 @@ class FoldersPathParams(StrictRequestParams): class FolderListWithJsonStrQueryParams(PageQueryParameters): - order_by: Json[OrderBy] = Field( # pylint: disable=unsubscriptable-object - default=OrderBy(field="modified", direction=OrderDirection.DESC), + # pylint: disable=unsubscriptable-object + order_by: Json[OrderBy] = Field( # type: ignore[type-arg] + default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC), description="Order by field (modified_at|name|description) and direction (asc|desc). 
The default sorting order is ascending.", example='{"field": "name", "direction": "desc"}', alias="order_by", @@ -89,7 +91,8 @@ def validate_order_by_field(cls, v): "name", "description", }: - raise ValueError(f"We do not support ordering by provided field {v.field}") + msg = f"We do not support ordering by provided field {v.field}" + raise ValueError(msg) if v.field == "modified_at": v.field = "modified" return v diff --git a/services/web/server/src/simcore_service_webserver/groups/_db.py b/services/web/server/src/simcore_service_webserver/groups/_db.py index 8105ef134b7..38bbb4e7d7c 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_db.py +++ b/services/web/server/src/simcore_service_webserver/groups/_db.py @@ -51,14 +51,16 @@ async def _get_user_group( group = await result.fetchone() if not group: raise GroupNotFoundError(gid) + assert isinstance(group, RowProxy) # nosec return group async def get_user_from_email(conn: SAConnection, email: str) -> RowProxy: result = await conn.execute(sa.select(users).where(users.c.email == email)) - user: RowProxy = await result.fetchone() + user = await result.fetchone() if not user: raise UserNotFoundError(email=email) + assert isinstance(user, RowProxy) # nosec return user diff --git a/services/web/server/src/simcore_service_webserver/groups/_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_handlers.py index dcf69347056..a04df023405 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_handlers.py @@ -10,18 +10,17 @@ UsersGroup, ) from models_library.emails import LowerCaseEmailStr -from models_library.users import GroupID +from models_library.users import GroupID, UserID from models_library.utils.json_serialization import json_dumps -from pydantic import BaseModel, parse_obj_as +from pydantic import BaseModel, Extra, Field, parse_obj_as from servicelib.aiohttp.requests_validation import ( parse_request_path_parameters_as, parse_request_query_parameters_as, ) from servicelib.aiohttp.typing_extension import Handler from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from simcore_service_webserver.utils_aiohttp import envelope_json_response -from .._constants import RQT_USERID_KEY +from .._constants import RQ_PRODUCT_KEY, RQT_USERID_KEY from .._meta import API_VTAG from ..login.decorators import login_required from ..products.api import Product, get_current_product @@ -31,6 +30,7 @@ from ..scicrunch.service_client import SciCrunch from ..security.decorators import permission_required from ..users.exceptions import UserNotFoundError +from ..utils_aiohttp import envelope_json_response from . 
import api from ._classifiers import GroupClassifierRepository, build_rrids_tree_view from .exceptions import ( @@ -42,6 +42,11 @@ _logger = logging.getLogger(__name__) +class _GroupsRequestContext(BaseModel): + user_id: UserID = Field(..., alias=RQT_USERID_KEY) + product_name: str = Field(..., alias=RQ_PRODUCT_KEY) + + def _handle_groups_exceptions(handler: Handler): @functools.wraps(handler) async def wrapper(request: web.Request) -> web.StreamResponse: @@ -77,9 +82,10 @@ async def list_groups(request: web.Request): """ product: Product = get_current_product(request) - user_id = request[RQT_USERID_KEY] + req_ctx = _GroupsRequestContext.parse_obj(request) + primary_group, user_groups, all_group = await api.list_user_groups_with_read_access( - request.app, user_id + request.app, req_ctx.user_id ) result = { @@ -93,7 +99,7 @@ async def list_groups(request: web.Request): with suppress(GroupNotFoundError): result["product"] = await api.get_product_group_for_user( app=request.app, - user_id=user_id, + user_id=req_ctx.user_id, product_gid=product.group_id, ) @@ -101,16 +107,23 @@ async def list_groups(request: web.Request): return result +class _GroupPathParams(BaseModel): + gid: GroupID + + class Config: + extra = Extra.forbid + + @routes.get(f"/{API_VTAG}/groups/{{gid}}", name="get_group") @login_required @permission_required("groups.read") @_handle_groups_exceptions async def get_group(request: web.Request): """Get one group details""" - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupPathParams, request) - group = await api.get_user_group(request.app, user_id, gid) + group = await api.get_user_group(request.app, req_ctx.user_id, path_params.gid) assert parse_obj_as(UsersGroup, group) is not None # nosec return group @@ -121,10 +134,10 @@ async def get_group(request: web.Request): @_handle_groups_exceptions async def create_group(request: web.Request): """Creates organization groups""" - user_id = request[RQT_USERID_KEY] + req_ctx = _GroupsRequestContext.parse_obj(request) new_group = await request.json() - created_group = await api.create_user_group(request.app, user_id, new_group) + created_group = await api.create_user_group(request.app, req_ctx.user_id, new_group) assert parse_obj_as(UsersGroup, created_group) is not None # nosec raise web.HTTPCreated( text=json_dumps({"data": created_group}), content_type=MIMETYPE_APPLICATION_JSON @@ -136,12 +149,12 @@ async def create_group(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def update_group(request: web.Request): - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupPathParams, request) new_group_values = await request.json() updated_group = await api.update_user_group( - request.app, user_id, gid, new_group_values + request.app, req_ctx.user_id, path_params.gid, new_group_values ) assert parse_obj_as(UsersGroup, updated_group) is not None # nosec return envelope_json_response(updated_group) @@ -152,10 +165,10 @@ async def update_group(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def delete_group(request: web.Request): - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupPathParams, 
request) - await api.delete_user_group(request.app, user_id, gid) + await api.delete_user_group(request.app, req_ctx.user_id, path_params.gid) raise web.HTTPNoContent @@ -164,10 +177,12 @@ async def delete_group(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def get_group_users(request: web.Request): - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupPathParams, request) - group_user = await api.list_users_in_group(request.app, user_id, gid) + group_user = await api.list_users_in_group( + request.app, req_ctx.user_id, path_params.gid + ) assert parse_obj_as(list[GroupUserGet], group_user) is not None # nosec return envelope_json_response(group_user) @@ -180,8 +195,8 @@ async def add_group_user(request: web.Request): """ Adds a user in an organization group """ - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupPathParams, request) new_user_in_group = await request.json() assert "uid" in new_user_in_group or "email" in new_user_in_group # nosec @@ -195,14 +210,22 @@ async def add_group_user(request: web.Request): await api.add_user_in_group( request.app, - user_id, - gid, + req_ctx.user_id, + path_params.gid, new_user_id=new_user_id, new_user_email=new_user_email, ) raise web.HTTPNoContent +class _GroupUserPathParams(BaseModel): + gid: GroupID + uid: UserID + + class Config: + extra = Extra.forbid + + @routes.get(f"/{API_VTAG}/groups/{{gid}}/users/{{uid}}", name="get_group_user") @login_required @permission_required("groups.*") @@ -211,10 +234,11 @@ async def get_group_user(request: web.Request): """ Gets specific user in group """ - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] - the_user_id_in_group = request.match_info["uid"] - user = await api.get_user_in_group(request.app, user_id, gid, the_user_id_in_group) + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupUserPathParams, request) + user = await api.get_user_in_group( + request.app, req_ctx.user_id, path_params.gid, path_params.uid + ) assert parse_obj_as(GroupUserGet, user) is not None # nosec return envelope_json_response(user) @@ -227,15 +251,14 @@ async def update_group_user(request: web.Request): """ Modify specific user in group """ - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] - the_user_id_in_group = request.match_info["uid"] + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = parse_request_path_parameters_as(_GroupUserPathParams, request) new_values_for_user_in_group = await request.json() user = await api.update_user_in_group( request.app, - user_id, - gid, - the_user_id_in_group, + req_ctx.user_id, + path_params.gid, + path_params.uid, new_values_for_user_in_group, ) assert parse_obj_as(GroupUserGet, user) is not None # nosec @@ -247,10 +270,11 @@ async def update_group_user(request: web.Request): @permission_required("groups.*") @_handle_groups_exceptions async def delete_group_user(request: web.Request): - user_id = request[RQT_USERID_KEY] - gid = request.match_info["gid"] - the_user_id_in_group = request.match_info["uid"] - await api.delete_user_in_group(request.app, user_id, gid, the_user_id_in_group) + req_ctx = _GroupsRequestContext.parse_obj(request) + path_params = 
parse_request_path_parameters_as(_GroupUserPathParams, request) + await api.delete_user_in_group( + request.app, req_ctx.user_id, path_params.gid, path_params.uid + ) raise web.HTTPNoContent @@ -293,7 +317,7 @@ async def get_group_classifiers(request: web.Request): def _handle_scicrunch_exceptions(handler: Handler): @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.Response: + async def wrapper(request: web.Request) -> web.StreamResponse: try: return await handler(request) diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index dabc4ead386..a32c44d41ff 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -4,7 +4,7 @@ import logging -from aiodebug import log_slow_callbacks +from aiodebug import log_slow_callbacks # type: ignore[import-untyped] from aiohttp.log import access_logger from servicelib.logging_utils import config_all_loggers diff --git a/services/web/server/src/simcore_service_webserver/login/_2fa_api.py b/services/web/server/src/simcore_service_webserver/login/_2fa_api.py index 24b57e8b159..06a96f00e6d 100644 --- a/services/web/server/src/simcore_service_webserver/login/_2fa_api.py +++ b/services/web/server/src/simcore_service_webserver/login/_2fa_api.py @@ -17,8 +17,8 @@ from servicelib.logging_utils import LogExtra, get_log_record_extra, log_decorator from servicelib.utils_secrets import generate_passcode from settings_library.twilio import TwilioSettings -from twilio.base.exceptions import TwilioException -from twilio.rest import Client +from twilio.base.exceptions import TwilioException # type: ignore[import-untyped] +from twilio.rest import Client # type: ignore[import-untyped] from ..login.errors import SendingVerificationEmailError, SendingVerificationSmsError from ..products.api import Product diff --git a/services/web/server/src/simcore_service_webserver/login/_registration.py b/services/web/server/src/simcore_service_webserver/login/_registration.py index 6b6cdde006e..7da8a7b1085 100644 --- a/services/web/server/src/simcore_service_webserver/login/_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/_registration.py @@ -74,7 +74,9 @@ class InvitationData(BaseModel): class _InvitationValidator(BaseModel): action: ConfirmationAction - data: Json[InvitationData] # pylint: disable=unsubscriptable-object + data: Json[ # type: ignore[type-arg] # pydantic upgrade to 1.10 needed # pylint: disable=unsubscriptable-object + InvitationData + ] @validator("action", pre=True) @classmethod @@ -136,10 +138,10 @@ async def check_other_registrations( ) if drop_previous_registration: if not _confirmation: - await db.delete_user(user=user) + await db.delete_user(user=dict(user)) else: await db.delete_confirmation_and_user( - user=user, confirmation=_confirmation + user=dict(user), confirmation=_confirmation ) _logger.warning( @@ -276,7 +278,7 @@ async def check_and_consume_invitation( # database-type invitations if confirmation_token := await validate_confirmation_code(invitation_code, db, cfg): try: - invitation_data: InvitationData = _InvitationValidator.parse_obj( + invitation_data: InvitationData = _InvitationValidator.parse_obj( # type: ignore[assignment] # need to update pydantic confirmation_token ).data return invitation_data diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_change.py 
b/services/web/server/src/simcore_service_webserver/login/handlers_change.py index bdbd1ca3d41..f8b71ce8763 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_change.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_change.py @@ -77,7 +77,7 @@ async def submit_request_to_reset_password(request: web.Request): reason=MSG_UNKNOWN_EMAIL, content_type=MIMETYPE_APPLICATION_JSON ) # 422 - validate_user_status(user=user, support_email=product.support_email) + validate_user_status(user=dict(user), support_email=product.support_email) assert user["status"] == ACTIVE # nosec assert user["email"] == request_body.email # nosec @@ -209,7 +209,8 @@ async def change_password(request: web.Request): ) # 422 await db.update_user( - user, {"password_hash": encrypt_password(passwords.new.get_secret_value())} + dict(user), + {"password_hash": encrypt_password(passwords.new.get_secret_value())}, ) return flash_response(MSG_PASSWORD_CHANGED) diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py b/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py index da6c851fa59..ecb99ce84e7 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_confirmation.py @@ -236,7 +236,7 @@ async def phone_confirmation(request: web.Request): try: user = await db.get_user({"email": request_body.email}) assert user is not None # nosec - await db.update_user(user, {"phone": request_body.phone}) + await db.update_user(dict(user), {"phone": request_body.phone}) except UniqueViolation as err: raise web.HTTPUnauthorized( @@ -283,7 +283,7 @@ async def reset_password(request: web.Request): assert user # nosec await db.update_user( - user, + dict(user), { "password_hash": encrypt_password( request_body.password.get_secret_value() diff --git a/services/web/server/src/simcore_service_webserver/login/storage.py b/services/web/server/src/simcore_service_webserver/login/storage.py index a50b2f283b8..e1c5e18d70c 100644 --- a/services/web/server/src/simcore_service_webserver/login/storage.py +++ b/services/web/server/src/simcore_service_webserver/login/storage.py @@ -38,7 +38,11 @@ class ConfirmationTokenDict(BaseConfirmationTokenDict): class AsyncpgStorage: def __init__( - self, pool, *, user_table_name="users", confirmation_table_name="confirmations" + self, + pool: asyncpg.Pool, + *, + user_table_name: str = "users", + confirmation_table_name: str = "confirmations", ): self.pool = pool self.user_tbl = user_table_name @@ -52,10 +56,11 @@ async def get_user(self, with_data: dict[str, Any]) -> asyncpg.Record | None: async with self.pool.acquire() as conn: return await _sql.find_one(conn, self.user_tbl, with_data) - async def create_user(self, data: dict[str, Any]) -> asyncpg.Record: + async def create_user(self, data: dict[str, Any]) -> dict[str, Any]: async with self.pool.acquire() as conn: user_id = await _sql.insert(conn, self.user_tbl, data) new_user = await _sql.find_one(conn, self.user_tbl, {"id": user_id}) + assert new_user # nosec data.update( id=new_user["id"], created_at=new_user["created_at"], @@ -63,11 +68,11 @@ async def create_user(self, data: dict[str, Any]) -> asyncpg.Record: ) return data - async def update_user(self, user, updates) -> asyncpg.Record: + async def update_user(self, user: dict[str, Any], updates: dict[str, Any]) -> None: async with self.pool.acquire() as conn: await _sql.update(conn, 
self.user_tbl, {"id": user["id"]}, updates) - async def delete_user(self, user): + async def delete_user(self, user: dict[str, Any]) -> None: async with self.pool.acquire() as conn: await _sql.delete(conn, self.user_tbl, {"id": user["id"]}) @@ -96,12 +101,14 @@ async def create_confirmation( created_at=datetime.utcnow(), ) c = await _sql.insert( - conn, self.confirm_tbl, confirmation, returning="code" + conn, self.confirm_tbl, dict(confirmation), returning="code" ) assert numeric_code == c # nosec return confirmation - async def get_confirmation(self, filter_dict) -> ConfirmationTokenDict | None: + async def get_confirmation( + self, filter_dict: dict[str, Any] + ) -> ConfirmationTokenDict | None: if "user" in filter_dict: filter_dict["user_id"] = filter_dict.pop("user")["id"] async with self.pool.acquire() as conn: @@ -121,14 +128,14 @@ async def delete_confirmation(self, confirmation: ConfirmationTokenDict): # async def delete_confirmation_and_user( - self, user, confirmation: ConfirmationTokenDict + self, user: dict[str, Any], confirmation: ConfirmationTokenDict ): async with self.pool.acquire() as conn, conn.transaction(): await _sql.delete(conn, self.confirm_tbl, {"code": confirmation["code"]}) await _sql.delete(conn, self.user_tbl, {"id": user["id"]}) async def delete_confirmation_and_update_user( - self, user_id, updates, confirmation: ConfirmationTokenDict + self, user_id: int, updates: dict[str, Any], confirmation: ConfirmationTokenDict ): async with self.pool.acquire() as conn, conn.transaction(): await _sql.delete(conn, self.confirm_tbl, {"code": confirmation["code"]}) diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py index 2c6473aef36..05bb9ac778b 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py +++ b/services/web/server/src/simcore_service_webserver/meta_modeling/_iterations.py @@ -10,7 +10,7 @@ from typing import Any, Literal, Optional from aiohttp import web -from models_library.basic_types import MD5Str, SHA1Str +from models_library.basic_types import KeyIDStr, SHA1Str from models_library.function_services_catalog import is_iterator_service from models_library.projects import ProjectID from models_library.projects_nodes import Node, OutputID, OutputTypes @@ -37,7 +37,7 @@ _ParametersNodesPair = tuple[Parameters, NodesDict] -def _compute_params_checksum(parameters: Parameters) -> MD5Str: +def _compute_params_checksum(parameters: Parameters) -> SHA1Str: # NOTE: parameters are within a project's dataset which can # be considered small (based on test_compute_sh1_on_small_dataset) return compute_sha1_on_small_dataset(parameters) @@ -72,7 +72,10 @@ def _build_project_iterations(project_nodes: NodesDict) -> list[_ParametersNodes assert node_def.inputs # nosec node_call = _function_nodes.catalog.get_implementation(node.key, node.version) - g = node_call(**{name: node.inputs[name] for name in node_def.inputs}) + assert node_call # nosec + g = node_call( + **{f"{name}": node.inputs[KeyIDStr(name)] for name in node_def.inputs} + ) assert isinstance(g, Iterator) # nosec nodes_generators.append(g) diff --git a/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py b/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py index 05ddb1ea5e0..f52f2021b43 100644 --- a/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py +++ 
b/services/web/server/src/simcore_service_webserver/meta_modeling/_version_control.py @@ -4,6 +4,7 @@ import logging from types import SimpleNamespace +from typing import cast from aiopg.sa.result import RowProxy from models_library.projects import ProjectIDStr @@ -107,7 +108,7 @@ async def create_workcopy_and_branch_from_commit( .set_filter(name=tag_name) .fetch() ): - return existing_tag.commit_id + return cast(CommitID, existing_tag.commit_id) # get workcopy for start_commit_id and update with 'project' repo = ( @@ -163,7 +164,7 @@ async def create_workcopy_and_branch_from_commit( hidden=IS_INTERNAL_OPERATION, ) - return branch.head_commit_id + return cast(CommitID, branch.head_commit_id) async def get_children_tags( self, repo_id: int, commit_id: int diff --git a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py index 4055be3121c..d9a6b1f0861 100644 --- a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py +++ b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py @@ -65,8 +65,10 @@ async def _convert_to_node_update_event( async def _progress_message_parser(app: web.Application, data: bytes) -> bool: - rabbit_message: ProgressRabbitMessageNode | ProgressRabbitMessageProject = ( - parse_raw_as(ProgressRabbitMessageNode | ProgressRabbitMessageProject, data) + rabbit_message: ( + ProgressRabbitMessageNode | ProgressRabbitMessageProject + ) = parse_raw_as( + ProgressRabbitMessageNode | ProgressRabbitMessageProject, data # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 ) message: SocketMessageDict | None = None if isinstance(rabbit_message, ProgressRabbitMessageProject): diff --git a/services/web/server/src/simcore_service_webserver/payments/_events.py b/services/web/server/src/simcore_service_webserver/payments/_events.py index 769f0b9ba9a..fbc4ebc2047 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_events.py +++ b/services/web/server/src/simcore_service_webserver/payments/_events.py @@ -26,6 +26,9 @@ async def validate_prices_in_product_settings_on_startup(app: web.Application): amount_usd=payment_settings.PAYMENTS_AUTORECHARGE_DEFAULT_TOP_UP_AMOUNT, min_payment_amount_usd=product.min_payment_amount_usd, ) + assert ( # nosec + payment_settings.PAYMENTS_AUTORECHARGE_DEFAULT_MONTHLY_LIMIT is not None + ) if ( product.min_payment_amount_usd > payment_settings.PAYMENTS_AUTORECHARGE_DEFAULT_MONTHLY_LIMIT diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_api.py b/services/web/server/src/simcore_service_webserver/payments/_methods_api.py index 47d3c00a323..a1eac2b440d 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_methods_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_methods_api.py @@ -15,6 +15,7 @@ from models_library.products import ProductName from models_library.users import UserID from models_library.wallets import WalletID +from pydantic import HttpUrl, parse_obj_as from servicelib.logging_utils import log_decorator from simcore_postgres_database.models.payments_methods import InitPromptAckFlowState from yarl import URL @@ -68,7 +69,7 @@ def _to_api_model( @log_decorator(_logger, level=logging.INFO) async def _fake_init_creation_of_wallet_payment_method( app, settings, 
user_id, wallet_id -): +) -> PaymentMethodInitiated: # NOTE: this will be removed as soon as dev payment gateway is available in master # hold timestamp initiated_at = arrow.utcnow().datetime @@ -96,7 +97,7 @@ async def _fake_init_creation_of_wallet_payment_method( return PaymentMethodInitiated( wallet_id=wallet_id, payment_method_id=payment_method_id, - payment_method_form_url=f"{form_link}", + payment_method_form_url=parse_obj_as(HttpUrl, f"{form_link}"), ) @@ -192,7 +193,9 @@ async def _fake_list_wallet_payment_methods( @log_decorator(_logger, level=logging.INFO) -async def _fake_get_wallet_payment_method(app, user_id, wallet_id, payment_method_id): +async def _fake_get_wallet_payment_method( + app, user_id, wallet_id, payment_method_id +) -> PaymentMethodGet: acked = await get_successful_payment_method( app, user_id=user_id, diff --git a/services/web/server/src/simcore_service_webserver/products/_api.py b/services/web/server/src/simcore_service_webserver/products/_api.py index 1753b6e0aa8..3d8c38a14f3 100644 --- a/services/web/server/src/simcore_service_webserver/products/_api.py +++ b/services/web/server/src/simcore_service_webserver/products/_api.py @@ -1,5 +1,6 @@ from decimal import Decimal from pathlib import Path +from typing import cast import aiofiles from aiohttp import web @@ -48,7 +49,10 @@ async def get_current_product_credit_price_info( """ current_product_name = get_product_name(request) repo = ProductRepository.create_from_request(request) - return await repo.get_product_latest_price_info_or_none(current_product_name) + return cast( # mypy: not sure why + ProductPriceInfo | None, + await repo.get_product_latest_price_info_or_none(current_product_name), + ) async def get_credit_amount( @@ -99,7 +103,7 @@ async def get_product_stripe_info( ): msg = f"Missing product stripe for product {product_name}" raise ValueError(msg) - return product_stripe_info + return cast(ProductStripeInfoGet, product_stripe_info) # mypy: not sure why # diff --git a/services/web/server/src/simcore_service_webserver/products/_events.py b/services/web/server/src/simcore_service_webserver/products/_events.py index edcbb68abe6..22f7ced4f33 100644 --- a/services/web/server/src/simcore_service_webserver/products/_events.py +++ b/services/web/server/src/simcore_service_webserver/products/_events.py @@ -6,6 +6,7 @@ from aiohttp import web from aiopg.sa.engine import Engine +from aiopg.sa.result import RowProxy from pydantic import ValidationError from servicelib.exceptions import InvalidConfig from simcore_postgres_database.utils_products import ( @@ -78,8 +79,9 @@ async def load_products_on_startup(app: web.Application): engine: Engine = app[APP_DB_ENGINE_KEY] async with engine.acquire() as connection: async for row in iter_products(connection): + assert isinstance(row, RowProxy) # nosec try: - name = row.name # type: ignore[attr-defined] # sqlalchemy + name = row.name payments = await get_product_payment_fields( connection, product_name=name diff --git a/services/web/server/src/simcore_service_webserver/products/_model.py b/services/web/server/src/simcore_service_webserver/products/_model.py index 7652782d540..d9a93f3e286 100644 --- a/services/web/server/src/simcore_service_webserver/products/_model.py +++ b/services/web/server/src/simcore_service_webserver/products/_model.py @@ -1,6 +1,10 @@ import logging import string -from typing import Any, ClassVar, Pattern # noqa: UP035 +from typing import ( # noqa: UP035 # pydantic does not validate with re.Pattern + Any, + ClassVar, + Pattern, +) from 
models_library.basic_regex import ( PUBLIC_VARIABLE_NAME_RE, @@ -20,6 +24,7 @@ Vendor, WebFeedback, ) +from sqlalchemy import Column from ..db.models import products from ..statics._constants import FRONTEND_APPS_AVAILABLE @@ -143,9 +148,11 @@ class Config: }, # defaults from sqlalchemy table **{ - str(c.name): c.server_default.arg + str(c.name): c.server_default.arg # type: ignore[union-attr] for c in products.columns - if c.server_default and isinstance(c.server_default.arg, str) + if isinstance(c, Column) + and c.server_default + and isinstance(c.server_default.arg, str) # type: ignore[union-attr] }, }, # Example of data in the dabase with a url set with blanks diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights_db.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights_db.py index c6b5b45099d..0f6a9dafbd3 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_access_rights_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_access_rights_db.py @@ -16,4 +16,5 @@ async def get_project_owner(engine: Engine, project_uuid: ProjectID) -> UserID: owner_id = await connection.scalar(stmt) if owner_id is None: raise ProjectNotFoundError(project_uuid=project_uuid) + assert isinstance(owner_id, int) return owner_id diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py index dd70fe4a98d..79186150c90 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers.py @@ -132,10 +132,10 @@ async def create_project(request: web.Request): predefined_project = None else: # request w/ body (I found cases in which body = {}) - project_create: ProjectCreateNew | ProjectCopyOverride | EmptyModel = ( - await parse_request_body_as( - ProjectCreateNew | ProjectCopyOverride | EmptyModel, request - ) + project_create: ( + ProjectCreateNew | ProjectCopyOverride | EmptyModel + ) = await parse_request_body_as( + ProjectCreateNew | ProjectCopyOverride | EmptyModel, request # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950 ) predefined_project = ( project_create.dict( @@ -148,7 +148,7 @@ async def create_project(request: web.Request): return await start_long_running_task( request, - _crud_api_create.create_project, + _crud_api_create.create_project, # type: ignore[arg-type] # @GitHK, @pcrespov this one I don't know how to fix fire_and_forget=True, task_context=jsonable_encoder(req_ctx), # arguments @@ -605,7 +605,7 @@ async def clone_project(request: web.Request): return await start_long_running_task( request, - _crud_api_create.create_project, + _crud_api_create.create_project, # type: ignore[arg-type] # @GitHK, @pcrespov this one I don't know how to fix fire_and_forget=True, task_context=jsonable_encoder(req_ctx), # arguments diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py index 9d9ec98ed1d..369b240a004 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_handlers_models.py @@ -6,6 +6,7 @@ from typing import Any +from models_library.basic_types import IDStr from models_library.folders import FolderID from 
models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -111,8 +112,8 @@ def search_check_empty_string(cls, v): class ProjectListWithJsonStrParams(ProjectListParams): - order_by: Json[OrderBy] = Field( # pylint: disable=unsubscriptable-object - default=OrderBy(field="last_change_date", direction=OrderDirection.DESC), + order_by: Json[OrderBy] = Field( # type: ignore[type-arg] # need update to pydantic 1.10 # pylint: disable=unsubscriptable-object + default=OrderBy(field=IDStr("last_change_date"), direction=OrderDirection.DESC), description="Order by field (type|uuid|name|description|prj_owner|creation_date|last_change_date) and direction (asc|desc). The default sorting order is ascending.", example='{"field": "prj_owner", "direction": "desc"}', alias="order_by", @@ -130,7 +131,8 @@ def validate_order_by_field(cls, v): "creation_date", "last_change_date", }: - raise ValueError(f"We do not support ordering by provided field {v.field}") + msg = f"We do not support ordering by provided field {v.field}" + raise ValueError(msg) return v class Config: diff --git a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py index d3576b74b03..8cd7ba3deb4 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_db_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_db_utils.py @@ -4,7 +4,7 @@ from copy import deepcopy from datetime import datetime from enum import Enum -from typing import Any, Literal +from typing import Any, Literal, cast import sqlalchemy as sa from aiopg.sa.connection import SAConnection @@ -54,7 +54,7 @@ class ProjectAccessRights(Enum): def check_project_permissions( project: ProjectProxy | ProjectDict, user_id: int, - user_groups: list[dict[str, Any]], + user_groups: list[dict[str, Any]] | list[RowProxy], permission: str, ) -> None: """ @@ -197,13 +197,13 @@ async def _get_everyone_group(cls, conn: SAConnection) -> RowProxy: ) row = await result.first() assert row is not None # nosec - return row + return cast(RowProxy, row) # mypy: not sure why this cast is necessary @classmethod async def _list_user_groups( cls, conn: SAConnection, user_id: int ) -> list[RowProxy]: - user_groups: list[RowProxy] = [] + user_groups = [] if user_id == ANY_USER_ID_SENTINEL: everyone_group = await cls._get_everyone_group(conn) @@ -215,7 +215,7 @@ async def _list_user_groups( .select_from(groups.join(user_to_groups)) .where(user_to_groups.c.uid == user_id) ) - user_groups = await result.fetchall() + user_groups = await result.fetchall() or [] return user_groups @staticmethod @@ -270,6 +270,7 @@ async def _execute_with_permission_check( db_projects: list[dict] = [] # DB model-compatible projects project_types: list[ProjectType] = [] async for row in conn.execute(select_projects_query): + assert isinstance(row, RowProxy) # nosec try: check_project_permissions(row, user_id, user_groups, "read") diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py index 0bfa7467382..6a511a8ba4c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_db.py @@ -40,21 +40,21 @@ async def wrapper(*args, **kwargs) -> Any: return await fct(*args, **kwargs) except DBProjectNotFoundError as err: - raise 
ProjectNotFoundError(project_uuid=err.project_uuid) from err # type: ignore[attr-defined] # context defined in pydantic error + raise ProjectNotFoundError(project_uuid=err.project_uuid) from err # type: ignore[attr-defined] # context defined in pydantic error # pylint: disable=no-member except ProjectNodesNodeNotFoundError as err: raise NodeNotFoundError( - project_uuid=err.project_uuid, node_uuid=err.node_id # type: ignore[attr-defined] # context defined in pydantic error + project_uuid=err.project_uuid, node_uuid=err.node_id # type: ignore[attr-defined] # context defined in pydantic error # pylint: disable=no-member ) from err except ProjectNodesNonUniqueNodeFoundError as err: raise ProjectInvalidUsageError from err except DBProjectInvalidParentNodeError as err: raise ParentNodeNotFoundError( - project_uuid=err.project_uuid, node_uuid=err.parent_node_id # type: ignore[attr-defined] # context defined in pydantic error + project_uuid=err.project_uuid, node_uuid=err.parent_node_id # type: ignore[attr-defined] # context defined in pydantic error # pylint: disable=no-member ) from err except DBProjectInvalidParentProjectError as err: raise ParentProjectNotFoundError( - project_uuid=err.parent_project_uuid # type: ignore[attr-defined] # context defined in pydantic error + project_uuid=err.parent_project_uuid # type: ignore[attr-defined] # context defined in pydantic error # pylint: disable=no-member ) from err except DBProjectInvalidAncestorsError as err: raise ProjectInvalidUsageError from err diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py index 67a85d012f4..22866cf6b2a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_api.py @@ -8,6 +8,7 @@ from aiohttp import web from aiohttp.client import ClientError from models_library.api_schemas_storage import FileMetaDataGet +from models_library.basic_types import KeyIDStr from models_library.projects import ProjectID from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, SimCoreFileLink @@ -227,7 +228,7 @@ async def get_node_screenshots( assert node.outputs is not None # nosec - filelink = parse_obj_as(SimCoreFileLink, node.outputs["outFile"]) + filelink = parse_obj_as(SimCoreFileLink, node.outputs[KeyIDStr("outFile")]) file_url = await get_download_link(app, user_id, filelink) screenshots.append( diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py index a2a7eabc8a7..b2ac322b031 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_handlers.py @@ -295,7 +295,7 @@ async def update_node_outputs(request: web.Request) -> web.Response: node_outputs = await parse_request_body_as(NodeOutputs, request) ui_changed_keys = set() - ui_changed_keys.add(path_params.node_id) + ui_changed_keys.add(f"{path_params.node_id}") await nodes_utils.update_node_outputs( app=request.app, user_id=req_ctx.user_id, @@ -377,7 +377,7 @@ async def stop_node(request: web.Request) -> web.Response: return await start_long_running_task( request, - _stop_dynamic_service_task, + _stop_dynamic_service_task, # type: ignore[arg-type] # @GitHK, @pcrespov this one I don't know how to fix 
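# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the patch): the
# Json[OrderBy] query-parameter pattern used in FolderListWithJsonStrQueryParams
# and ProjectListWithJsonStrParams above. The client sends `order_by` as a JSON
# string, pydantic v1's Json[...] field decodes it into an OrderBy instance, and
# the default therefore has to be a real OrderBy -- which is why its `field` is
# now wrapped in IDStr(...) so the declared types line up for mypy.
# `_DemoQueryParams` is a hypothetical model name; the imports are the same ones
# already used in the diff above.
from models_library.basic_types import IDStr
from models_library.rest_ordering import OrderBy, OrderDirection
from pydantic import BaseModel, Field, Json


class _DemoQueryParams(BaseModel):
    order_by: Json[OrderBy] = Field(  # decoded from a JSON string in the query
        default=OrderBy(field=IDStr("last_change_date"), direction=OrderDirection.DESC)
    )


# e.g. parsed from ?order_by={"field": "name", "direction": "desc"}
_params = _DemoQueryParams(order_by='{"field": "name", "direction": "desc"}')
assert _params.order_by.field == "name"
# ---------------------------------------------------------------------------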
task_context=jsonable_encoder(req_ctx), # task arguments from here on --- app=request.app, diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py b/services/web/server/src/simcore_service_webserver/projects/_ports_api.py index 5704c7cf768..30b7bd2ca12 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/_ports_api.py @@ -8,7 +8,7 @@ TasksOutputs, TasksSelection, ) -from models_library.basic_types import KeyIDStr +from models_library.basic_types import IDStr, KeyIDStr from models_library.function_services_catalog.api import ( catalog, is_parameter_service, @@ -17,6 +17,8 @@ from models_library.projects import ProjectID from models_library.projects_nodes import Node, OutputsDict from models_library.projects_nodes_io import NodeID, PortLink +from models_library.services_io import ServiceInput, ServiceOutput +from models_library.services_types import ServicePortKey from models_library.utils.json_schema import ( JsonSchemaValidationError, jsonschema_validate_data, @@ -32,7 +34,7 @@ class ProjectPortData: kind: Literal["input", "output"] node_id: NodeID - io_key: str + io_key: ServicePortKey node: Node @property @@ -55,7 +57,9 @@ def get_schema(self) -> JsonSchemaDict | None: return self._get_port_schema(output_meta) return None - def _get_port_schema(self, io_meta): + def _get_port_schema( + self, io_meta: ServiceInput | ServiceOutput + ) -> JsonSchemaDict | None: schema = get_service_io_json_schema(io_meta) if schema: # uses node label instead of service title @@ -91,7 +95,12 @@ def iter_project_ports( for output_key in node.outputs: yield ProjectPortData( - kind="input", node_id=node_id, io_key=output_key, node=node + kind="input", + node_id=node_id, + io_key=ServicePortKey( + output_key + ), # NOTE: PC: ServicePortKey and KeyIDStr are the same why do we need both? + node=node, ) # nodes representing OUTPUT ports: can read this node's input @@ -101,7 +110,12 @@ def iter_project_ports( for inputs_key in node.inputs: yield ProjectPortData( - kind="output", node_id=node_id, io_key=inputs_key, node=node + kind="output", + node_id=node_id, + io_key=ServicePortKey( + inputs_key + ), # NOTE: PC: ServicePortKey and KeyIDStr are the same why do we need both? 
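# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the patch): why wrapping
# plain strings in ServicePortKey(...) / KeyIDStr(...) above is a no-op at
# runtime. Under pydantic v1 these are ConstrainedStr subclasses, i.e. str
# subclasses whose constraints are only enforced when pydantic validates them
# inside a model; calling the class directly performs no validation and simply
# returns a str. The wraps therefore only narrow the static type so that mypy
# accepts the typed dict lookups. `_DemoPortKey` is a hypothetical stand-in for
# ServicePortKey / KeyIDStr.
from pydantic import ConstrainedStr


class _DemoPortKey(ConstrainedStr):
    min_length = 1


key = _DemoPortKey("out_1")
assert isinstance(key, str) and key == "out_1"  # plain str behaviour at runtime
outputs: dict[_DemoPortKey, int] = {key: 42}  # satisfies a dict typed with the key type
# ---------------------------------------------------------------------------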
+ node=node, ) @@ -110,7 +124,7 @@ def get_project_inputs(workbench: dict[NodeID, Node]) -> dict[NodeID, Any]: input_to_value = {} for port in iter_project_ports(workbench, "input"): input_to_value[port.node_id] = ( - port.node.outputs["out_1"] if port.node.outputs else None + port.node.outputs[KeyIDStr("out_1")] if port.node.outputs else None ) return input_to_value @@ -131,7 +145,10 @@ def set_inputs_in_project( # validates value against jsonschema try: port = ProjectPortData( - kind="input", node_id=node_id, io_key="out_1", node=node + kind="input", + node_id=node_id, + io_key=ServicePortKey("out_1"), + node=node, ) if schema := port.get_schema(): jsonschema_validate_data(value, schema) @@ -159,7 +176,7 @@ class _OutputPortInfo(NamedTuple): def _get_outputs_in_workbench(workbench: dict[NodeID, Node]) -> dict[NodeID, Any]: """Get the outputs values in the workbench associated to every output""" - output_to_value = {} + output_to_value: dict[NodeID, Any] = {} for port in iter_project_ports(workbench, "output"): if port.node.inputs: try: @@ -171,7 +188,7 @@ def _get_outputs_in_workbench(workbench: dict[NodeID, Node]) -> dict[NodeID, Any task_node_id = port_link.node_uuid task_output_name = port_link.output task_node = workbench[task_node_id] - value = _OutputPortInfo( + output_to_value[port.node_id] = _OutputPortInfo( port_node_id=port.node_id, task_node_id=task_node_id, task_output_name=task_output_name, @@ -185,11 +202,10 @@ def _get_outputs_in_workbench(workbench: dict[NodeID, Node]) -> dict[NodeID, Any ) except ValidationError: # not a link - value = port.node.inputs[KeyIDStr("in_1")] + output_to_value[port.node_id] = port.node.inputs[KeyIDStr("in_1")] else: - value = None + output_to_value[port.node_id] = None - output_to_value[port.node_id] = value return output_to_value @@ -226,7 +242,7 @@ async def get_project_outputs( if isinstance(v, _OutputPortInfo): assert v.port_node_id == port_node_id # nosec outputs_map[port_node_id] = tasks_outputs[v.task_node_id].get( - v.task_output_name, v.task_output_in_workbench + IDStr(v.task_output_name), v.task_output_in_workbench ) return outputs_map diff --git a/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py index 2ab24d8c0e5..e014014cd59 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/_ports_handlers.py @@ -5,6 +5,7 @@ import functools import logging +from collections.abc import Awaitable, Callable from typing import Any, Literal from aiohttp import web @@ -13,6 +14,7 @@ ProjectInputUpdate, ProjectOutputGet, ) +from models_library.basic_types import KeyIDStr from models_library.projects import ProjectID from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID @@ -51,7 +53,9 @@ def _web_json_response_enveloped(data: Any) -> web.Response: ) -def _handle_project_exceptions(handler): +def _handle_project_exceptions( + handler: Callable[[web.Request], Awaitable[web.Response]] +) -> Callable[[web.Request], Awaitable[web.Response]]: @functools.wraps(handler) async def wrapper(request: web.Request) -> web.Response: try: @@ -142,7 +146,7 @@ async def update_project_inputs(request: web.Request) -> web.Response: if node_id not in current_inputs: raise web.HTTPBadRequest(reason=f"Invalid input key [{node_id}]") - workbench[node_id].outputs = {"out_1": input_update.value} + workbench[node_id].outputs = 
{KeyIDStr("out_1"): input_update.value} partial_workbench_data[node_id] = workbench[node_id].dict( include={"outputs"}, exclude_unset=True ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_access.py b/services/web/server/src/simcore_service_webserver/projects/_projects_access.py index ddf39e59304..685fc0c00ac 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_access.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_access.py @@ -1,4 +1,4 @@ -import jsondiff +import jsondiff # type: ignore[import-untyped] from aiohttp import web from simcore_postgres_database.models.users import UserRole diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py index 059ea7ea404..0431d4a5a21 100644 --- a/services/web/server/src/simcore_service_webserver/projects/db.py +++ b/services/web/server/src/simcore_service_webserver/projects/db.py @@ -1189,7 +1189,7 @@ async def get_project_type(self, project_uuid: ProjectID) -> ProjectType: ) row = await result.first() if row: - return row[projects.c.type] + return ProjectType(row[projects.c.type]) raise ProjectNotFoundError(project_uuid=project_uuid) # diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index 152d8d187cb..7b8ff61a971 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -4,7 +4,6 @@ import redis.exceptions from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID from models_library.users import UserID from ..errors import WebServerBaseError @@ -168,7 +167,7 @@ def __init__( self, *, unset_required_inputs: list[str], - node_with_required_inputs: NodeID, + node_with_required_inputs: str, **ctx, ): super().__init__( diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 5eca2ca5ff7..a8829e7f084 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -37,7 +37,7 @@ from models_library.products import ProductName from models_library.projects import Project, ProjectID, ProjectIDStr from models_library.projects_access import Owner -from models_library.projects_nodes import Node, OutputsDict +from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, NodeIDStr, PortLink from models_library.projects_state import ( ProjectLocked, @@ -474,7 +474,7 @@ async def _check_project_node_has_all_required_inputs( unset_outputs_in_upstream: list[tuple[str, str]] = [] def _check_required_input(required_input_key: KeyIDStr) -> None: - input_entry: PortLink | None = None + input_entry = None if node.inputs: input_entry = node.inputs.get(required_input_key, None) if input_entry is None: @@ -482,12 +482,13 @@ def _check_required_input(required_input_key: KeyIDStr) -> None: unset_required_inputs.append(required_input_key) return - source_node_id: NodeID = input_entry.node_uuid + assert isinstance(input_entry, PortLink) # nosec + source_node_id = input_entry.node_uuid source_output_key = input_entry.output source_node = nodes_map[source_node_id] - output_entry: 
OutputsDict | None = None + output_entry = None if source_node.outputs: output_entry = source_node.outputs.get(source_output_key, None) if output_entry is None: @@ -1014,7 +1015,9 @@ async def update_project_node_outputs( # changed entries come in the form of {node_uuid: {outputs: {changed_key1: value1, changed_key2: value2}}} # we do want only the key names - changed_keys = changed_entries.get(f"{node_id}", {}).get("outputs", {}).keys() + changed_keys = ( + changed_entries.get(NodeIDStr(f"{node_id}"), {}).get("outputs", {}).keys() + ) return updated_project, changed_keys diff --git a/services/web/server/src/simcore_service_webserver/publications/_handlers.py b/services/web/server/src/simcore_service_webserver/publications/_handlers.py index dd93aa788d5..3a88ca641c5 100644 --- a/services/web/server/src/simcore_service_webserver/publications/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/publications/_handlers.py @@ -1,7 +1,7 @@ import logging from aiohttp import MultipartReader, hdrs, web -from json2html import json2html +from json2html import json2html # type: ignore[import-untyped] from models_library.utils.json_serialization import json_dumps from servicelib.mimetype_constants import ( MIMETYPE_APPLICATION_JSON, @@ -26,7 +26,7 @@ @login_required async def service_submission(request: web.Request): product = get_current_product(request) - reader = MultipartReader.from_response(request) + reader = MultipartReader.from_response(request) # type: ignore[arg-type] # PC, IP Whoever is in charge of this. please have a look. this looks very weird data = None filename = None filedata = None @@ -37,16 +37,16 @@ async def service_submission(request: web.Request): if part is None: break if part.headers[hdrs.CONTENT_TYPE] == MIMETYPE_APPLICATION_JSON: - data = await part.json() + data = await part.json() # type: ignore[union-attr] # PC, IP Whoever is in charge of this. please have a look continue if part.headers[hdrs.CONTENT_TYPE] == MIMETYPE_APPLICATION_ZIP: - filedata = await part.read(decode=True) + filedata = await part.read(decode=True) # type: ignore[union-attr] # PC, IP Whoever is in charge of this. please have a look # Validate max file size maxsize = 10 * 1024 * 1024 # 10MB actualsize = len(filedata) if actualsize > maxsize: raise web.HTTPRequestEntityTooLarge(maxsize, actualsize) - filename = part.filename + filename = part.filename # type: ignore[union-attr] # PC, IP Whoever is in charge of this. 
diff --git a/services/web/server/src/simcore_service_webserver/publications/_handlers.py b/services/web/server/src/simcore_service_webserver/publications/_handlers.py
index dd93aa788d5..3a88ca641c5 100644
--- a/services/web/server/src/simcore_service_webserver/publications/_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/publications/_handlers.py
@@ -1,7 +1,7 @@
 import logging

 from aiohttp import MultipartReader, hdrs, web
-from json2html import json2html
+from json2html import json2html  # type: ignore[import-untyped]
 from models_library.utils.json_serialization import json_dumps
 from servicelib.mimetype_constants import (
     MIMETYPE_APPLICATION_JSON,
@@ -26,7 +26,7 @@
 @login_required
 async def service_submission(request: web.Request):
     product = get_current_product(request)
-    reader = MultipartReader.from_response(request)
+    reader = MultipartReader.from_response(request)  # type: ignore[arg-type] # PC, IP Whoever is in charge of this. please have a look. this looks very weird
     data = None
     filename = None
     filedata = None
@@ -37,16 +37,16 @@ async def service_submission(request: web.Request):
         if part is None:
             break
         if part.headers[hdrs.CONTENT_TYPE] == MIMETYPE_APPLICATION_JSON:
-            data = await part.json()
+            data = await part.json()  # type: ignore[union-attr] # PC, IP Whoever is in charge of this. please have a look
             continue
         if part.headers[hdrs.CONTENT_TYPE] == MIMETYPE_APPLICATION_ZIP:
-            filedata = await part.read(decode=True)
+            filedata = await part.read(decode=True)  # type: ignore[union-attr] # PC, IP Whoever is in charge of this. please have a look
             # Validate max file size
             maxsize = 10 * 1024 * 1024  # 10MB
             actualsize = len(filedata)
             if actualsize > maxsize:
                 raise web.HTTPRequestEntityTooLarge(maxsize, actualsize)
-            filename = part.filename
+            filename = part.filename  # type: ignore[union-attr] # PC, IP Whoever is in charge of this. please have a look
             continue
         raise web.HTTPUnsupportedMediaType(
             reason=f"One part had an unexpected type: {part.headers[hdrs.CONTENT_TYPE]}"
@@ -56,7 +56,9 @@ async def service_submission(request: web.Request):
     db: AsyncpgStorage = get_plugin_storage(request.app)
     user = await db.get_user({"id": request[RQT_USERID_KEY]})
+    assert user  # nosec
     user_email = user.get("email")
+    assert user_email  # nosec

     try:
         attachments = [
@@ -89,6 +91,6 @@ async def service_submission(request: web.Request):
         )
     except Exception as exc:
         _logger.exception("Error while sending the 'new service submission' mail.")
-        raise web.HTTPServiceUnavailable() from exc
+        raise web.HTTPServiceUnavailable from exc

     raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON)
diff --git a/services/web/server/src/simcore_service_webserver/rabbitmq.py b/services/web/server/src/simcore_service_webserver/rabbitmq.py
index b533cab7b2a..c415c53057f 100644
--- a/services/web/server/src/simcore_service_webserver/rabbitmq.py
+++ b/services/web/server/src/simcore_service_webserver/rabbitmq.py
@@ -1,6 +1,6 @@
 import logging
 from collections.abc import AsyncIterator
-from typing import Final
+from typing import Final, cast

 from aiohttp import web
 from models_library.errors import RABBITMQ_CLIENT_UNHEALTHY_MSG
@@ -85,12 +85,12 @@ def setup_rabbitmq(app: web.Application) -> None:


 def get_rabbitmq_rpc_client(app: web.Application) -> RabbitMQRPCClient:
-    return app[_RPC_CLIENT_KEY]
+    return cast(RabbitMQRPCClient, app[_RPC_CLIENT_KEY])


 def get_rabbitmq_client(app: web.Application) -> RabbitMQClient:
-    return app[APP_RABBITMQ_CLIENT_KEY]
+    return cast(RabbitMQClient, app[APP_RABBITMQ_CLIENT_KEY])


 def get_rabbitmq_rpc_server(app: web.Application) -> RabbitMQRPCClient:
-    return app[APP_RABBITMQ_RPC_SERVER_KEY]
+    return cast(RabbitMQRPCClient, app[APP_RABBITMQ_RPC_SERVER_KEY])
diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py
index 9ba7c7fcee8..f1457308dd8 100644
--- a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py
+++ b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py
@@ -19,6 +19,7 @@
 import redis.asyncio as aioredis
 from aiohttp import web
 from models_library.basic_types import UUIDStr
+from servicelib.redis_utils import handle_redis_returns_union_types

 from ..redis import get_redis_resources_client
 from ._constants import APP_CLIENT_SOCKET_REGISTRY_KEY
@@ -94,16 +95,20 @@ async def set_resource(
     ) -> None:
         hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}"
         field, value = resource
-        await self.client.hset(hash_key, mapping={field: value})  # type: ignore[misc]
+        await handle_redis_returns_union_types(
+            self.client.hset(hash_key, mapping={field: value})
+        )

     async def get_resources(self, key: UserSessionDict) -> ResourcesDict:
         hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}"
-        fields = await self.client.hgetall(hash_key)
+        fields = await handle_redis_returns_union_types(self.client.hgetall(hash_key))
         return ResourcesDict(**fields)

     async def remove_resource(self, key: UserSessionDict, resource_name: str) -> None:
         hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}"
-        await self.client.hdel(hash_key, resource_name)
+        await handle_redis_returns_union_types(
+            self.client.hdel(hash_key, resource_name)
+        )

     async def find_resources(
         self, key: UserSessionDict, resource_name: str
@@ -112,23 +117,27 @@ async def find_resources(
         # the key might only be partialy complete
         partial_hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}"
         async for scanned_key in self.client.scan_iter(match=partial_hash_key):
-            if await self.client.hexists(scanned_key, resource_name):
-                key_value = await self.client.hget(scanned_key, resource_name)
+            if await handle_redis_returns_union_types(
+                self.client.hexists(scanned_key, resource_name)
+            ):
+                key_value = await handle_redis_returns_union_types(
+                    self.client.hget(scanned_key, resource_name)
+                )
                 if key_value is not None:
                     resources.append(key_value)
         return resources

     async def find_keys(self, resource: tuple[str, str]) -> list[UserSessionDict]:
-        keys: list[UserSessionDict] = []
         if not resource:
-            return keys
+            return []

         field, value = resource
-
-        async for hash_key in self.client.scan_iter(match=f"*:{_RESOURCE_SUFFIX}"):
-            if value == await self.client.hget(hash_key, field):
-                keys.append(self._decode_hash_key(hash_key))
-        return keys
+        return [
+            self._decode_hash_key(hash_key)
+            async for hash_key in self.client.scan_iter(match=f"*:{_RESOURCE_SUFFIX}")
+            if value
+            == await handle_redis_returns_union_types(self.client.hget(hash_key, field))
+        ]

     async def set_key_alive(self, key: UserSessionDict, timeout: int) -> None:
         # setting the timeout to always expire, timeout > 0
@@ -138,7 +147,7 @@ async def set_key_alive(self, key: UserSessionDict, timeout: int) -> None:

     async def is_key_alive(self, key: UserSessionDict) -> bool:
         hash_key = f"{self._hash_key(key)}:{_ALIVE_SUFFIX}"
-        return await self.client.exists(hash_key) > 0
+        return bool(await self.client.exists(hash_key) > 0)

     async def remove_key(self, key: UserSessionDict) -> None:
         await self.client.delete(
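The registry changes route every hash operation through `handle_redis_returns_union_types`, because redis-py 5.x annotates these methods as returning either a value or an awaitable. A minimal sketch of the calling pattern (the client setup and function name are assumptions, not service code):

    import redis.asyncio as aioredis
    from servicelib.redis_utils import handle_redis_returns_union_types


    async def read_field(client: aioredis.Redis, hash_key: str, field: str):
        # hget is typed as `Awaitable[Any] | Any`; the helper awaits the result
        # when it is awaitable and returns it unchanged otherwise
        return await handle_redis_returns_union_types(client.hget(hash_key, field))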
diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py
index 8d1d25742b8..1539168e03d 100644
--- a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_handlers.py
@@ -6,6 +6,7 @@
     OsparcCreditsAggregatedUsagesPage,
     ServiceRunPage,
 )
+from models_library.basic_types import IDStr
 from models_library.resource_tracker import (
     ServiceResourceUsagesFilters,
     ServicesAggregatedUsagesTimePeriod,
@@ -70,12 +71,12 @@ class _RequestContext(BaseModel):

 class _ListServicesResourceUsagesQueryParams(BaseModel):
     wallet_id: WalletID | None = Field(default=None)
-    order_by: Json[OrderBy] = Field(  # pylint: disable=unsubscriptable-object
-        default=OrderBy(field="started_at", direction=OrderDirection.DESC),
+    order_by: Json[OrderBy] = Field(  # type: ignore[type-arg] # need to update pydantic # pylint: disable=unsubscriptable-object
+        default=OrderBy(field=IDStr("started_at"), direction=OrderDirection.DESC),
         description=ORDER_BY_DESCRIPTION,
         example='{"field": "started_at", "direction": "desc"}',
     )
-    filters: (
+    filters: (  # type: ignore[type-arg] # need to update pydantic
         Json[ServiceResourceUsagesFilters]  # pylint: disable=unsubscriptable-object
         | None
     ) = Field(
@@ -169,7 +170,7 @@ async def list_resource_usage_services(request: web.Request):
         offset=query_params.offset,
         limit=query_params.limit,
         order_by=parse_obj_as(OrderBy, query_params.order_by),
-        filters=parse_obj_as(ServiceResourceUsagesFilters | None, query_params.filters),
+        filters=parse_obj_as(ServiceResourceUsagesFilters | None, query_params.filters),  # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950
     )

     page = Page[dict[str, Any]].parse_obj(
@@ -246,7 +247,7 @@ async def export_resource_usage_services(request: web.Request):
         user_id=req_ctx.user_id,
         product_name=req_ctx.product_name,
         wallet_id=query_params.wallet_id,
-        order_by=parse_obj_as(OrderBy | None, query_params.order_by),
-        filters=parse_obj_as(ServiceResourceUsagesFilters | None, query_params.filters),
+        order_by=parse_obj_as(OrderBy | None, query_params.order_by),  # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950
+        filters=parse_obj_as(ServiceResourceUsagesFilters | None, query_params.filters),  # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950
     )
     raise web.HTTPFound(location=f"{download_url}")
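The `order_by` field keeps accepting its JSON-encoded query parameter, matching the declared `example`. A small self-contained sketch of how pydantic's `Json[...]` decodes the raw string before validating it (the `OrderBy`/`OrderDirection` classes below are simplified stand-ins, not the models_library definitions):

    from enum import Enum

    from pydantic import BaseModel, Json


    class OrderDirection(str, Enum):  # stand-in, assumed values
        ASC = "asc"
        DESC = "desc"


    class OrderBy(BaseModel):  # stand-in, assumed fields
        field: str
        direction: OrderDirection


    class _Params(BaseModel):
        order_by: Json[OrderBy]  # decodes the JSON string, then validates OrderBy


    params = _Params(order_by='{"field": "started_at", "direction": "desc"}')
    assert params.order_by.direction is OrderDirection.DESC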
diff --git a/services/web/server/src/simcore_service_webserver/session/plugin.py b/services/web/server/src/simcore_service_webserver/session/plugin.py
index c5c4e2938a5..9f3f7263689 100644
--- a/services/web/server/src/simcore_service_webserver/session/plugin.py
+++ b/services/web/server/src/simcore_service_webserver/session/plugin.py
@@ -1,6 +1,7 @@
 """ user's session plugin
 """
+
 import logging

 import aiohttp_session
@@ -42,4 +43,4 @@ def setup_session(app: web.Application):
         samesite=settings.SESSION_COOKIE_SAMESITE,
     )
     aiohttp_session.setup(app=app, storage=encrypted_cookie_sessions)
-    app.middlewares[-1].__middleware_name__ = f"{__name__}.session"
+    app.middlewares[-1].__middleware_name__ = f"{__name__}.session"  # type: ignore[union-attr] # PC this attribute does not exist and mypy does not like it
diff --git a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py
index d6ac87f2c7c..356e2cc1ba7 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py
@@ -7,6 +7,7 @@
 import logging
 from typing import Any

+import socketio.exceptions  # type: ignore[import-untyped]
 from aiohttp import web
 from models_library.api_schemas_webserver.socketio import SocketIORoomStr
 from models_library.products import ProductName
@@ -15,7 +16,6 @@
 from servicelib.aiohttp.observer import emit
 from servicelib.logging_utils import get_log_record_extra, log_context
 from servicelib.request_keys import RQT_USERID_KEY
-from socketio.exceptions import ConnectionRefusedError as SocketIOConnectionError

 from ..groups.api import list_user_groups_with_read_access
 from ..login.decorators import login_required
@@ -117,8 +117,8 @@ async def connect(
         environ -- the WSGI environ, among other contains the original request

     Raises:
-        SocketIOConnectionError: HTTPUnauthorized
-        SocketIOConnectionError: Unexpected error
+        SIoConnectionRefusedError: HTTPUnauthorized
+        SIoConnectionRefusedError: Unexpected error

     Returns:
         True if socket.io connection accepted
@@ -153,10 +153,10 @@ async def connect(
     except web.HTTPUnauthorized as exc:
         msg = "authentification failed"
-        raise SocketIOConnectionError(msg) from exc
+        raise socketio.exceptions.ConnectionRefusedError(msg) from exc
     except Exception as exc:  # pylint: disable=broad-except
         msg = f"Unexpected error: {exc}"
-        raise SocketIOConnectionError(msg) from exc
+        raise socketio.exceptions.ConnectionRefusedError(msg) from exc

     return True
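Worth noting: `socketio.exceptions.ConnectionRefusedError` shares its name with Python's builtin `ConnectionRefusedError`, which is likely why the import is now kept module-qualified instead of aliased. A minimal sketch of a connect handler using the qualified form (handler signature and the authorization check are illustrative only):

    import socketio.exceptions  # type: ignore[import-untyped]


    async def connect(socket_id: str, environ: dict) -> bool:
        # raising python-socketio's own ConnectionRefusedError tells the server to
        # reject the handshake cleanly; the builtin exception of the same name would not
        if "HTTP_AUTHORIZATION" not in environ:
            msg = "authentication failed"
            raise socketio.exceptions.ConnectionRefusedError(msg)
        return True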
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py
index 8ed9d777c65..1b60fd5f7e0 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py
@@ -224,7 +224,7 @@ async def get_redirection_to_viewer(request: web.Request):
     NOTE: Can be set as login_required programatically with STUDIES_ACCESS_ANONYMOUS_ALLOWED env var.
     """
     query_params: RedirectionQueryParams = parse_request_query_parameters_as(
-        RedirectionQueryParams, request
+        RedirectionQueryParams, request  # type: ignore[arg-type] # from pydantic v2 --> https://github.com/pydantic/pydantic/discussions/4950
     )
     _logger.debug("Requesting viewer %s [%s]", query_params, type(query_params))
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py
index bad1a918b8f..65f079f7420 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py
@@ -314,12 +314,14 @@ async def get_redirection_to_study_page(request: web.Request) -> web.Response:
         ) from exc

     # COPY
+    assert user  # nosec
     try:
         _logger.debug(
             "Granted access to study name='%s' for user email='%s'. Copying study over ...",
             template_project.get("name"),
             user.get("email"),
         )
+
         copied_project_id = await copy_study_to_account(request, template_project, user)

         _logger.debug("Study %s copied", copied_project_id)
diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py
index 54ae3a2d912..c9ff40adbd9 100644
--- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py
+++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py
@@ -14,7 +14,6 @@
 from contextlib import suppress
 from datetime import datetime

-import asyncpg
 import redis.asyncio as aioredis
 from aiohttp import web
 from models_library.emails import LowerCaseEmailStr
@@ -49,7 +48,7 @@ class UserInfo(BaseModel):
     id: int
     name: str
-    email: str
+    email: LowerCaseEmailStr
     primary_gid: int
     needs_login: bool = False
     is_guest: bool = True
@@ -112,7 +111,7 @@ async def create_temporary_guest_user(request: web.Request):
     #
     # (1) read details above
-    usr: asyncpg.Record | None = None
+    usr = None
     try:
         async with redis_locks_client.lock(
             GUEST_USER_RC_LOCK_FORMAT.format(user_id=random_user_name),
@@ -129,7 +128,7 @@ async def create_temporary_guest_user(request: web.Request):
                     "expires_at": expires_at,
                 }
             )
-            user: dict = await get_user(request.app, usr["id"])
+            user = await get_user(request.app, usr["id"])
             await auto_add_user_to_product_group(
                 request.app, user_id=user["id"], product_name=product_name
             )
diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py b/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py
index 8b8fcbdd2d3..b30a435210b 100644
--- a/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/users/_notifications_handlers.py
@@ -1,5 +1,5 @@
-import logging
 import json
+import logging

 import redis.asyncio as aioredis
 from aiohttp import web
@@ -9,6 +9,7 @@
     parse_request_path_parameters_as,
 )
 from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON
+from servicelib.redis_utils import handle_redis_returns_union_types

 from .._meta import API_VTAG
 from ..login.decorators import login_required
@@ -38,8 +39,10 @@ async def _get_user_notifications(
     redis_client: aioredis.Redis, user_id: int, product_name: str
 ) -> list[UserNotification]:
     """returns a list of notifications where the latest notification is at index 0"""
-    raw_notifications: list[str] = await redis_client.lrange(
-        get_notification_key(user_id), -1 * MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, -1
+    raw_notifications: list[str] = await handle_redis_returns_union_types(
+        redis_client.lrange(
+            get_notification_key(user_id), -1 * MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, -1
+        )
     )
     notifications = [json.loads(x) for x in raw_notifications]
     # Make it backwards compatible
@@ -47,7 +50,7 @@
         if "product" not in n:
             n["product"] = "UNDEFINED"
     # Filter by product
-    included = [product_name, "UNDEFINED"]
+    included = [product_name, "UNDEFINED"]
     filtered_notifications = [n for n in notifications if n["product"] in included]
     return [UserNotification.parse_obj(x) for x in filtered_notifications]

@@ -59,7 +62,9 @@ async def list_user_notifications(request: web.Request) -> web.Response:
     redis_client = get_redis_user_notifications_client(request.app)
     req_ctx = UsersRequestContext.parse_obj(request)
     product_name = get_product_name(request)
-    notifications = await _get_user_notifications(redis_client, req_ctx.user_id, product_name)
+    notifications = await _get_user_notifications(
+        redis_client, req_ctx.user_id, product_name
+    )
     return envelope_json_response(notifications)

@@ -101,12 +106,15 @@ async def mark_notification_as_read(request: web.Request) -> web.Response:
     # NOTE: only the user's notifications can be patched
     key = get_notification_key(req_ctx.user_id)
     all_user_notifications: list[UserNotification] = [
-        UserNotification.parse_raw(x) for x in await redis_client.lrange(key, 0, -1)
+        UserNotification.parse_raw(x)
+        for x in await handle_redis_returns_union_types(redis_client.lrange(key, 0, -1))
     ]
     for k, user_notification in enumerate(all_user_notifications):
         if req_path_params.notification_id == user_notification.id:
             user_notification.read = body.read
-            await redis_client.lset(key, k, user_notification.json())
+            await handle_redis_returns_union_types(
+                redis_client.lset(key, k, user_notification.json())
+            )
             raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON)

     raise web.HTTPNoContent(content_type=MIMETYPE_APPLICATION_JSON)
diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py b/services/web/server/src/simcore_service_webserver/users/_preferences_api.py
index 524e35addeb..8e17a4a25d4 100644
--- a/services/web/server/src/simcore_service_webserver/users/_preferences_api.py
+++ b/services/web/server/src/simcore_service_webserver/users/_preferences_api.py
@@ -130,6 +130,6 @@ async def set_frontend_user_preference(
     await _preferences_db.set_user_preference(
         app,
         user_id=user_id,
-        preference=parse_obj_as(preference_class, {"value": value}),
+        preference=parse_obj_as(preference_class, {"value": value}),  # type: ignore[arg-type] # GitHK this is suspicious
         product_name=product_name,
     )
diff --git a/services/web/server/src/simcore_service_webserver/users/api.py b/services/web/server/src/simcore_service_webserver/users/api.py
index ea52df70f20..afd406bf774 100644
--- a/services/web/server/src/simcore_service_webserver/users/api.py
+++ b/services/web/server/src/simcore_service_webserver/users/api.py
@@ -13,9 +13,10 @@
 from aiohttp import web
 from aiopg.sa.engine import Engine
 from aiopg.sa.result import RowProxy
+from models_library.basic_types import IDStr
 from models_library.products import ProductName
 from models_library.users import GroupID, UserID
-from pydantic import ValidationError, parse_obj_as
+from pydantic import EmailStr, ValidationError, parse_obj_as
 from simcore_postgres_database.models.users import UserRole

 from ..db.models import GroupType, groups, user_to_groups, users
@@ -203,13 +204,13 @@ async def get_user_name_and_email(

 class UserDisplayAndIdNamesTuple(NamedTuple):
     name: str
-    email: str
-    first_name: str
-    last_name: str
+    email: EmailStr
+    first_name: IDStr
+    last_name: IDStr

     @property
-    def full_name(self):
-        return f"{self.first_name} {self.last_name}"
+    def full_name(self) -> IDStr:
+        return IDStr.concatenate(self.first_name, self.last_name)


 async def get_user_display_and_id_names(
@@ -228,7 +229,7 @@ async def get_user_display_and_id_names(
         name=row.name,
         email=row.email,
         first_name=row.first_name or row.name.capitalize(),
-        last_name=row.last_name or "",
+        last_name=IDStr(row.last_name or ""),
     )


@@ -257,7 +258,7 @@ async def delete_user_without_projects(app: web.Application, user_id: UserID) ->
         )
         return

-    await db.delete_user(user)
+    await db.delete_user(dict(user))

     # This user might be cached in the auth. If so, any request
     # with this user-id will get thru producing unexpected side-effects
@@ -291,7 +292,7 @@ async def get_user_fullname(app: web.Application, user_id: UserID) -> FullNameDi
     )


-async def get_user(app: web.Application, user_id: UserID) -> dict:
+async def get_user(app: web.Application, user_id: UserID) -> dict[str, Any]:
     """
     :raises UserNotFoundError:
     """
diff --git a/services/web/server/src/simcore_service_webserver/version_control/db.py b/services/web/server/src/simcore_service_webserver/version_control/db.py
index 87b4237babf..9ae6162c09e 100644
--- a/services/web/server/src/simcore_service_webserver/version_control/db.py
+++ b/services/web/server/src/simcore_service_webserver/version_control/db.py
@@ -1,7 +1,7 @@
 import json
 import logging
 from types import SimpleNamespace
-from typing import Any
+from typing import Any, cast
 from uuid import UUID

 import sqlalchemy as sa
@@ -141,7 +141,7 @@ async def _fetch_workcopy_project_id(
         repo = await self.ReposOrm(conn).set_filter(id=repo_id).fetch("project_uuid")
         assert repo  # nosec
-        return repo.project_uuid
+        return cast(ProjectIDStr, repo.project_uuid)

     async def _update_state(
         self, repo_id: int, conn: SAConnection
diff --git a/services/web/server/src/simcore_service_webserver/wallets/_events.py b/services/web/server/src/simcore_service_webserver/wallets/_events.py
index 33be62859d5..67d5ebcd259 100644
--- a/services/web/server/src/simcore_service_webserver/wallets/_events.py
+++ b/services/web/server/src/simcore_service_webserver/wallets/_events.py
@@ -1,6 +1,7 @@
 import functools

 from aiohttp import web
+from models_library.basic_types import IDStr
 from models_library.products import ProductName
 from models_library.users import UserID
 from pydantic import PositiveInt
@@ -47,7 +48,7 @@ async def _auto_add_default_wallet(
             user_id=user_id,
             user_email=user.email,
             osparc_credits=extra_credits_in_usd * product.credits_per_usd,
-            payment_id="INVITATION",  # TODO: invitation id???
+            payment_id=IDStr("INVITATION"),
             created_at=wallet.created,
         )
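Several of the hunks above (the `full_name` property in users/api.py and `payment_id` in wallets/_events.py) rely on `IDStr` being a bounded, non-empty string type. A minimal usage sketch, with illustrative sample values only:

    from models_library.basic_types import IDStr

    first = IDStr("Jane")
    last = IDStr("Doe")

    # joins the parts and keeps the result within IDStr's length bounds
    full = IDStr.concatenate(first, last)
    assert full == "Jane Doe"

    # plain literals are wrapped so they satisfy an IDStr-typed parameter
    payment_id = IDStr("INVITATION")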