diff --git a/.env-devel b/.env-devel
index eba22721263..d434c63b113 100644
--- a/.env-devel
+++ b/.env-devel
@@ -1,12 +1,16 @@
 #
-# - Keep it alfphabetical order and grouped by prefix
+# - Keep it in alphabetical order and grouped by prefix [see vscode cmd: Sort Lines Ascending]
 # - To expose: export $(grep -v '^#' .env | xargs -0)
 #
+
 API_SERVER_DEV_FEATURES_ENABLED=0
 BF_API_KEY=none
 BF_API_SECRET=none
+DIRECTOR_REGISTRY_CACHING_TTL=900
+DIRECTOR_REGISTRY_CACHING=True
+
 POSTGRES_DB=simcoredb
 POSTGRES_ENDPOINT=postgres:5432
 POSTGRES_HOST=postgres
@@ -14,8 +18,8 @@ POSTGRES_PASSWORD=adminadmin
 POSTGRES_PORT=5432
 POSTGRES_USER=scu
-RABBIT_HOST=rabbit
 RABBIT_CHANNELS={"log": "comp.backend.channels.log", "instrumentation": "comp.backend.channels.instrumentation"}
+RABBIT_HOST=rabbit
 RABBIT_PASSWORD=adminadmin
 RABBIT_PORT=5672
 RABBIT_USER=admin
@@ -28,12 +32,10 @@ REGISTRY_PW=adminadmin
 REGISTRY_SSL=True
 REGISTRY_URL=registry.osparc-master.speag.com
 REGISTRY_USER=admin
-DIRECTOR_REGISTRY_CACHING=True
-DIRECTOR_REGISTRY_CACHING_TTL=900
+# NOTE: 172.17.0.1 is the docker0 interface, which redirects from inside a container onto the host network interface.
 S3_ACCESS_KEY=12345678
 S3_BUCKET_NAME=simcore
-# 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface.
 S3_ENDPOINT=172.17.0.1:9001
 S3_SECRET_KEY=12345678
 S3_SECURE=0
@@ -51,25 +53,25 @@ TRACING_ZIPKIN_ENDPOINT=http://jaeger:9411
 TRAEFIK_SIMCORE_ZONE=internal_simcore_stack
+# NOTE: WEBSERVER_SESSION_SECRET_KEY = $(python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())")
 WEBSERVER_DEV_FEATURES_ENABLED=0
 WEBSERVER_HOST=webserver
 WEBSERVER_LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0
 WEBSERVER_LOGIN_REGISTRATION_INVITATION_REQUIRED=0
-# python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())"
-WEBSERVER_SESSION_SECRET_KEY=REPLACE ME with a key of at least length 32.
-WEBSERVER_STUDIES_ACCESS_ENABLED=0
+WEBSERVER_FEEDBACK_FORM_URL=https://docs.google.com/forms/d/e/1FAIpQLSe232bTigsM2zV97Kjp2OhCenl6o9gNGcDFt2kO_dfkIjtQAQ/viewform?usp=sf_link
+WEBSERVER_FOGBUGZ_LOGIN_URL=https://z43.manuscript.com/login
+WEBSERVER_FOGBUGZ_NEWCASE_URL=https://z43.manuscript.com/f/cases/new?command=new&pg=pgEditBug&ixProject=45&ixArea=449
+WEBSERVER_GARBAGE_COLLECTION_INTERVAL_SECONDS=30
+WEBSERVER_MANUAL_EXTRA_URL=https://itisfoundation.github.io/osparc-manual-z43/
+WEBSERVER_MANUAL_MAIN_URL=http://docs.osparc.io/
+WEBSERVER_PROMETHEUS_API_VERSION=v1
 WEBSERVER_PROMETHEUS_HOST=http://prometheus
 WEBSERVER_PROMETHEUS_PORT=9090
-WEBSERVER_PROMETHEUS_API_VERSION=v1
 WEBSERVER_RESOURCES_DELETION_TIMEOUT_SECONDS=900
-WEBSERVER_GARBAGE_COLLECTION_INTERVAL_SECONDS=30
-WEBSERVER_MANUAL_MAIN_URL=http://docs.osparc.io/
-WEBSERVER_MANUAL_EXTRA_URL=https://itisfoundation.github.io/osparc-manual-z43/
-WEBSERVER_FOGBUGZ_LOGIN_URL=https://z43.manuscript.com/login
-WEBSERVER_FOGBUGZ_NEWCASE_URL=https://z43.manuscript.com/f/cases/new?command=new&pg=pgEditBug&ixProject=45&ixArea=449
 WEBSERVER_S4L_FOGBUGZ_NEWCASE_URL=https://z43.manuscript.com/f/cases/new?command=new&pg=pgEditBug&ixProject=45&ixArea=458
+WEBSERVER_SESSION_SECRET_KEY=REPLACE ME with a key of at least length 32.
+WEBSERVER_STUDIES_ACCESS_ENABLED=0 WEBSERVER_TIS_FOGBUGZ_NEWCASE_URL=https://z43.manuscript.com/f/cases/new?command=new&pg=pgEditBug&ixProject=45&ixArea=459 -WEBSERVER_FEEDBACK_FORM_URL=https://docs.google.com/forms/d/e/1FAIpQLSe232bTigsM2zV97Kjp2OhCenl6o9gNGcDFt2kO_dfkIjtQAQ/viewform?usp=sf_link WEBSERVER_VIEWER_RAWGRAPH_VERSION=2.11.1 WEBSERVER_VIEWER_SIM4LIFE_VERSION=1.0.29 diff --git a/api/specs/webserver/openapi-catalog.yaml b/api/specs/webserver/openapi-catalog.yaml index a51b423cb8d..7d8159e4fef 100644 --- a/api/specs/webserver/openapi-catalog.yaml +++ b/api/specs/webserver/openapi-catalog.yaml @@ -66,18 +66,18 @@ paths: "422": description: Validation Error - /catalog/services: + catalog_services: get: tags: - catalog summary: List Services - operationId: list_catalog_services + operationId: list_services_handler responses: "200": description: Returns list of services from the catalog default: $ref: "./openapi.yaml#/components/responses/DefaultErrorResponse" - /catalog/services/{service_key}/{service_version}: + catalog_services_service_key_service_version: parameters: - in: path name: service_key @@ -97,7 +97,7 @@ paths: tags: - catalog summary: Get Service - operationId: get_catalog_service + operationId: get_service_handler responses: "200": description: Returns service @@ -107,7 +107,7 @@ paths: tags: - catalog summary: Update Service - operationId: update_catalog_service + operationId: update_service_handler requestBody: content: application/json: diff --git a/api/specs/webserver/openapi.yaml b/api/specs/webserver/openapi.yaml index 68b5e77b67d..5e9d955e32b 100644 --- a/api/specs/webserver/openapi.yaml +++ b/api/specs/webserver/openapi.yaml @@ -1,8 +1,8 @@ openapi: 3.0.0 info: - title: "osparc-simcore RESTful API" + title: "osparc-simcore web API" version: 0.6.0 - description: "RESTful API designed for web clients" + description: "API designed for the front-end app" contact: name: IT'IS Foundation email: support@simcore.io @@ -218,11 +218,11 @@ paths: $ref: "./openapi-catalog.yaml#/paths/~1catalog~1dags" /catalog/dags/{dag_id}: $ref: "./openapi-catalog.yaml#/paths/~1catalog~1dags~1{dag_id}" + /catalog/services: - $ref: "./openapi-catalog.yaml#/paths/~1catalog~1services" + $ref: "./openapi-catalog.yaml#/paths/catalog_services" /catalog/services/{service_key}/{service_version}: - $ref: "./openapi-catalog.yaml#/paths/~1catalog~1services~1{service_key}~1{service_version}" - + $ref: "./openapi-catalog.yaml#/paths/catalog_services_service_key_service_version" /catalog/services/{service_key}/{service_version}/inputs: $ref: "./openapi-catalog.yaml#/paths/catalog_services_service_key_service_version_inputs" /catalog/services/{service_key}/{service_version}/inputs/{input_key}: diff --git a/api/tests/conftest.py b/api/tests/conftest.py index 2b962b69129..1f0319393b7 100644 --- a/api/tests/conftest.py +++ b/api/tests/conftest.py @@ -10,55 +10,65 @@ from pathlib import Path import pytest - from utils import is_json_schema, load_specs log = logging.getLogger(__name__) # Conventions -COMMON = 'common' -OPENAPI_MAIN_FILENAME = 'openapi.yaml' +COMMON = "common" +OPENAPI_MAIN_FILENAME = "openapi.yaml" current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -@pytest.fixture(scope='session') -def this_repo_root_dir(): +@pytest.fixture(scope="session") +def this_repo_root_dir() -> Path: root_dir = current_dir.parent.parent assert root_dir assert any(root_dir.glob(".git")) return root_dir -@pytest.fixture(scope='session') -def api_specs_dir(): 
+@pytest.fixture(scope="session") +def api_specs_dir() -> Path: return current_dir.parent / "specs" @pytest.fixture(scope="session") -def webserver_api_dir(api_specs_dir): +def webserver_api_dir(api_specs_dir) -> Path: return api_specs_dir / "webserver" -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def api_specs_info(api_specs_dir): """ - Returns a namedtuple with info on every + Returns a namedtuple with info on every """ - service_dirs = [d for d in api_specs_dir.iterdir() if d.is_dir() and not d.name.endswith(COMMON)] + service_dirs = [ + d for d in api_specs_dir.iterdir() if d.is_dir() and not d.name.endswith(COMMON) + ] - info_cls = namedtuple("ApiSpecsInfo", "service version openapi_path url_path".split()) + info_cls = namedtuple( + "ApiSpecsInfo", "service version openapi_path url_path".split() + ) info = [] for srv_dir in service_dirs: - version_dirs = [d for d in srv_dir.iterdir() if d.is_dir() and not d.name.endswith(COMMON)] + version_dirs = [ + d for d in srv_dir.iterdir() if d.is_dir() and not d.name.endswith(COMMON) + ] for ver_dir in version_dirs: openapi_path = ver_dir / OPENAPI_MAIN_FILENAME if openapi_path.exists(): - info.append( info_cls( - service=srv_dir.name, - version=ver_dir.name, - openapi_path=openapi_path, - url_path=relpath(openapi_path, srv_dir) # ${version}/openapi.yaml - )) + info.append( + info_cls( + service=srv_dir.name, + version=ver_dir.name, + openapi_path=openapi_path, + url_path=relpath( + openapi_path, srv_dir + ), # ${version}/openapi.yaml + ) + ) # https://yarl.readthedocs.io/en/stable/api.html#yarl.URL # [scheme:]//[user[:password]@]host[:port][/path][?query][#fragment] return info @@ -66,42 +76,41 @@ def api_specs_info(api_specs_dir): @pytest.fixture(scope="session") def all_api_specs_tails(api_specs_dir): - """ Returns openapi/jsonschema spec files path relative to specs_dir - - """ + """Returns openapi/jsonschema spec files path relative to specs_dir""" return _all_api_specs_tails_impl(api_specs_dir) + def _all_api_specs_tails_impl(api_specs_dir): tails = [] - for fpath in chain(*[api_specs_dir.rglob(wildcard) for wildcard in ("*.json", "*.y*ml")]): + for fpath in chain( + *[api_specs_dir.rglob(wildcard) for wildcard in ("*.json", "*.y*ml")] + ): tail = relpath(fpath, api_specs_dir) - tails.append(Path(tail) ) + tails.append(Path(tail)) return tails def list_openapi_tails(): - """ Returns relative path to all non-jsonschema (i.e. potential openapi) + """Returns relative path to all non-jsonschema (i.e. potential openapi) - SEE api_specs_tail to get one at a time + SEE api_specs_tail to get one at a time """ tails = [] specs_dir = current_dir.parent / "specs" for tail in _all_api_specs_tails_impl(specs_dir): - specs = load_specs( specs_dir / tail) + specs = load_specs(specs_dir / tail) if not is_json_schema(specs): - tails.append( str(tail) ) + tails.append(str(tail)) return tails -@pytest.fixture(scope="session", - params=list_openapi_tails() - ) +@pytest.fixture(scope="session", params=list_openapi_tails()) def api_specs_tail(request, api_specs_dir): - """ Returns api specs file path relative to api_specs_dir + """Returns api specs file path relative to api_specs_dir - NOTE: this is a parametrized fixture that - represents one api-specs tail at a time! - NOTE: as_str==True, so it gets printed + NOTE: this is a parametrized fixture that + represents one api-specs tail at a time! 
+    NOTE: as_str==True, so it gets printed
     """
     specs_tail = request.param
     assert exists(api_specs_dir / specs_tail)
diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py
index 969e5b9dacc..f1666d98a19 100644
--- a/packages/models-library/src/models_library/projects_nodes.py
+++ b/packages/models-library/src/models_library/projects_nodes.py
@@ -125,7 +125,6 @@ class Node(BaseModel):
     run_hash: Optional[str] = Field(
         None,
         description="the hex digest of the resolved inputs +outputs hash at the time when the last outputs were generated",
-        examples=["a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2"],
         alias="runHash",
     )
@@ -158,7 +157,6 @@ class Node(BaseModel):
     parent: Optional[NodeID] = Field(
         None,
         description="Parent's (group-nodes') node ID s. Used to group",
-        examples=["nodeUUid1", "nodeUuid2"],
     )
     # NOTE: use projects_ui.py
diff --git a/packages/models-library/src/models_library/services.py b/packages/models-library/src/models_library/services.py
index d58ac45d231..b427394a42e 100644
--- a/packages/models-library/src/models_library/services.py
+++ b/packages/models-library/src/models_library/services.py
@@ -363,10 +363,13 @@ class ServiceAccessRights(BaseModel):
 class ServiceMetaData(ServiceCommonData):
-    # for a partial update all members must be Optional
+    # Overrides all fields of ServiceCommonData:
+    # - for a partial update all members must be Optional
     name: Optional[str]
     thumbnail: Optional[HttpUrl]
     description: Optional[str]
+
+    # user-defined metadata
     classifiers: Optional[List[str]]
     quality: Dict[str, Any] = {}
diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py
index d1d3e0e9400..65a578a254f 100644
--- a/packages/service-library/tests/conftest.py
+++ b/packages/service-library/tests/conftest.py
@@ -4,10 +4,9 @@
 import sys
 from pathlib import Path
+from typing import Dict
 import pytest
-import yaml
-
 import servicelib
 from servicelib.openapi import create_openapi_specs
@@ -18,14 +17,14 @@ def here():
 @pytest.fixture(scope="session")
-def package_dir():
+def package_dir() -> Path:
     pdir = Path(servicelib.__file__).resolve().parent
     assert pdir.exists()
     return pdir
 @pytest.fixture(scope="session")
-def osparc_simcore_root_dir(here):
+def osparc_simcore_root_dir(here) -> Path:
     root_dir = here.parent.parent.parent.resolve()
     assert root_dir.exists(), "Is this service within osparc-simcore repo?"
assert any(root_dir.glob("packages/service-library")), ( @@ -35,13 +34,13 @@ def osparc_simcore_root_dir(here): @pytest.fixture -def petstore_spec_file(here): +def petstore_spec_file(here) -> Path: filepath = here / "data/oas3/petstore.yaml" assert filepath.exists() return filepath @pytest.fixture -async def petstore_specs(loop, petstore_spec_file): +async def petstore_specs(loop, petstore_spec_file) -> Dict: specs = await create_openapi_specs(petstore_spec_file) return specs diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 69d13d2bf3f..0bb86f98374 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -36,3 +36,7 @@ idna<3,>=2.5 # vulnerability https://github.com/advisories/GHSA-rhm9-p9w5-fwm7 Feb.2021 cryptography>=3.3.2 + + +# constraint since https://github.com/MagicStack/uvloop/releases/tag/v0.15.0: drops support for 3.5/3.6 Feb.2021 +uvloop<0.15.0 ; python_version < '3.7' diff --git a/scripts/common-service.Makefile b/scripts/common-service.Makefile index 6b80bab74ef..345664c152d 100644 --- a/scripts/common-service.Makefile +++ b/scripts/common-service.Makefile @@ -70,7 +70,7 @@ shell: ## runs shell inside $(APP_NAME) container .PHONY: tail tail: ## tails log of $(APP_NAME) container - docker logs --follow $(shell docker ps -f "name=$(APP_NAME)*" --format {{.ID}}) 2>&1 + docker logs --follow $(shell docker ps --filter "name=$(APP_NAME)*" --format {{.ID}}) 2>&1 .PHONY: stats diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index a9ec74a38db..af7c57c404f 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -75,7 +75,7 @@ def project_env_devel_environment(project_env_devel_dict, monkeypatch): @pytest.fixture(scope="session") -def project_slug_dir(): +def project_slug_dir() -> Path: folder = current_dir.parent.parent assert folder.exists() assert any(folder.glob("src/simcore_service_api_server")) @@ -83,7 +83,7 @@ def project_slug_dir(): @pytest.fixture(scope="session") -def package_dir(): +def package_dir() -> Path: """Notice that this might be under src (if installed as edit mode) or in the installation folder """ diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json index 6cf53443d69..b3e96cc2889 100644 --- a/services/catalog/openapi.json +++ b/services/catalog/openapi.json @@ -806,8 +806,8 @@ "title": "HTTPValidationError", "type": "object", "properties": { - "detail": { - "title": "Detail", + "errors": { + "title": "Validation errors", "type": "array", "items": { "$ref": "#/components/schemas/ValidationError" @@ -864,30 +864,18 @@ "title": "Key", "pattern": "^(simcore)/(services)/(comp|dynamic|frontend)(/[\\w/-]+)+$", "type": "string", - "description": "distinctive name for the node based on the docker registry path", - "example": [ - "simcore/services/comp/sleeper", - "simcore/services/dynamic/3dviewer", - "simcore/services/frontend/file-picker" - ] + "description": "distinctive name for the node based on the docker registry path" }, "version": { "title": "Version", "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", "type": "string", - "description": "semantic version number of the node", - "example": [ - "1.0.0", - "0.0.1" - ] + "description": "semantic version number of the node" }, "label": { "title": "Label", "type": "string", - "description": "The short name of the 
node", - "example": [ - "JupyterLab" - ] + "description": "The short name of the node" }, "progress": { "title": "Progress", @@ -902,10 +890,7 @@ "minLength": 1, "type": "string", "description": "url of the latest screenshot of the node", - "format": "uri", - "example": [ - "https://placeimg.com/171/96/tech/grayscale/?0.jpg" - ] + "format": "uri" }, "runHash": { "anyOf": [ @@ -915,9 +900,6 @@ { "title": "Runhash", "description": "the hex digest of the resolved inputs +outputs hash at the time when the last outputs were generated", - "example": [ - "a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2" - ], "type": "string" } ] @@ -938,11 +920,7 @@ "type": "string", "format": "uuid" }, - "description": "node IDs of where the node is connected to", - "example": [ - "nodeUuid1", - "nodeUuid2" - ] + "description": "node IDs of where the node is connected to" }, "outputs": { "title": "Outputs", @@ -961,39 +939,70 @@ "type": "string", "format": "uuid" }, - "description": "Used in group-nodes. Node IDs of those connected to the output", - "example": [ - "nodeUuid1", - "nodeUuid2" - ] + "description": "Used in group-nodes. Node IDs of those connected to the output" }, "parent": { - "title": "Parent", - "type": "string", - "description": "Parent's (group-nodes') node ID s. Used to group", - "format": "uuid", - "example": [ - "nodeUUid1", - "nodeUuid2" + "anyOf": [ + { + "type": "null" + }, + { + "title": "Parent", + "description": "Parent's (group-nodes') node ID s. Used to group", + "type": "string", + "format": "uuid" + } ] }, + "position": { + "title": "Position", + "allOf": [ + { + "$ref": "#/components/schemas/Position" + } + ], + "deprecated": true + }, "state": { + "title": "State", "allOf": [ { - "$ref": "#/components/schemas/RunningState" + "$ref": "#/components/schemas/NodeState" } ], - "description": "the node's running state", - "default": "NOT_STARTED" + "description": "The node's state object" + } + }, + "additionalProperties": false + }, + "NodeState": { + "title": "NodeState", + "type": "object", + "properties": { + "modified": { + "title": "Modified", + "type": "boolean", + "description": "true if the node's outputs need to be re-computed", + "default": true }, - "position": { - "title": "Position", + "dependencies": { + "title": "Dependencies", + "uniqueItems": true, + "type": "array", + "items": { + "type": "string", + "format": "uuid" + }, + "description": "contains the node inputs dependencies if they need to be computed first" + }, + "currentStatus": { "allOf": [ { - "$ref": "#/components/schemas/Position" + "$ref": "#/components/schemas/RunningState" } ], - "deprecated": true + "description": "the node's current state", + "default": "NOT_STARTED" } }, "additionalProperties": false @@ -1144,6 +1153,11 @@ "type": "object", "description": "Place the data associated with the named keys in files" }, + "unit": { + "title": "Unit", + "type": "string", + "description": "Units, when it refers to a physical quantity" + }, "defaultValue": { "title": "Defaultvalue", "anyOf": [ @@ -1171,7 +1185,8 @@ "description": "custom widget to use instead of the default one determined from the data-type" } }, - "additionalProperties": false + "additionalProperties": false, + "description": "Metadata on a service input port" }, "ServiceOut": { "title": "ServiceOut", @@ -1290,8 +1305,43 @@ "format": "email" } }, - "additionalProperties": false, - "description": "Service base schema (used for docker labels on docker images)" + "description": "Service base schema (used for docker labels on docker 
images)", + "example": { + "name": "File Picker", + "description": "File Picker", + "classifiers": [], + "quality": {}, + "access_rights": { + "1": { + "execute_access": true, + "write_access": false + }, + "4": { + "execute_access": true, + "write_access": true + } + }, + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "type": "dynamic", + "authors": [ + { + "name": "Odei Maiz", + "email": "maiz@itis.swiss" + } + ], + "contact": "maiz@itis.swiss", + "inputs": {}, + "outputs": { + "outFile": { + "displayOrder": 0, + "label": "File", + "description": "Chosen File", + "type": "data:*/*" + } + }, + "owner": "maiz@itis.swiss" + } }, "ServiceOutput": { "title": "ServiceOutput", @@ -1331,22 +1381,10 @@ "type": "object", "description": "Place the data associated with the named keys in files" }, - "defaultValue": { - "title": "Defaultvalue", - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "string" - } - ] + "unit": { + "title": "Unit", + "type": "string", + "description": "Units, when it refers to a physical quantity" }, "widget": { "title": "Widget", @@ -1359,7 +1397,8 @@ "deprecated": true } }, - "additionalProperties": false + "additionalProperties": false, + "description": "Metadata on a service input or output port" }, "ServiceType": { "title": "ServiceType", @@ -1410,6 +1449,82 @@ "type": "object", "default": {} } + }, + "example": { + "access_rights": { + "1": { + "execute_access": false, + "write_access": false + }, + "2": { + "execute_access": true, + "write_access": true + }, + "44": { + "execute_access": false, + "write_access": false + } + }, + "name": "My Human Readable Service Name", + "description": "An interesting service that does something", + "classifiers": [ + "RRID:SCR_018997", + "RRID:SCR_019001" + ], + "quality": { + "tsr": { + "r01": { + "level": 3, + "references": "" + }, + "r02": { + "level": 2, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 2, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 1, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + } + }, + "enabled": true, + "annotations": { + "vandv": "", + "purpose": "", + "standards": "", + "limitations": "", + "documentation": "", + "certificationLink": "", + "certificationStatus": "Uncertified" + } + } } }, "SimCoreFileLink": { diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index 9d8200e74bb..d011bff7883 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -6,7 +6,7 @@ # aiofiles==0.5.0 # via fastapi -aiopg[sa]==1.0.0 +aiopg[sa]==1.1.0 # via -r requirements/_base.in aniso8601==7.0.0 # via graphene @@ -18,7 +18,7 @@ async-generator==1.10 ; python_version < "3.7" # via # -r requirements/_base.in # fastapi -certifi==2020.6.20 +certifi==2020.12.5 # via # httpx # requests @@ -30,17 +30,17 @@ click==7.1.2 # via uvicorn contextvars==2.4 # via sniffio -dataclasses==0.7 ; python_version < "3.7" +dataclasses==0.8 ; python_version < "3.7" # via # -r requirements/_base.in # pydantic -dnspython==2.0.0 +dnspython==2.1.0 # via email-validator -email-validator==1.1.1 +email-validator==1.1.2 # via # fastapi # pydantic -fastapi[all]==0.61.1 +fastapi[all]==0.63.0 # via -r requirements/_base.in 
graphene==2.1.8 # via fastapi @@ -73,17 +73,17 @@ idna==2.10 # requests # rfc3986 # yarl -immutables==0.14 +immutables==0.15 # via contextvars itsdangerous==1.1.0 # via fastapi -jinja2==2.11.2 +jinja2==2.11.3 # via fastapi markupsafe==1.1.1 # via jinja2 -multidict==4.7.6 +multidict==5.1.0 # via yarl -orjson==3.3.1 +orjson==3.4.8 # via fastapi promise==2.3 # via @@ -93,23 +93,26 @@ psycopg2-binary==2.8.6 # via # aiopg # sqlalchemy -pydantic[dotenv,email]==1.7.2 +pydantic[dotenv,email]==1.7.3 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/_base.in # fastapi -python-dotenv==0.14.0 - # via pydantic +python-dotenv==0.15.0 + # via + # pydantic + # uvicorn python-multipart==0.0.5 # via fastapi -pyyaml==5.3.1 +pyyaml==5.4.1 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in # fastapi -requests==2.24.0 + # uvicorn +requests==2.25.1 # via fastapi rfc3986[idna2008]==1.4.0 # via httpx @@ -122,11 +125,11 @@ six==1.15.0 # graphql-relay # python-multipart # tenacity -sniffio==1.1.0 +sniffio==1.2.0 # via # httpcore # httpx -sqlalchemy[postgresql_psycopg2binary]==1.3.20 +sqlalchemy[postgresql_psycopg2binary]==1.3.23 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -135,23 +138,31 @@ sqlalchemy[postgresql_psycopg2binary]==1.3.20 # aiopg starlette==0.13.6 # via fastapi -tenacity==6.2.0 +tenacity==6.3.1 # via -r requirements/_base.in typing-extensions==3.7.4.3 - # via yarl + # via + # uvicorn + # yarl ujson==3.2.0 # via fastapi -urllib3==1.25.11 +urllib3==1.26.3 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests -uvicorn==0.11.8 +uvicorn[standard]==0.13.3 # via fastapi -uvloop==0.14.0 +uvloop==0.14.0 ; python_version < "3.7" + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # uvicorn +watchgod==0.6 # via uvicorn websockets==8.1 # via uvicorn -yarl==1.5.1 +yarl==1.6.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 126ad78e47d..1878d08c79d 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -8,7 +8,7 @@ aiohttp==3.7.3 # via pytest-aiohttp alembic==1.5.4 # via -r requirements/_test.in -astroid==2.4.2 +astroid==2.5 # via pylint async-timeout==3.0.1 # via aiohttp @@ -22,7 +22,7 @@ bcrypt==3.2.0 # via paramiko cached-property==1.5.2 # via docker-compose -certifi==2020.6.20 +certifi==2020.12.5 # via # -c requirements/_base.txt # httpx @@ -65,7 +65,7 @@ docker-compose==1.27.4 # via # -c requirements/../../../requirements/constraints.txt # pytest-docker -docker[ssh]==4.4.1 +docker[ssh]==4.4.2 # via # -r requirements/_test.in # 
docker-compose @@ -100,7 +100,7 @@ idna==2.10 # requests # rfc3986 # yarl -immutables==0.14 +immutables==0.15 # via # -c requirements/_base.txt # contextvars @@ -117,7 +117,7 @@ jsonschema==3.2.0 # via # -r requirements/_test.in # docker-compose -lazy-object-proxy==1.4.3 +lazy-object-proxy==1.5.2 # via astroid mako==1.1.4 # via alembic @@ -127,7 +127,7 @@ markupsafe==1.1.1 # mako mccabe==0.6.1 # via pylint -multidict==4.7.6 +multidict==5.1.0 # via # -c requirements/_base.txt # aiohttp @@ -177,18 +177,18 @@ python-dateutil==2.8.1 # via # alembic # faker -python-dotenv==0.14.0 +python-dotenv==0.15.0 # via # -c requirements/_base.txt # docker-compose python-editor==1.0.4 # via alembic -pyyaml==5.3.1 +pyyaml==5.4.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # docker-compose -requests==2.24.0 +requests==2.25.1 # via # -c requirements/_base.txt # codecov @@ -206,7 +206,6 @@ rfc3986[idna2008]==1.4.0 six==1.15.0 # via # -c requirements/_base.txt - # astroid # bcrypt # docker # dockerpty @@ -214,12 +213,12 @@ six==1.15.0 # pynacl # python-dateutil # websocket-client -sniffio==1.1.0 +sniffio==1.2.0 # via # -c requirements/_base.txt # httpcore # httpx -sqlalchemy[postgresql_psycopg2binary]==1.3.20 +sqlalchemy[postgresql_psycopg2binary]==1.3.23 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -240,7 +239,7 @@ typing-extensions==3.7.4.3 # aiohttp # importlib-metadata # yarl -urllib3==1.25.11 +urllib3==1.26.3 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -251,7 +250,7 @@ websocket-client==0.57.0 # docker-compose wrapt==1.12.1 # via astroid -yarl==1.5.1 +yarl==1.6.3 # via # -c requirements/_base.txt # aiohttp diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index 226622e5875..e2237849ff0 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -22,7 +22,7 @@ click==7.1.2 # -c requirements/_test.txt # black # pip-tools -dataclasses==0.7 ; python_version < "3.7" +dataclasses==0.8 ; python_version < "3.7" # via # -c requirements/_base.txt # black @@ -55,7 +55,7 @@ pip-tools==5.5.0 # via -r requirements/../../../requirements/devenv.txt pre-commit==2.10.1 # via -r requirements/../../../requirements/devenv.txt -pyyaml==5.3.1 +pyyaml==5.4.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services.py b/services/catalog/src/simcore_service_catalog/api/routes/services.py index 4d49bd8a13a..7ef5e9d116d 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/services.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services.py @@ -24,8 +24,20 @@ router = APIRouter() logger = logging.getLogger(__name__) +ServicesSelection = Set[Tuple[str, str]] -@router.get("", response_model=List[ServiceOut]) +# These are equivalent to pydantic export models but for responses +# SEE https://pydantic-docs.helpmanual.io/usage/exporting_models/#modeldict +# SEE https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter +RESPONSE_MODEL_POLICY = { + "response_model_by_alias": True, + "response_model_exclude_unset": True, + "response_model_exclude_defaults": False, + "response_model_exclude_none": False, +} + + +@router.get("", response_model=List[ServiceOut], **RESPONSE_MODEL_POLICY) async def list_services( # pylint: disable=too-many-arguments user_id: 
PositiveInt, @@ -35,7 +47,7 @@ async def list_services( services_repo: ServicesRepository = Depends(get_repository(ServicesRepository)), x_simcore_products_name: str = Header(...), ): - # get user groups + # Access layer user_groups = await groups_repository.list_user_groups(user_id) if not user_groups: # deny access @@ -45,12 +57,13 @@ async def list_services( ) # now get the executable services - _services = await services_repo.list_services( + _services: List[ServiceMetaDataAtDB] = await services_repo.list_services( gids=[group.gid for group in user_groups], execute_access=True, product_name=x_simcore_products_name, ) - executable_services: Set[Tuple[str, str]] = { + # TODO: get this directly in DB + executable_services: ServicesSelection = { (service.key, service.version) for service in _services } @@ -60,14 +73,16 @@ async def list_services( write_access=True, product_name=x_simcore_products_name, ) - writable_services: Set[Tuple[str, str]] = { + writable_services: ServicesSelection = { (service.key, service.version) for service in _services } visible_services = executable_services | writable_services + # Non-detailed views from the services_repo database if not details: # only return a stripped down version - services = [ + # FIXME: add name, ddescription, type, etc... + services_overview = [ ServiceOut( key=key, version=version, @@ -81,22 +96,28 @@ async def list_services( ) for key, version in visible_services ] - return services + return services_overview + + # Detailed view re-directing to # get the services from the registry and filter them out frontend_services = [s.dict(by_alias=True) for s in get_frontend_services()] registry_services = await director_client.get("/services") - data = frontend_services + registry_services - services: List[ServiceOut] = [] - for x in data: + detailed_services_metadata = frontend_services + registry_services + detailed_services: List[ServiceOut] = [] + for detailed_metadata in detailed_services_metadata: try: - service = ServiceOut.parse_obj(x) - - if not (service.key, service.version) in visible_services: + service_key, service_version = ( + detailed_metadata.get("key"), + detailed_metadata.get("version"), + ) + if (service_key, service_version) not in visible_services: # no access to that service continue - # we have write access for that service, fill in the service rights + service = ServiceOut.parse_obj(detailed_metadata) + + # Write Access Granted: fill in the service rights access_rights: List[ ServiceAccessRightsAtDB ] = await services_repo.get_service_access_rights( @@ -104,7 +125,7 @@ async def list_services( ) service.access_rights = {rights.gid: rights for rights in access_rights} - # access is allowed, override some of the values with what is in the db + # Write Access Granted: override some of the values with what is in the db service_in_db: Optional[ ServiceMetaDataAtDB ] = await services_repo.get_service(service.key, service.version) @@ -115,6 +136,7 @@ async def list_services( service.version, ) continue + service = service.copy( update=service_in_db.dict(exclude_unset=True, exclude={"owner"}) ) @@ -124,21 +146,25 @@ async def list_services( service_in_db.owner ) - services.append(service) + detailed_services.append(service) # services = parse_obj_as(List[ServiceOut], data) this does not work since if one service has an issue it fails except ValidationError as exc: logger.warning( "skip service %s:%s that has invalid fields\n%s", - x["key"], - x["version"], + detailed_metadata.get("key"), + detailed_metadata.get("version"), exc, 
) - return services + return detailed_services -@router.get("/{service_key:path}/{service_version}", response_model=ServiceOut) +@router.get( + "/{service_key:path}/{service_version}", + response_model=ServiceOut, + **RESPONSE_MODEL_POLICY, +) async def get_service( # pylint: disable=too-many-arguments user_id: int, @@ -208,7 +234,11 @@ async def get_service( return service -@router.patch("/{service_key:path}/{service_version}", response_model=ServiceOut) +@router.patch( + "/{service_key:path}/{service_version}", + response_model=ServiceOut, + **RESPONSE_MODEL_POLICY, +) async def modify_service( # pylint: disable=too-many-arguments user_id: int, diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/services.py b/services/catalog/src/simcore_service_catalog/models/schemas/services.py index 8af936f967d..0be2864edd0 100644 --- a/services/catalog/src/simcore_service_catalog/models/schemas/services.py +++ b/services/catalog/src/simcore_service_catalog/models/schemas/services.py @@ -5,15 +5,133 @@ ServiceDockerData, ServiceMetaData, ) -from pydantic import EmailStr +from pydantic import EmailStr, Extra +from pydantic.main import BaseModel # OpenAPI models (contain both service metadata and access rights) class ServiceUpdate(ServiceMetaData, ServiceAccessRights): - pass + class Config: + schema_extra = { + "example": { + # ServiceAccessRights + "access_rights": { + 1: { + "execute_access": False, + "write_access": False, + }, + 2: { + "execute_access": True, + "write_access": True, + }, + 44: { + "execute_access": False, + "write_access": False, + }, + }, + # ServiceMetaData = ServiceCommonData + + "name": "My Human Readable Service Name", + "thumbnail": None, + "description": "An interesting service that does something", + "classifiers": ["RRID:SCR_018997", "RRID:SCR_019001"], + "quality": { + "tsr": { + "r01": {"level": 3, "references": ""}, + "r02": {"level": 2, "references": ""}, + "r03": {"level": 0, "references": ""}, + "r04": {"level": 0, "references": ""}, + "r05": {"level": 2, "references": ""}, + "r06": {"level": 0, "references": ""}, + "r07": {"level": 0, "references": ""}, + "r08": {"level": 1, "references": ""}, + "r09": {"level": 0, "references": ""}, + "r10": {"level": 0, "references": ""}, + }, + "enabled": True, + "annotations": { + "vandv": "", + "purpose": "", + "standards": "", + "limitations": "", + "documentation": "", + "certificationLink": "", + "certificationStatus": "Uncertified", + }, + }, + } + } class ServiceOut( ServiceDockerData, ServiceAccessRights, ServiceMetaData ): # pylint: disable=too-many-ancestors owner: Optional[EmailStr] + + class Config: + extra = Extra.ignore + schema_extra = { + "example": { + "name": "File Picker", + "thumbnail": None, + "description": "File Picker", + "classifiers": [], + "quality": {}, + "access_rights": { + "1": {"execute_access": True, "write_access": False}, + "4": {"execute_access": True, "write_access": True}, + }, + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "integration-version": None, + "type": "dynamic", + "badges": None, + "authors": [ + { + "name": "Odei Maiz", + "email": "maiz@itis.swiss", + "affiliation": None, + } + ], + "contact": "maiz@itis.swiss", + "inputs": {}, + "outputs": { + "outFile": { + "displayOrder": 0, + "label": "File", + "description": "Chosen File", + "type": "data:*/*", + "fileToKeyMap": None, + "widget": None, + } + }, + "owner": "maiz@itis.swiss", + } + } + + +# TODO: prototype for next iteration +# Items are non-detailed version of resources listed 
+class ServiceItem(BaseModel): + class Config: + extra = Extra.ignore + schema_extra = { + "example": { + "title": "File Picker", # NEW: rename 'name' as title (so it is not confused with an identifier!) + "thumbnail": None, # optional + "description": "File Picker", + "classifiers_url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/classifiers", + "quality": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/quality", + "access_rights_url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/access_rights", + "key_id": "simcore/services/frontend/file-picker", # NEW: renames key -> key_id + "version": "1.0.0", + "id": "a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4", # NEW: alternative identifier to key_id:version + "integration-version": "1.0.0", + "type": "dynamic", + "badges_url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/badges", + "authors_url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/authors", + "inputs_url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/inputs", + "outputs_url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4/outputs", + "owner": "maiz@itis.swiss", # NEW, replaces "contact": "maiz@itis.swiss" + "url": "https://catalog:8080/services/a8f5a503-01d5-40bc-b416-f5b7cc5d1fa4", # NEW self + } + } diff --git a/services/catalog/src/simcore_service_catalog/services/frontend_services.py b/services/catalog/src/simcore_service_catalog/services/frontend_services.py index f2e46b20727..05d2b56e8fa 100644 --- a/services/catalog/src/simcore_service_catalog/services/frontend_services.py +++ b/services/catalog/src/simcore_service_catalog/services/frontend_services.py @@ -4,6 +4,7 @@ def _file_picker_service() -> ServiceDockerData: + # TODO: create once and just create copies here return ServiceDockerData( key="simcore/services/frontend/file-picker", version="1.0.0", diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py index ff87898171b..8862e02583a 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -8,7 +8,6 @@ from typing import Dict import pytest - import simcore_service_catalog pytest_plugins = [ @@ -19,12 +18,14 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.postgres_service", + "pytest_simcore.pydantic_models", ] + current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -## FOLDER LAYOUT ------ +## FOLDER LAYOUT --------------------------------------------------------------------- @pytest.fixture(scope="session") @@ -36,7 +37,10 @@ def project_slug_dir() -> Path: @pytest.fixture(scope="session") -def installed_package_dir() -> Path: +def package_dir() -> Path: + """Notice that this might be under src (if installed as edit mode) + or in the installation folder + """ dirpath = Path(simcore_service_catalog.__file__).resolve().parent assert dirpath.exists() return dirpath diff --git a/services/catalog/tests/unit/test_package.py b/services/catalog/tests/unit/test__code_syntax.py similarity index 57% rename from services/catalog/tests/unit/test_package.py rename to services/catalog/tests/unit/test__code_syntax.py index 2b2a685b40a..814d7435521 100644 --- a/services/catalog/tests/unit/test_package.py +++ b/services/catalog/tests/unit/test__code_syntax.py @@ -11,22 +11,20 @@ @pytest.fixture -def pylintrc(project_slug_dir, osparc_simcore_root_dir): - pylintrc = project_slug_dir / ".pylintrc" - 
if not pylintrc.exists(): - pylintrc = osparc_simcore_root_dir / ".pylintrc" - assert pylintrc.exists() - return pylintrc +def pylintrc(osparc_simcore_root_dir): + pylintrc_path = osparc_simcore_root_dir / ".pylintrc" + assert pylintrc_path.exists() + return pylintrc_path -def test_run_pylint(pylintrc, installed_package_dir): - assert_pylint_is_passing(pylintrc=pylintrc, package_dir=installed_package_dir) +def test_run_pylint(pylintrc, package_dir): + assert_pylint_is_passing(pylintrc=pylintrc, package_dir=package_dir) -def test_no_pdbs_in_place(installed_package_dir): +def test_no_pdbs_in_place(package_dir): MATCH = re.compile(r"pdb.set_trace()") EXCLUDE = ["__pycache__", ".git"] - for root, dirs, files in os.walk(installed_package_dir): + for root, dirs, files in os.walk(package_dir): for name in files: if name.endswith(".py"): pypth = Path(root) / name diff --git a/services/catalog/tests/unit/test_models_schemas_services.py b/services/catalog/tests/unit/test_models_schemas_services.py new file mode 100644 index 00000000000..96bf5fe0bf3 --- /dev/null +++ b/services/catalog/tests/unit/test_models_schemas_services.py @@ -0,0 +1,19 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +from pprint import pformat + +import pytest +from simcore_service_catalog.models.schemas.services import ( + ServiceItem, + ServiceOut, + ServiceUpdate, +) + + +@pytest.mark.parametrize("model_cls", (ServiceOut, ServiceUpdate, ServiceItem)) +def test_service_api_models_examples(model_cls, model_cls_examples): + for name, example in model_cls_examples.items(): + print(name, ":", pformat(example)) + model_instance = model_cls(**example) + assert model_instance, f"Failed with {name}" diff --git a/services/catalog/tests/unit/test_openapi_specs.py b/services/catalog/tests/unit/test_openapi_specs.py deleted file mode 100644 index 6e3a45eb6f4..00000000000 --- a/services/catalog/tests/unit/test_openapi_specs.py +++ /dev/null @@ -1,25 +0,0 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name - -import json - -import pytest - -from simcore_service_catalog.core.application import init_app - - -@pytest.mark.skip( - reason="FIXME: fails with make tests but does not fail with pytest test_!???" -) -def test_openapi_json_is_updated(project_slug_dir, devel_environ): - # devel_environ needed to build app - - app = init_app() - - openapi_path = project_slug_dir / "openapi.json" - assert openapi_path.exists(), "Missing required oas file" - - openapi_in_place = json.loads(openapi_path.read_text()) - openapi_in_app = app.openapi() - assert openapi_in_place == openapi_in_app, "Missed to run 'make openapi-specs' ???" 
diff --git a/services/catalog/tests/unit/test_schemas.py b/services/catalog/tests/unit/test_schemas.py deleted file mode 100644 index 8fce7463efa..00000000000 --- a/services/catalog/tests/unit/test_schemas.py +++ /dev/null @@ -1,64 +0,0 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=protected-access - -import json - -import pytest -import simcore_postgres_database.models.direct_acyclic_graphs as orm - -from simcore_service_catalog.db import tables -from simcore_service_catalog.models.domain.dag import DAGAtDB -from simcore_service_catalog.models.schemas.dag import DAGIn, DAGOut - - -@pytest.mark.skip(reason="DEV") -def test_dev(): - - dag_in = DAGIn( - key="simcore/services/frontend/nodes-group/macros/", version="1.0.0", name="foo" - ) - assert "key" in dag_in.__fields_set__ - assert "version" in dag_in.__fields_set__ - assert "description" not in dag_in.__fields_set__ - - print() - # to update_dat - print(dag_in.dict(exclude_unset=True)) - - # to set or create dat - print(dag_in.dict()) - print(dag_in.dict(exclude_none=True)) - - -def test_api_in_to_orm(fake_data_dag_in): - # dag in to db - dag_in = DAGIn(**fake_data_dag_in) - - # TODO: create DAG.from_api( :DAGIn) - # SEE crud_dags.create_dag - selection = set(tables.dags.columns.keys()).remove("workbench") - dag_orm = orm.DAG( - id=1, - workbench=json.dumps(fake_data_dag_in["workbench"]), - **dag_in.dict(include=selection, exclude={"workbench"}), - ) - - -def test_orm_to_api_out(fake_data_dag_in): - dag_orm = orm.DAG( - id=1, - key="simcore/services/comp/foo", - version="1.0.0", - name="bar", - description="some", - contact="me@me.com", - workbench=json.dumps(fake_data_dag_in["workbench"]), - ) - - dag_db = DAGAtDB.from_orm(dag_orm) - assert type(dag_db.workbench) == dict - - dag_out = DAGOut(**dag_db.dict(by_alias=True)) - assert dag_out.id == 1 # pylint: disable=no-member diff --git a/services/catalog/tests/unit/test_services_director.py b/services/catalog/tests/unit/test_services_director.py index 58fef23b2d2..0449f156cb3 100644 --- a/services/catalog/tests/unit/test_services_director.py +++ b/services/catalog/tests/unit/test_services_director.py @@ -1,3 +1,5 @@ +from typing import Iterator + # pylint:disable=unused-variable # pylint:disable=unused-argument # pylint:disable=redefined-outer-name @@ -5,21 +7,20 @@ import pytest import respx from fastapi import FastAPI -from starlette.testclient import TestClient - from simcore_service_catalog.api.dependencies.director import get_director_api from simcore_service_catalog.core.application import init_app from simcore_service_catalog.core.settings import ( AppSettings, + ClientRequestSettings, DirectorSettings, PostgresSettings, - ClientRequestSettings, ) from simcore_service_catalog.services.director import DirectorApi +from starlette.testclient import TestClient @pytest.fixture -def minimal_app(loop, devel_environ) -> FastAPI: +def minimal_app(loop, devel_environ) -> Iterator[FastAPI]: # TODO: auto generate fakes # avoid init of pg or director API clients diff --git a/services/catalog/tests/unit/test_services_frontend_services.py b/services/catalog/tests/unit/test_services_frontend_services.py new file mode 100644 index 00000000000..c5376f89e35 --- /dev/null +++ b/services/catalog/tests/unit/test_services_frontend_services.py @@ -0,0 +1,29 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=protected-access + +from 
simcore_service_catalog.models.schemas.services import ServiceDockerData +from simcore_service_catalog.services.frontend_services import ( + _file_picker_service, + _node_group_service, +) + + +def test_create_file_picker(): + + image_metadata = _file_picker_service() + assert isinstance(image_metadata, ServiceDockerData) + + assert ( + not image_metadata.inputs and image_metadata.outputs + ), "Expected a source node" + + +def tests_create_node_group(): + image_metadata = _node_group_service() + assert isinstance(image_metadata, ServiceDockerData) + + assert ( + not image_metadata.inputs and image_metadata.outputs + ), "Expected a source node" diff --git a/services/sidecar/tests/conftest.py b/services/sidecar/tests/conftest.py index 2987edbc9a4..1ff888eaa38 100644 --- a/services/sidecar/tests/conftest.py +++ b/services/sidecar/tests/conftest.py @@ -19,7 +19,7 @@ @pytest.fixture(scope="session") -def project_slug_dir(): +def project_slug_dir() -> Path: folder = current_dir.parent assert folder.exists() assert any(folder.glob("src/simcore_service_sidecar")) @@ -27,7 +27,7 @@ def project_slug_dir(): @pytest.fixture(scope="session") -def package_dir(): +def package_dir() -> Path: dirpath = Path(simcore_service_sidecar.__file__).resolve().parent assert dirpath.exists() return dirpath diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 849ad394c91..c367d2bd863 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -13,41 +13,40 @@ from concurrent.futures import ThreadPoolExecutor from pathlib import Path from random import randrange -from typing import Tuple, Dict -import dotenv +from typing import Dict, Iterator, Tuple +import dotenv import pytest -from aiohttp import web -from aiopg.sa import create_engine - import simcore_service_storage import utils +from aiohttp import web +from aiopg.sa import create_engine from servicelib.application import create_safe_application from simcore_service_storage.datcore_wrapper import DatcoreWrapper from simcore_service_storage.dsm import DataStorageManager, DatCoreApiToken from simcore_service_storage.models import FileMetaData from simcore_service_storage.settings import SIMCORE_S3_STR -from utils import (ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, - USER_ID) +from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, USER_ID current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent # TODO: replace by pytest_simcore sys.path.append(str(current_dir / "helpers")) + @pytest.fixture(scope="session") -def here(): +def here() -> Path: return current_dir @pytest.fixture(scope="session") -def package_dir(here): +def package_dir(here) -> Path: dirpath = Path(simcore_service_storage.__file__).parent assert dirpath.exists() return dirpath @pytest.fixture(scope="session") -def osparc_simcore_root_dir(here): +def osparc_simcore_root_dir(here) -> Path: root_dir = here.parent.parent.parent assert root_dir.exists() and any( root_dir.glob("services") @@ -56,7 +55,7 @@ def osparc_simcore_root_dir(here): @pytest.fixture(scope="session") -def osparc_api_specs_dir(osparc_simcore_root_dir): +def osparc_api_specs_dir(osparc_simcore_root_dir) -> Path: dirpath = osparc_simcore_root_dir / "api" / "specs" assert dirpath.exists() return dirpath @@ -80,15 +79,14 @@ def project_env_devel_dict(project_slug_dir: Path) -> Dict: @pytest.fixture(scope="function") -def project_env_devel_environment(project_env_devel_dict, monkeypatch): +def 
project_env_devel_environment(project_env_devel_dict, monkeypatch) -> None: for key, value in project_env_devel_dict.items(): monkeypatch.setenv(key, value) @pytest.fixture(scope="session") -def docker_compose_file(here): - """ Overrides pytest-docker fixture - """ +def docker_compose_file(here) -> Iterator[str]: + """Overrides pytest-docker fixture""" old = os.environ.copy() # docker-compose reads these environs @@ -119,7 +117,9 @@ def postgres_service(docker_services, docker_ip): # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: utils.is_postgres_responsive(url), timeout=30.0, pause=0.1, + check=lambda: utils.is_postgres_responsive(url), + timeout=30.0, + pause=0.1, ) postgres_service = { @@ -138,12 +138,14 @@ def postgres_service(docker_services, docker_ip): @pytest.fixture(scope="session") def postgres_service_url(postgres_service, docker_services, docker_ip): - postgres_service_url = "postgresql://{user}:{password}@{host}:{port}/{database}".format( - user=USER, - password=PASS, - database=DATABASE, - host=docker_ip, - port=docker_services.port_for("postgres", 5432), + postgres_service_url = ( + "postgresql://{user}:{password}@{host}:{port}/{database}".format( + user=USER, + password=PASS, + database=DATABASE, + host=docker_ip, + port=docker_services.port_for("postgres", 5432), + ) ) return postgres_service_url @@ -164,11 +166,16 @@ async def postgres_engine(loop, postgres_service_url): def minio_service(docker_services, docker_ip): # Build URL to service listening on random port. - url = "http://%s:%d/" % (docker_ip, docker_services.port_for("minio", 9000),) + url = "http://%s:%d/" % ( + docker_ip, + docker_services.port_for("minio", 9000), + ) # Wait until service is responsive. docker_services.wait_until_responsive( - check=lambda: utils.is_responsive(url, 403), timeout=30.0, pause=0.1, + check=lambda: utils.is_responsive(url, 403), + timeout=30.0, + pause=0.1, ) return { diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py index a5d55ea283e..40cf9f359f1 100644 --- a/services/storage/tests/utils.py +++ b/services/storage/tests/utils.py @@ -7,7 +7,6 @@ import pytest import requests import sqlalchemy as sa - from simcore_service_storage.models import ( FileMetaData, file_meta_data, @@ -31,11 +30,11 @@ USER_ID = "0" -def current_dir(): +def current_dir() -> Path: return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -def data_dir(): +def data_dir() -> Path: return current_dir() / Path("data") @@ -50,7 +49,7 @@ def has_datcore_tokens() -> bool: return True -def is_responsive(url, code=200): +def is_responsive(url, code=200) -> bool: """Check if something responds to ``url`` syncronously""" try: response = requests.get(url) @@ -62,7 +61,7 @@ def is_responsive(url, code=200): return False -def is_postgres_responsive(url): +def is_postgres_responsive(url) -> bool: """Check if something responds to ``url`` """ try: engine = sa.create_engine(url) @@ -126,11 +125,24 @@ def create_full_tables(url): meta.drop_all( bind=engine, - tables=[user_to_groups, file_meta_data, projects, users, groups,], + tables=[ + user_to_groups, + file_meta_data, + projects, + users, + groups, + ], checkfirst=True, ) meta.create_all( - bind=engine, tables=[file_meta_data, projects, users, groups, user_to_groups,], + bind=engine, + tables=[ + file_meta_data, + projects, + users, + groups, + user_to_groups, + ], ) for t in ["users", "file_meta_data", "projects"]: @@ -176,6 +188,13 @@ def drop_all_tables(url): engine = 
sa.create_engine(url) meta.drop_all( - bind=engine, tables=[file_meta_data, projects, users, groups, user_to_groups,], + bind=engine, + tables=[ + file_meta_data, + projects, + users, + groups, + user_to_groups, + ], ) engine.dispose() diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 85894437be9..0de2d953bba 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -1,8 +1,8 @@ openapi: 3.0.0 info: - title: osparc-simcore RESTful API + title: osparc-simcore web API version: 0.6.0 - description: RESTful API designed for web clients + description: API designed for the front-end app contact: name: IT'IS Foundation email: support@simcore.io @@ -13859,7 +13859,7 @@ paths: tags: - catalog summary: List Services - operationId: list_catalog_services + operationId: list_services_handler responses: '200': description: Returns list of services from the catalog @@ -13966,7 +13966,7 @@ paths: tags: - catalog summary: Get Service - operationId: get_catalog_service + operationId: get_service_handler responses: '200': description: Returns service @@ -14057,7 +14057,7 @@ paths: tags: - catalog summary: Update Service - operationId: update_catalog_service + operationId: update_service_handler requestBody: content: application/json: diff --git a/services/web/server/src/simcore_service_webserver/catalog.py b/services/web/server/src/simcore_service_webserver/catalog.py index cef3cc2fd0c..bd4f3642efb 100644 --- a/services/web/server/src/simcore_service_webserver/catalog.py +++ b/services/web/server/src/simcore_service_webserver/catalog.py @@ -2,8 +2,6 @@ """ import logging -import os -from distutils.util import strtobool from typing import List, Optional from aiohttp import web @@ -101,8 +99,7 @@ def setup_catalog(app: web.Application, *, disable_auth=False): route_def.kwargs["name"] = operation_id = route_def.handler.__name__ exclude.append(operation_id) - if strtobool(os.environ.get("WEBSERVER_DEV_FEATURES_ENABLED", "0")): - app.add_routes(catalog_api_handlers.routes) + app.add_routes(catalog_api_handlers.routes) # bind the rest routes with the reverse-proxy-handler # FIXME: this would reroute **anything** to the catalog service! 
diff --git a/services/web/server/src/simcore_service_webserver/catalog_api_handlers.py b/services/web/server/src/simcore_service_webserver/catalog_api_handlers.py index 443203a1a3d..70701ce89a4 100644 --- a/services/web/server/src/simcore_service_webserver/catalog_api_handlers.py +++ b/services/web/server/src/simcore_service_webserver/catalog_api_handlers.py @@ -1,6 +1,6 @@ from contextlib import contextmanager from dataclasses import dataclass -from typing import Iterator, List, Tuple +from typing import Any, Dict, Iterator, List, Tuple import orjson from aiohttp import web @@ -17,17 +17,13 @@ ServiceOutputApiOut, ServiceOutputKey, ServiceVersion, + json_dumps, + replace_service_input_outputs, ) from .constants import RQ_PRODUCT_KEY from .login.decorators import RQT_USERID_KEY, login_required from .security_decorators import permission_required - -def json_dumps(v) -> str: - # orjson.dumps returns bytes, to match standard json.dumps we need to decode - return orjson.dumps(v).decode() - - ############### # API HANDLERS # @@ -38,6 +34,15 @@ def json_dumps(v) -> str: VX = f"/{api_version_prefix}" + +# SEE https://pydantic-docs.helpmanual.io/usage/exporting_models/#modeldict +RESPONSE_MODEL_POLICY = { + "by_alias": True, + "exclude_unset": True, + "exclude_defaults": False, + "exclude_none": False, +} + routes = RouteTableDef() @@ -68,11 +73,13 @@ def parameters_validation(request: web.Request): raise web.HTTPBadRequest(reason="Invalid headers") from err yield context + # # wraps match, parse and validate # For instance # service_key: ServiceKey = request.match_info["service_key"] # from_service_version: ServiceVersion = request.query["fromVersion"] + # body = await request.json() # except ValidationError as err: raise web.HTTPUnprocessableEntity( @@ -83,6 +90,53 @@ def parameters_validation(request: web.Request): raise web.HTTPBadRequest(reason=f"Expected parameter {err}") from err +@routes.get(VX + "/catalog/services") +@login_required +@permission_required("services.catalog.*") +async def list_services_handler(request: Request): + with parameters_validation(request) as ctx: + # match, parse and validate + data_array = await list_services(ctx) + + enveloped: str = json_dumps({"data": data_array}) + return web.Response(text=enveloped, content_type="application/json") + + +@routes.get(VX + "/catalog/services/{service_key}/{service_version}") +@login_required +@permission_required("services.catalog.*") +async def get_service_handler(request: Request): + with parameters_validation(request) as ctx: + # match, parse and validate + service_key: ServiceKey = request.match_info["service_key"] + service_version: ServiceVersion = request.match_info["service_version"] + + # Evaluate and return validated model + data = await get_service(service_key, service_version, ctx) + + # format response + enveloped: str = json_dumps({"data": data}) + return web.Response(text=enveloped, content_type="application/json") + + +@routes.patch(VX + "/catalog/services/{service_key}/{service_version}") +@login_required +@permission_required("services.catalog.*") +async def update_service_handler(request: Request): + with parameters_validation(request) as ctx: + # match, parse and validate + service_key: ServiceKey = request.match_info["service_key"] + service_version: ServiceVersion = request.match_info["service_version"] + update_data: Dict[str, Any] = await request.json(loads=orjson.loads) + + # Evaluate and return validated model + data = await update_service(service_key, service_version, update_data, ctx) + + # format 
response + enveloped: str = json_dumps({"data": data}) + return web.Response(text=enveloped, content_type="application/json") + + @routes.get(VX + "/catalog/services/{service_key}/{service_version}/inputs") @login_required @permission_required("services.catalog.*") @@ -97,9 +151,8 @@ async def list_service_inputs_handler(request: Request): # format response enveloped: str = json_dumps( - {"data": [m.dict(by_alias=True) for m in response_model]} + {"data": [m.dict(**RESPONSE_MODEL_POLICY) for m in response_model]} ) - return web.Response(text=enveloped, content_type="application/json") @@ -119,7 +172,7 @@ async def get_service_input_handler(request: Request): ) # format response - enveloped: str = json_dumps({"data": response_model.dict(by_alias=True)}) + enveloped: str = json_dumps({"data": response_model.dict(**RESPONSE_MODEL_POLICY)}) return web.Response(text=enveloped, content_type="application/json") @@ -136,7 +189,7 @@ async def get_compatible_inputs_given_source_output_handler(request: Request): from_output_key: ServiceOutputKey = request.query["fromOutput"] # Evaluate and return validated model - response_model = await get_compatible_inputs_given_source_output( + data = await get_compatible_inputs_given_source_output( service_key, service_version, from_service_key, @@ -146,8 +199,7 @@ async def get_compatible_inputs_given_source_output_handler(request: Request): ) # format response - enveloped: str = json_dumps({"data": response_model}) - + enveloped: str = json_dumps({"data": data}) return web.Response(text=enveloped, content_type="application/json") @@ -167,9 +219,8 @@ async def list_service_outputs_handler(request: Request): # format response enveloped: str = json_dumps( - {"data": [m.dict(by_alias=True) for m in response_model]} + {"data": [m.dict(**RESPONSE_MODEL_POLICY) for m in response_model]} ) - return web.Response(text=enveloped, content_type="application/json") @@ -191,7 +242,7 @@ async def get_service_output_handler(request: Request): ) # format response - enveloped: str = json_dumps({"data": response_model.dict(by_alias=True)}) + enveloped: str = json_dumps({"data": response_model.dict(**RESPONSE_MODEL_POLICY)}) return web.Response(text=enveloped, content_type="application/json") @@ -213,7 +264,7 @@ async def get_compatible_outputs_given_target_input_handler(request: Request): to_input_key: ServiceInputKey = request.query["toInput"] # Evaluate and return validated model - response_model = await get_compatible_outputs_given_target_input( + data = await get_compatible_outputs_given_target_input( service_key, service_version, to_service_key, @@ -223,8 +274,7 @@ async def get_compatible_outputs_given_target_input_handler(request: Request): ) # format response - enveloped: str = json_dumps({"data": response_model}) - + enveloped: str = json_dumps({"data": data}) return web.Response(text=enveloped, content_type="application/json") @@ -266,6 +316,43 @@ def can_connect(from_output: ServiceOutput, to_input: ServiceInput) -> bool: return ok +async def list_services(ctx: _RequestContext): + services = await catalog_client.get_services_for_user_in_product( + ctx.app, ctx.user_id, ctx.product_name, only_key_versions=False + ) + for service in services: + replace_service_input_outputs(service, **RESPONSE_MODEL_POLICY) + return services + + +async def get_service( + service_key: ServiceKey, service_version: ServiceVersion, ctx: _RequestContext +) -> Dict[str, Any]: + service = await catalog_client.get_service( + ctx.app, ctx.user_id, service_key, service_version, ctx.product_name + ) 
+ replace_service_input_outputs(service, **RESPONSE_MODEL_POLICY) + return service + + +async def update_service( + service_key: ServiceKey, + service_version: ServiceVersion, + update_data: Dict[str, Any], + ctx: _RequestContext, +): + service = await catalog_client.update_service( + ctx.app, + ctx.user_id, + service_key, + service_version, + ctx.product_name, + update_data, + ) + replace_service_input_outputs(service, **RESPONSE_MODEL_POLICY) + return service + + async def list_service_inputs( service_key: ServiceKey, service_version: ServiceVersion, ctx: _RequestContext ) -> List[ServiceOutputApiOut]: @@ -276,7 +363,7 @@ async def list_service_inputs( inputs = [] for input_key in service["inputs"].keys(): - service_input = ServiceInputApiOut.from_service(service, input_key) + service_input = ServiceInputApiOut.from_catalog_service(service, input_key) inputs.append(service_input) return inputs @@ -291,7 +378,7 @@ async def get_service_input( service = await catalog_client.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) - service_input = ServiceInputApiOut.from_service(service, input_key) + service_input = ServiceInputApiOut.from_catalog_service(service, input_key) return service_input @@ -349,7 +436,7 @@ async def list_service_outputs( outputs = [] for output_key in service["outputs"].keys(): - service_output = ServiceOutputApiOut.from_service(service, output_key) + service_output = ServiceOutputApiOut.from_catalog_service(service, output_key) outputs.append(service_output) return outputs @@ -363,7 +450,7 @@ async def get_service_output( service = await catalog_client.get_service( ctx.app, ctx.user_id, service_key, service_version, ctx.product_name ) - service_output = ServiceOutputApiOut.from_service(service, output_key) + service_output = ServiceOutputApiOut.from_catalog_service(service, output_key) return service_output diff --git a/services/web/server/src/simcore_service_webserver/catalog_api_models.py b/services/web/server/src/simcore_service_webserver/catalog_api_models.py index b1462184d97..d78fd45cd2e 100644 --- a/services/web/server/src/simcore_service_webserver/catalog_api_models.py +++ b/services/web/server/src/simcore_service_webserver/catalog_api_models.py @@ -1,5 +1,6 @@ from typing import Any, Dict, Optional +import orjson from models_library.services import ( KEY_RE, VERSION_RE, @@ -14,58 +15,46 @@ ServiceKey = constr(regex=KEY_RE) ServiceVersion = constr(regex=VERSION_RE) - - ServiceInputKey = PropertyName ServiceOutputKey = PropertyName -# Using ApiOut/ApiIn suffix to distinguish API models vs internal domain model -# - internal domain models should be consise, non-verbose, minimal, correct -# - API models should be adapted to API user needs -# - warning with couplings! 
Add example to ensure that API model maintain -# backwards compatibility -# - schema samples could have multiple schemas to tests backwards compatibility -# -# TODO: reduce to a minimum returned input/output models (ask OM) -# -INPUT_SAMPLE = { - "displayOrder": 2, - "label": "Sleep Time", - "description": "Time to wait before completion", - "type": "number", - "defaultValue": 0, - "unit": "second", - "widget": {"type": "TextArea", "details": {"minHeight": 1}}, - "keyId": "input_2", - "unitLong": "seconds", - "unitShort": "sec", -} - -OUTPUT_SAMPLE = { - "displayOrder": 2, - "label": "Time Slept", - "description": "Time the service waited before completion", - "type": "number", - "unit": "second", - "unitLong": "seconds", - "unitShort": "sec", - "keyId": "output_2", -} - - # TODO: will be replaced by pynt functionality FAKE_UNIT_TO_FORMATS = {"SECOND": ("s", "seconds"), "METER": ("m", "meters")} +class CannotFormatUnitError(ValueError): + """ Either unit is not provided or is invalid or is not registered """ + + def get_formatted_unit(data: dict): - unit = data.get("unit") - if unit: - return FAKE_UNIT_TO_FORMATS.get(unit.upper(), [None, None]) - return [None, None] + try: + unit = data["unit"] + if unit is None: + raise CannotFormatUnitError() + return FAKE_UNIT_TO_FORMATS[unit.upper()] + except KeyError as err: + raise CannotFormatUnitError() from err + + +def json_dumps(v, *, default=None) -> str: + # orjson.dumps returns bytes, to match standard json.dumps we need to decode + return orjson.dumps(v, default=default).decode() -class _CommonApiExtension(BaseModel): +##### +# +# API models specifics to front-end needs +# +# Using WebApi prefix and In/Out suffix to distinguish web API models +# - internal domain models should be consise, non-verbose, minimal, correct +# - API models should be adapted to API user needs +# - warning with couplings! 
Add example to ensure that API model maintain +# backwards compatibility +# - schema samples could have multiple schemas to tests backwards compatibility +# +# TODO: reduce to a minimum returned input/output models (ask OM) +class _BaseCommonApiExtension(BaseModel): unit_long: Optional[str] = Field( None, description="Long name of the unit, if available" ) @@ -73,41 +62,94 @@ class _CommonApiExtension(BaseModel): None, description="Short name for the unit, if available" ) + class Config: + extra = Extra.forbid + alias_generator = snake_to_camel + json_loads = orjson.loads + json_dumps = json_dumps + -class ServiceInputApiOut(ServiceInput, _CommonApiExtension): +class ServiceInputApiOut(ServiceInput, _BaseCommonApiExtension): key_id: ServiceInputKey = Field( ..., description="Unique name identifier for this input" ) - class Config: - extra = Extra.forbid - alias_generator = snake_to_camel - schema_extra = {"example": INPUT_SAMPLE} + class Config(_BaseCommonApiExtension.Config): + schema_extra = { + "example": { + "displayOrder": 2, + "label": "Sleep Time", + "description": "Time to wait before completion", + "type": "number", + "defaultValue": 0, + "unit": "second", + "widget": {"type": "TextArea", "details": {"minHeight": 1}}, + "keyId": "input_2", + "unitLong": "seconds", + "unitShort": "sec", + } + } @classmethod - def from_service(cls, service: Dict[str, Any], input_key: ServiceInputKey): + def from_catalog_service(cls, service: Dict[str, Any], input_key: ServiceInputKey): data = service["inputs"][input_key] - ushort, ulong = get_formatted_unit(data) + try: + ushort, ulong = get_formatted_unit(data) + return cls(keyId=input_key, unitLong=ulong, unitShort=ushort, **data) + except CannotFormatUnitError: + return cls(keyId=input_key, **data) - return cls(keyId=input_key, unitLong=ulong, unitShort=ushort, **data) - -class ServiceOutputApiOut(ServiceOutput, _CommonApiExtension): +class ServiceOutputApiOut(ServiceOutput, _BaseCommonApiExtension): key_id: ServiceOutputKey = Field( ..., description="Unique name identifier for this input" ) - class Config: - extra = Extra.forbid - alias_generator = snake_to_camel - schema_extra = {"example": OUTPUT_SAMPLE} + class Config(_BaseCommonApiExtension.Config): + schema_extra = { + "example": { + "displayOrder": 2, + "label": "Time Slept", + "description": "Time the service waited before completion", + "type": "number", + "unit": "second", + "unitLong": "seconds", + "unitShort": "sec", + "keyId": "output_2", + } + } @classmethod - def from_service(cls, service: Dict[str, Any], output_key: ServiceOutputKey): + def from_catalog_service( + cls, service: Dict[str, Any], output_key: ServiceOutputKey + ): data = service["outputs"][output_key] - ushort, ulong = get_formatted_unit(data) - return cls(keyId=output_key, unitLong=ulong, unitShort=ushort, **data) + try: + # NOTE: prunes invalid field that might have remained in database + # TODO: remove from root and remove this cleanup operation + if "defaultValue" in data: + data.pop("defaultValue") + + ushort, ulong = get_formatted_unit(data) + return cls(keyId=output_key, unitLong=ulong, unitShort=ushort, **data) + except CannotFormatUnitError: + return cls(keyId=output_key, **data) + + +####################### +# Helper functions +# + + +def replace_service_input_outputs(service: Dict[str, Any], **export_options): + """ Thin wrapper to replace i/o ports in returned service model """ + # This is a fast solution until proper models are available for the web API + for input_key in service["inputs"]: + new_input = 
ServiceInputApiOut.from_catalog_service(service, input_key) + service["inputs"][input_key] = new_input.dict(**export_options) -# TODO: from models_library.api_schemas_catalog + for output_key in service["outputs"]: + new_output = ServiceOutputApiOut.from_catalog_service(service, output_key) + service["outputs"][output_key] = new_output.dict(**export_options) diff --git a/services/web/server/src/simcore_service_webserver/catalog_client.py b/services/web/server/src/simcore_service_webserver/catalog_client.py index d9f0e2bb4e9..389d7ffd570 100644 --- a/services/web/server/src/simcore_service_webserver/catalog_client.py +++ b/services/web/server/src/simcore_service_webserver/catalog_client.py @@ -132,3 +132,31 @@ async def get_service( async with session.get(url, headers={X_PRODUCT_NAME_HEADER: product_name}) as resp: resp.raise_for_status() # FIXME: error handling for session and response exceptions return await resp.json() + + +async def update_service( + app: web.Application, + user_id: int, + service_key: str, + service_version: str, + product_name: str, + update_data: Dict[str, Any], +) -> Dict[str, Any]: + url = ( + URL(app[KCATALOG_ORIGIN]) + .with_path( + app[KCATALOG_VERSION_PREFIX] + + f"/services/{urllib.parse.quote_plus(service_key)}/{service_version}" + ) + .with_query( + { + "user_id": user_id, + } + ) + ) + session = get_client_session(app) + async with session.patch( + url, headers={X_PRODUCT_NAME_HEADER: product_name}, json=update_data + ) as resp: + resp.raise_for_status() # FIXME: error handling for session and response exceptions + return await resp.json() diff --git a/services/web/server/src/simcore_service_webserver/diagnostics_core.py b/services/web/server/src/simcore_service_webserver/diagnostics_core.py index 5706b2cf8bc..3683827b62b 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics_core.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics_core.py @@ -5,7 +5,6 @@ import attr from aiohttp import web - from servicelib.incidents import LimitedOrderedStack, SlowCallback log = logging.getLogger(__name__) @@ -34,8 +33,8 @@ def max_delay(self) -> float: @attr.s(auto_attribs=True) class DelayWindowProbe: """ - Collects a window of delay samples that satisfy - some conditions (see observe code) + Collects a window of delay samples that satisfy + some conditions (see observe code) """ min_threshold_secs: int = 0.3 @@ -56,27 +55,33 @@ def value(self) -> float: return 0 +logged_once = False + + def is_sensing_enabled(app: web.Application): - """ Diagnostics will not activate sensing inmediatly but after some - time since the app started + """Diagnostics will not activate sensing immediately but after some + time since the app started """ + global logged_once # pylint: disable=global-statement + time_elapsed_since_setup = time.time() - app[kPLUGIN_START_TIME] enabled = time_elapsed_since_setup > app[kSTART_SENSING_DELAY_SECS] - if enabled: + if enabled and not logged_once: log.debug( "Diagnostics starts sensing after waiting %3.2f secs [> %3.2f secs] since submodule init", time_elapsed_since_setup, app[kSTART_SENSING_DELAY_SECS], ) + logged_once = True return enabled def assert_healthy_app(app: web.Application) -> None: - """ Diagnostics function that determins whether - current application is healthy based on incidents - occured up to now + """Diagnostics function that determines whether + current application is healthy based on incidents + occurred up to now - raises DiagnosticError if any incient detected + raises DiagnosticError if any 
incident detected """ # CRITERIA 1: incidents: Optional[IncidentsRegistry] = app.get(kINCIDENTS_REGISTRY) @@ -98,7 +103,8 @@ def assert_healthy_app(app: web.Application) -> None: if max_delay > max_delay_allowed: msg = "{:3.1f} secs delay [at most {:3.1f} secs allowed]".format( - max_delay, max_delay_allowed, + max_delay, + max_delay_allowed, ) raise HealthError(msg) diff --git a/services/web/server/tests/unit/isolated/test_catalog_api_models.py b/services/web/server/tests/unit/isolated/test_catalog_api_models.py index f097754bc97..5c38bebfacc 100644 --- a/services/web/server/tests/unit/isolated/test_catalog_api_models.py +++ b/services/web/server/tests/unit/isolated/test_catalog_api_models.py @@ -3,12 +3,16 @@ # pylint:disable=no-name-in-module +import json +from copy import deepcopy from pprint import pformat import pytest +from simcore_service_webserver.catalog_api_handlers import RESPONSE_MODEL_POLICY from simcore_service_webserver.catalog_api_models import ( ServiceInputApiOut, ServiceOutputApiOut, + replace_service_input_outputs, ) @@ -24,3 +28,80 @@ def test_webserver_catalog_api_models(model_cls, model_cls_examples): print(name, ":", pformat(example)) model_instance = model_cls(**example) assert model_instance, f"Failed with {name}" + + # tests export policy w/o errors + data = model_instance.dict(**RESPONSE_MODEL_POLICY) + assert model_cls(**data) == model_instance + + +def test_from_catalog_to_webapi_service(): + + # Taken from services/catalog/src/simcore_service_catalog/models/schemas/services.py on Feb.2021 + catalog_service = { + "name": "File Picker", + "thumbnail": None, + "description": "File Picker", + "classifiers": [], + "quality": {}, + "access_rights": { + 1: {"execute_access": True, "write_access": False}, + 4: {"execute_access": True, "write_access": True}, + }, + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "integration-version": None, + "type": "dynamic", + "badges": None, + "authors": [ + { + "name": "Odei Maiz", + "email": "maiz@itis.swiss", + "affiliation": None, + } + ], + "contact": "maiz@itis.swiss", + "inputs": { + "uno": { + "displayOrder": 0, + "label": "num", + "description": "Chosen int", + "type": "number", + "defaultValue": 33, + } + }, + "outputs": { + "outFile": { + "displayOrder": 0, + "label": "File", + "unit": "second", + "description": "Chosen File", + "type": "data:*/*", + "fileToKeyMap": None, + "defaultValue": None, # <<<<< --- on purpose to emulate old datasets with this invalid field in db + "widget": None, + } + }, + "owner": "maiz@itis.swiss", + } + + webapi_service = deepcopy(catalog_service) + replace_service_input_outputs(webapi_service, **RESPONSE_MODEL_POLICY) + + print(json.dumps(webapi_service, indent=2)) + + # If units are defined, I want unitShort and unitLong + assert webapi_service["outputs"]["outFile"]["unit"] == "second" + assert webapi_service["outputs"]["outFile"]["unitShort"] == "s" + assert webapi_service["outputs"]["outFile"]["unitLong"] == "seconds" + + # if units are NOT defined => must NOT set Long/Short units + fields = set(webapi_service["inputs"]["uno"].keys()) + assert not fields.intersection({"unit", "unitShort", "unitLong"}) + + # Trimmed! 
+ assert "defaultValue" not in webapi_service["outputs"]["outFile"] + + # All None are trimmed + for field, value in catalog_service["outputs"]["outFile"].items(): + if field != "defaultValue": + assert webapi_service["outputs"]["outFile"][field] == value diff --git a/tests/public-api/test_jobs_api.py b/tests/public-api/test_jobs_api.py index 6667f07e897..29d4b55c0e3 100644 --- a/tests/public-api/test_jobs_api.py +++ b/tests/public-api/test_jobs_api.py @@ -152,8 +152,6 @@ def test_run_job( assert status.started_at < status.stopped_at # check solver outputs - # FIXME: client auto-generator does not support polymorphism in responses(i.e response) - # https://openapi-generator.tech/docs/generators/python-legacy#schema-support-feature outputs: JobOutputs = solvers_api.get_job_outputs(solver.id, solver.version, job.id) assert isinstance(outputs, JobOutputs) assert outputs.job_id == job.id @@ -175,8 +173,13 @@ def test_run_job( # } output_file = outputs.results["output_1"] number = outputs.results["output_2"] - assert isinstance(output_file, File) - assert isinstance(number, float) + + if status.state == "success": + assert isinstance(output_file, File) + assert isinstance(number, float) + else: + # one of them is not finished + assert output_file is None or number is None # file exists in the cloud # FIXME: when director-v2 is connected instead of fake