From 03b15259628e9151398ddde21412f04e33d38705 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Mon, 15 Jun 2020 23:06:56 +0200
Subject: [PATCH 01/43] add traefik endpoint to api-gateway (#1555)
Added a mockserver to simulate the api-server service.
Fixed the e2e test to check for the api-gateway.
---
.../src/pytest_simcore/traefik_service.py | 17 ++++++----
.../src/simcore_service_director/producer.py | 2 +-
services/docker-compose.local.yml | 6 ++--
services/docker-compose.yml | 32 ++++++++++++++++---
tests/swarm-deploy/test_swarm_runs.py | 1 +
5 files changed, 44 insertions(+), 14 deletions(-)
diff --git a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py
index 93548fdf9af..3e9ee90d987 100644
--- a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py
+++ b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py
@@ -15,22 +15,23 @@
@pytest.fixture(scope="module")
-def traefik_endpoints(docker_stack: Dict, devel_environ: Dict) -> Tuple[URL, URL]:
+def traefik_endpoints(docker_stack: Dict, devel_environ: Dict) -> Tuple[URL, URL, URL]:
"""get the endpoint for the given simcore_service.
NOTE: simcore_service defined as a parametrization
"""
assert "simcore_traefik" in docker_stack["services"]
- api_endpoint = f"127.0.0.1:{get_service_published_port('traefik', 8080)}"
+ traefik_api_endpoint = f"127.0.0.1:{get_service_published_port('traefik', 8080)}"
webserver_endpoint = f"127.0.0.1:{get_service_published_port('traefik', 80)}"
- return (URL(f"http://{api_endpoint}"), URL(f"http://{webserver_endpoint}"))
+ apiserver_endpoint = f"127.0.0.1:{get_service_published_port('traefik', 10081)}"
+ return (URL(f"http://{traefik_api_endpoint}"), URL(f"http://{webserver_endpoint}"), URL(f"http://{apiserver_endpoint}"))
@pytest.fixture(scope="function")
async def traefik_service(
- loop, traefik_endpoints: Tuple[URL, URL], docker_stack: Dict
-) -> URL:
- api_endpoint, webserver_endpoint = traefik_endpoints
- await wait_till_traefik_responsive(api_endpoint)
+ loop, traefik_endpoints: Tuple[URL, URL, URL], docker_stack: Dict
+) -> Tuple[URL, URL, URL]:
+ traefik_api_endpoint, webserver_endpoint, apiserver_endpoint = traefik_endpoints
+ await wait_till_traefik_responsive(traefik_api_endpoint)
yield traefik_endpoints
@@ -47,3 +48,5 @@ async def wait_till_traefik_responsive(api_endpoint: URL):
assert "service" in proxied_service
if "webserver" in proxied_service["service"]:
assert proxied_service["status"] == "enabled"
+ elif "api-gateway" in proxied_service["service"]:
+ assert proxied_service["status"] == "enabled"
diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py
index e02dc814c11..70247f8b743 100644
--- a/services/director/src/simcore_service_director/producer.py
+++ b/services/director/src/simcore_service_director/producer.py
@@ -176,7 +176,7 @@ async def _create_docker_service_params(
f"traefik.http.routers.{service_name}.rule": f"PathPrefix(`/x/{node_uuid}`)",
f"traefik.http.routers.{service_name}.entrypoints": "http",
f"traefik.http.routers.{service_name}.priority": "10",
- f"traefik.http.routers.{service_name}.middlewares": "gzip@docker",
+ f"traefik.http.routers.{service_name}.middlewares": f"{config.SWARM_STACK_NAME}_gzip@docker",
},
"networks": [internal_network_id] if internal_network_id else [],
}
diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml
index b99946792ed..9b1db0ca2ef 100644
--- a/services/docker-compose.local.yml
+++ b/services/docker-compose.local.yml
@@ -83,6 +83,8 @@ services:
ports:
- target: 80
published: 9081
+ - target: 10081
+ published: 10081
- target: 8080
published: 8080
deploy:
@@ -94,7 +96,7 @@ services:
- traefik.http.routers.${SWARM_STACK_NAME}_api_internal.service=api@internal
- traefik.http.routers.${SWARM_STACK_NAME}_api_internal.rule=PathPrefix(`/dashboard`) || PathPrefix(`/api`)
- traefik.http.routers.${SWARM_STACK_NAME}_api_internal.entrypoints=traefik_monitor
- - traefik.http.routers.${SWARM_STACK_NAME}_api_internal.middlewares=gzip@docker
+ - traefik.http.routers.${SWARM_STACK_NAME}_api_internal.middlewares=${SWARM_STACK_NAME}_gzip@docker
- traefik.http.services.${SWARM_STACK_NAME}_api_internal.loadbalancer.server.port=8080
whoami:
@@ -108,4 +110,4 @@ services:
- traefik.http.services.${SWARM_STACK_NAME}_whoami.loadbalancer.server.port=80
- traefik.http.routers.${SWARM_STACK_NAME}_whoami.rule=PathPrefix(`/whoami`)
- traefik.http.routers.${SWARM_STACK_NAME}_whoami.entrypoints=traefik_monitor
- - traefik.http.routers.${SWARM_STACK_NAME}_whoami.middlewares=gzip@docker
+ - traefik.http.routers.${SWARM_STACK_NAME}_whoami.middlewares=${SWARM_STACK_NAME}_gzip@docker
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index 78d68183cfa..aea850e08b9 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -1,5 +1,27 @@
version: "3.7"
services:
+ api-gateway:
+ # get info here: https://www.mock-server.com
+ image: mockserver/mockserver
+ init: true
+ environment:
+ - MOCKSERVER_LIVENESS_HTTP_GET_PATH=/live
+ deploy:
+ labels:
+ - io.simcore.zone=${TRAEFIK_SIMCORE_ZONE}
+ # gzip compression
+ - traefik.http.middlewares.${SWARM_STACK_NAME}_gzip.compress=true
+ # ssl header necessary so that socket.io upgrades correctly from polling to websocket mode. the middleware must be attached to the right connection.
+ - traefik.http.middlewares.${SWARM_STACK_NAME}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
+ - traefik.enable=true
+ - traefik.http.services.${SWARM_STACK_NAME}_api-gateway.loadbalancer.server.port=1080
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.rule=hostregexp(`{host:.+}`)
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.entrypoints=simcore_api
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.priority=1
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME}_sslheader
+ networks:
+ - default
+
catalog:
image: ${DOCKER_REGISTRY:-itisfoundation}/catalog:${DOCKER_IMAGE_TAG:-latest}
init: true
@@ -88,15 +110,15 @@ services:
labels:
- io.simcore.zone=${TRAEFIK_SIMCORE_ZONE}
# gzip compression
- - traefik.http.middlewares.gzip.compress=true
+ - traefik.http.middlewares.${SWARM_STACK_NAME}_gzip.compress=true
# ssl header necessary so that socket.io upgrades correctly from polling to websocket mode. the middleware must be attached to the right connection.
- - traefik.http.middlewares.simcore_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
+ - traefik.http.middlewares.${SWARM_STACK_NAME}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
- traefik.enable=true
- traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.server.port=8080
- traefik.http.routers.${SWARM_STACK_NAME}_webserver.rule=hostregexp(`{host:.+}`)
- traefik.http.routers.${SWARM_STACK_NAME}_webserver.entrypoints=http
- traefik.http.routers.${SWARM_STACK_NAME}_webserver.priority=1
- - traefik.http.routers.${SWARM_STACK_NAME}_webserver.middlewares=gzip@docker, simcore_sslheader@docker
+ - traefik.http.routers.${SWARM_STACK_NAME}_webserver.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME}_sslheader
networks:
- default
- interactive_services_subnet
@@ -227,7 +249,7 @@ services:
- default
traefik:
- image: traefik:v2.2.0
+ image: traefik:v2.2.1
init: true
command:
- "--api=true"
@@ -241,6 +263,8 @@ services:
- "--metrics.prometheus.entryPoint=metrics"
- "--entryPoints.http.address=:80"
- "--entryPoints.http.forwardedHeaders.insecure"
+ - "--entryPoints.simcore_api.address=:10081"
+ - "--entryPoints.simcore_api.forwardedHeaders.insecure"
- "--entryPoints.traefik_monitor.address=:8080"
- "--entryPoints.traefik_monitor.forwardedHeaders.insecure"
- "--providers.docker.endpoint=unix:///var/run/docker.sock"
diff --git a/tests/swarm-deploy/test_swarm_runs.py b/tests/swarm-deploy/test_swarm_runs.py
index 6bffe2145ba..d1025f4632e 100644
--- a/tests/swarm-deploy/test_swarm_runs.py
+++ b/tests/swarm-deploy/test_swarm_runs.py
@@ -27,6 +27,7 @@
docker_compose_service_names = [
+ "api-gateway",
"catalog",
"director",
"sidecar",
From 680eb5952e8ddacc3cae7d1555253680a9153290 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Wed, 17 Jun 2020 08:58:59 +0200
Subject: [PATCH 02/43] added simcore_webserver_service in pytest simcore
package (#1563)
ensure the database is filled up
---
.../simcore_webserver_service.py | 43 +++++++++++++++++++
tests/swarm-deploy/conftest.py | 9 +++-
tests/swarm-deploy/test_swarm_runs.py | 18 +++++++-
3 files changed, 68 insertions(+), 2 deletions(-)
create mode 100644 packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py
diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py
new file mode 100644
index 00000000000..0d745698ac9
--- /dev/null
+++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_service.py
@@ -0,0 +1,43 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+from typing import Dict
+
+import aiohttp
+import pytest
+import tenacity
+from yarl import URL
+
+from servicelib.minio_utils import MinioRetryPolicyUponInitialization
+
+from .helpers.utils_docker import get_service_published_port
+
+
+@pytest.fixture(scope="module")
+def webserver_endpoint(docker_stack: Dict, devel_environ: Dict) -> URL:
+ assert "simcore_webserver" in docker_stack["services"]
+ endpoint = f"127.0.0.1:{get_service_published_port('webserver', '8080')}"
+
+ return URL(f"http://{endpoint}")
+
+
+@pytest.fixture(scope="function")
+async def webserver_service(webserver_endpoint: URL, docker_stack: Dict) -> URL:
+ await wait_till_webserver_responsive(webserver_endpoint)
+
+ yield webserver_endpoint
+
+
+# HELPERS --
+
+# TODO: this can be used by ANY of the simcore services!
+@tenacity.retry(**MinioRetryPolicyUponInitialization().kwargs)
+async def wait_till_webserver_responsive(webserver_endpoint: URL):
+ async with aiohttp.ClientSession() as session:
+ async with session.get(webserver_endpoint.with_path("/v0/")) as resp:
+ assert resp.status == 200
+ data = await resp.json()
+ assert "data" in data
+ assert "status" in data["data"]
+ assert data["data"]["status"] == "SERVICE_RUNNING"
diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py
index 6b91f2d17dc..8068ff98ae2 100644
--- a/tests/swarm-deploy/conftest.py
+++ b/tests/swarm-deploy/conftest.py
@@ -19,7 +19,8 @@
"pytest_simcore.rabbit_service",
"pytest_simcore.postgres_service",
"pytest_simcore.minio_service",
- "pytest_simcore.traefik_service"
+ "pytest_simcore.traefik_service",
+ "pytest_simcore.simcore_webserver_service",
]
log = logging.getLogger(__name__)
@@ -44,8 +45,14 @@ def prepare_all_services(
return services
+@pytest.fixture(scope="module")
+def create_db_on_start(devel_environ: Dict[str, str]):
+ devel_environ["WEBSERVER_DB_INITTABLES"] = "1"
+
+
@pytest.fixture(scope="module")
def make_up_prod(
+ create_db_on_start,
prepare_all_services: Dict,
simcore_docker_compose: Dict,
ops_docker_compose: Dict,
diff --git a/tests/swarm-deploy/test_swarm_runs.py b/tests/swarm-deploy/test_swarm_runs.py
index d1025f4632e..9b5208117e7 100644
--- a/tests/swarm-deploy/test_swarm_runs.py
+++ b/tests/swarm-deploy/test_swarm_runs.py
@@ -12,6 +12,7 @@
from typing import Dict, List
import pytest
+import tenacity
from docker import DockerClient
from docker.models.services import Service
from yarl import URL
@@ -146,10 +147,25 @@ def test_core_service_running(
)
+RETRY_WAIT_SECS = 2
+RETRY_COUNT = 20
+
+
def test_check_serve_root(loop, make_up_prod: Dict, traefik_service: URL):
+
req = urllib.request.Request("http://127.0.0.1:9081/")
try:
- resp = urllib.request.urlopen(req)
+ # it takes a bit of time until traefik sets up the correct proxy and the webserver takes time to start
+ @tenacity.retry(
+ wait=tenacity.wait_fixed(RETRY_WAIT_SECS),
+ stop=tenacity.stop_after_attempt(RETRY_COUNT),
+ before_sleep=tenacity.before_sleep_log(logger, logging.INFO),
+ )
+ def check_root(request):
+ resp = urllib.request.urlopen(req)
+ return resp
+
+ resp = check_root(req)
charset = resp.info().get_content_charset()
content = resp.read().decode(charset)
# TODO: serch osparc-simcore commit id e.g. 'osparc-simcore v817d82e'
From 821c8e61a5d23e4480781db669b19217c8ad220f Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Wed, 17 Jun 2020 13:38:44 +0200
Subject: [PATCH 03/43] Is1269/api-server upgrade (#1475)
- new api-server refactored
- api-server connected to simcore-stack
---
.codeclimate.yml | 1 +
.env-devel | 1 +
.github/CODEOWNERS | 2 +-
.github/workflows/ci-testing-deploy.yml | 18 +-
.pylintrc | 2 +-
.travis.yml | 16 +-
Makefile | 17 +-
.../{api-gateway.bash => api-server.bash} | 6 +-
ci/helpers/install_pylint.bash | 3 +-
.../{api-gateway.bash => api-server.bash} | 10 +-
.../src/simcore_postgres_database/cli.py | 5 +-
.../models/api_keys.py | 2 +-
packages/postgres-database/tests/conftest.py | 12 +-
.../tests/test_delete_projects_and_users.py | 52 +++
.../postgres-database/tests/test_groups.py | 3 +-
.../src/pytest_simcore/traefik_service.py | 2 +-
scripts/common.Makefile | 4 +-
scripts/demo/create_portal_markdown.py | 8 +-
...es-{{ deploy }}-{{ datestamp }}.md.jinja2 | 22 ++
scripts/openapi-generator-cli.bash | 3 +
services/api-gateway/.cookiecutterrc | 20 --
services/api-gateway/.env-devel | 13 -
services/api-gateway/Makefile | 63 ----
services/api-gateway/VERSION | 1 -
services/api-gateway/client | 1 -
services/api-gateway/docker/entrypoint.sh | 85 -----
services/api-gateway/requirements/_base.in | 14 -
services/api-gateway/requirements/_base.txt | 48 ---
services/api-gateway/requirements/_test.txt | 84 -----
services/api-gateway/setup.cfg | 10 -
.../simcore_service_api_gateway/__init__.py | 4 -
.../simcore_service_api_gateway/__main__.py | 29 --
.../application.py | 106 -------
.../src/simcore_service_api_gateway/auth.py | 94 ------
.../auth_security.py | 89 ------
.../simcore_service_api_gateway/crud_users.py | 30 --
.../src/simcore_service_api_gateway/db.py | 98 ------
.../endpoints_auth.py | 57 ----
.../endpoints_check.py | 22 --
.../endpoints_studies.py | 53 ----
.../endpoints_user.py | 16 -
.../src/simcore_service_api_gateway/main.py | 61 ----
.../simcore_service_api_gateway/schemas.py | 24 --
.../simcore_service_api_gateway/settings.py | 69 ----
.../utils/fastapi_shortcuts.py | 32 --
services/api-gateway/tests/unit/conftest.py | 40 ---
.../tests/unit/test_auth_security.py | 7 -
.../tests/unit/test_endpoints_check.py | 37 ---
.../api-gateway/tests/unit/test_settings.py | 20 --
services/api-server/.env-devel | 24 ++
services/api-server/.gitignore | 4 +
.../{api-gateway => api-server}/Dockerfile | 50 +--
services/api-server/Makefile | 141 +++++++++
.../{api-gateway => api-server}/README.md | 25 +-
services/api-server/VERSION | 1 +
.../docker/boot.sh | 17 +-
services/api-server/docker/entrypoint.sh | 76 +++++
.../docker/healthcheck.py | 1 -
services/api-server/openapi.json | 296 ++++++++++++++++++
.../requirements/Makefile | 0
services/api-server/requirements/_base.in | 22 ++
services/api-server/requirements/_base.txt | 65 ++++
.../requirements/_test.in | 12 +-
services/api-server/requirements/_test.txt | 107 +++++++
.../requirements/ci.txt | 3 +-
.../requirements/dev.txt | 3 +-
.../requirements/prod.txt | 5 +-
.../sandbox/_test_client_sdk.py} | 44 +--
services/api-server/sandbox/_test_schemas.py | 21 ++
services/api-server/sandbox/api-key-auth.py | 38 +++
services/api-server/sandbox/get_app_state.py | 24 ++
.../api-server/sandbox/model_conversions.py | 91 ++++++
.../api-server/sandbox/pydantic-settings.py | 51 +++
services/api-server/sandbox/simple_app.py | 45 +++
services/api-server/setup.cfg | 7 +
services/{api-gateway => api-server}/setup.py | 6 +-
.../simcore_service_api_server/__init__.py | 4 +
.../simcore_service_api_server/__main__.py | 35 +++
.../__version__.py | 4 +-
.../api}/__init__.py | 0
.../api/dependencies/__init__.py} | 0
.../api/dependencies/auth_api_key.py | 46 +++
.../api/dependencies/auth_basic.py | 48 +++
.../api/dependencies/auth_oath2.py | 100 ++++++
.../api/dependencies/authentication.py | 3 +
.../api/dependencies/database.py | 21 ++
.../api/dependencies/webserver.py | 55 ++++
.../api/errors/__init__.py} | 0
.../api/errors/http_error.py | 7 +
.../api/errors/validation_error.py | 26 ++
.../simcore_service_api_server/api/root.py | 15 +
.../api/routes/__init__.py | 0
.../api/routes/authentication/__init__.py | 0
.../api/routes/authentication/api_key.py | 15 +
.../api/routes/authentication/oauth2.py | 77 +++++
.../api/routes/health.py | 8 +
.../api/routes/meta.py | 15 +
.../api/routes/studies.py | 50 +++
.../api/routes/users.py | 64 ++++
.../core/__init__.py | 0
.../core/application.py | 61 ++++
.../simcore_service_api_server/core/errors.py | 0
.../simcore_service_api_server/core/events.py | 40 +++
.../core/openapi.py | 65 ++++
.../simcore_service_api_server/core/redoc.py | 51 +++
.../core/settings.py | 107 +++++++
.../simcore_service_api_server/db/__init__.py | 0
.../simcore_service_api_server/db/errors.py | 2 +
.../simcore_service_api_server/db/events.py | 54 ++++
.../db/repositories/__init__.py | 0
.../db/repositories/api_keys.py | 38 +++
.../db/repositories/base.py | 15 +
.../db/repositories/users.py | 107 +++++++
.../simcore_service_api_server/db/tables.py | 16 +
.../models/__init__.py | 0
.../models/domain/__init__.py | 0
.../models/domain/api_keys.py | 15 +
.../models/domain/groups.py | 15 +
.../models/domain/users.py | 32 ++
.../models/schemas/__init__.py | 0
.../models/schemas/api_keys.py | 12 +
.../models/schemas/meta.py | 27 ++
.../models/schemas/profiles.py | 38 +++
.../models/schemas/tokens.py | 25 ++
.../models/schemas/users.py | 12 +
.../services/__init__.py | 0
.../services/jwt.py | 66 ++++
.../services}/remote_debug.py | 12 +-
.../services/security.py} | 20 +-
.../services/serialization.py | 19 ++
.../services/webserver.py | 52 +++
.../api-server/tests/integration/.gitkeep | 0
services/api-server/tests/unit/_helpers.py | 65 ++++
services/api-server/tests/unit/conftest.py | 246 +++++++++++++++
.../api-server/tests/unit/test_api_meta.py | 19 ++
.../api-server/tests/unit/test_api_user.py | 51 +++
.../tests/unit/test_code_syntax.py | 4 +-
.../tests/unit/test_fake_generator.py | 18 ++
services/api-server/tests/unit/test_jwt.py | 35 +++
.../api-server/tests/unit/test_security.py | 14 +
.../api-server/tests/unit/test_settings.py | 36 +++
.../tests/utils/docker-compose.yml | 2 +-
services/api-server/tests/utils/init-pg.py | 108 +++++++
.../tools/gen_api.py | 2 +-
.../tools/templates/cruds.py.jinja2 | 0
.../tools/templates/orm.py.jinja2 | 0
.../resource_custom_methods.py.jinja2 | 3 +-
.../resource_standard_methods.py.jinja2 | 6 +-
.../tools/templates/schemas.py.jinja2 | 0
.../tools/templates/test_endpoints.py.jinja2 | 2 +-
services/catalog/Makefile | 2 +-
services/catalog/docker/healthcheck.py | 1 -
.../src/simcore_service_catalog/__main__.py | 2 +-
.../src/simcore_service_catalog/config.py | 6 +-
.../simcore_service_catalog/endpoints/dags.py | 2 +-
.../src/simcore_service_catalog/main.py | 3 +-
.../src/simcore_service_catalog/orm.py | 1 -
.../schemas/schemas_dags.py | 4 +-
.../simcore_service_catalog/utils/helpers.py | 1 -
.../utils/remote_debug.py | 13 +-
services/catalog/tests/unit/test_package.py | 3 +-
services/docker-compose-build.yml | 14 +-
services/docker-compose.devel.yml | 8 +
services/docker-compose.local.yml | 5 +
services/docker-compose.yml | 23 +-
services/sidecar/Dockerfile | 2 +-
.../simcore_service_sidecar/__version__.py | 2 +-
.../src/simcore_service_webserver/session.py | 2 +-
.../server/tests/unit/with_dbs/test_login.py | 67 +++-
tests/swarm-deploy/test_swarm_runs.py | 2 +-
170 files changed, 3440 insertions(+), 1423 deletions(-)
rename ci/github/unit-testing/{api-gateway.bash => api-server.bash} (69%)
rename ci/travis/unit-testing/{api-gateway.bash => api-server.bash} (82%)
create mode 100644 scripts/demo/templates/invitation-codes-{{ deploy }}-{{ datestamp }}.md.jinja2
delete mode 100644 services/api-gateway/.cookiecutterrc
delete mode 100644 services/api-gateway/.env-devel
delete mode 100644 services/api-gateway/Makefile
delete mode 100644 services/api-gateway/VERSION
delete mode 160000 services/api-gateway/client
delete mode 100755 services/api-gateway/docker/entrypoint.sh
delete mode 100644 services/api-gateway/requirements/_base.in
delete mode 100644 services/api-gateway/requirements/_base.txt
delete mode 100644 services/api-gateway/requirements/_test.txt
delete mode 100644 services/api-gateway/setup.cfg
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/__init__.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/__main__.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/application.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/auth.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/auth_security.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/crud_users.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/db.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/endpoints_auth.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/endpoints_check.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/endpoints_studies.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/endpoints_user.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/main.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/schemas.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/settings.py
delete mode 100644 services/api-gateway/src/simcore_service_api_gateway/utils/fastapi_shortcuts.py
delete mode 100644 services/api-gateway/tests/unit/conftest.py
delete mode 100644 services/api-gateway/tests/unit/test_auth_security.py
delete mode 100644 services/api-gateway/tests/unit/test_endpoints_check.py
delete mode 100644 services/api-gateway/tests/unit/test_settings.py
create mode 100644 services/api-server/.env-devel
create mode 100644 services/api-server/.gitignore
rename services/{api-gateway => api-server}/Dockerfile (67%)
create mode 100644 services/api-server/Makefile
rename services/{api-gateway => api-server}/README.md (53%)
create mode 100644 services/api-server/VERSION
rename services/{api-gateway => api-server}/docker/boot.sh (61%)
create mode 100755 services/api-server/docker/entrypoint.sh
rename services/{api-gateway => api-server}/docker/healthcheck.py (99%)
create mode 100644 services/api-server/openapi.json
rename services/{api-gateway => api-server}/requirements/Makefile (100%)
create mode 100644 services/api-server/requirements/_base.in
create mode 100644 services/api-server/requirements/_base.txt
rename services/{api-gateway => api-server}/requirements/_test.in (66%)
create mode 100644 services/api-server/requirements/_test.txt
rename services/{api-gateway => api-server}/requirements/ci.txt (81%)
rename services/{api-gateway => api-server}/requirements/dev.txt (90%)
rename services/{api-gateway => api-server}/requirements/prod.txt (55%)
rename services/{api-gateway/tests/unit/test_client_sdk.py => api-server/sandbox/_test_client_sdk.py} (91%)
create mode 100644 services/api-server/sandbox/_test_schemas.py
create mode 100644 services/api-server/sandbox/api-key-auth.py
create mode 100644 services/api-server/sandbox/get_app_state.py
create mode 100644 services/api-server/sandbox/model_conversions.py
create mode 100644 services/api-server/sandbox/pydantic-settings.py
create mode 100644 services/api-server/sandbox/simple_app.py
create mode 100644 services/api-server/setup.cfg
rename services/{api-gateway => api-server}/setup.py (88%)
create mode 100644 services/api-server/src/simcore_service_api_server/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/__main__.py
rename services/{api-gateway/src/simcore_service_api_gateway => api-server/src/simcore_service_api_server}/__version__.py (68%)
rename services/{api-gateway/src/simcore_service_api_gateway/utils => api-server/src/simcore_service_api_server/api}/__init__.py (100%)
rename services/{api-gateway/tests/integration/.gitkeep => api-server/src/simcore_service_api_server/api/dependencies/__init__.py} (100%)
create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/database.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
rename services/{api-gateway/tools/templates/schemas.py.jinja2 => api-server/src/simcore_service_api_server/api/errors/__init__.py} (100%)
create mode 100644 services/api-server/src/simcore_service_api_server/api/errors/http_error.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/errors/validation_error.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/root.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/authentication/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/health.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/meta.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/studies.py
create mode 100644 services/api-server/src/simcore_service_api_server/api/routes/users.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/application.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/errors.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/events.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/openapi.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/redoc.py
create mode 100644 services/api-server/src/simcore_service_api_server/core/settings.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/errors.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/events.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/repositories/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/repositories/base.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/repositories/users.py
create mode 100644 services/api-server/src/simcore_service_api_server/db/tables.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/domain/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/domain/api_keys.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/domain/groups.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/domain/users.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/meta.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/profiles.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/tokens.py
create mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/users.py
create mode 100644 services/api-server/src/simcore_service_api_server/services/__init__.py
create mode 100644 services/api-server/src/simcore_service_api_server/services/jwt.py
rename services/{api-gateway/src/simcore_service_api_gateway/utils => api-server/src/simcore_service_api_server/services}/remote_debug.py (76%)
rename services/{api-gateway/src/simcore_service_api_gateway/utils/helpers.py => api-server/src/simcore_service_api_server/services/security.py} (55%)
create mode 100644 services/api-server/src/simcore_service_api_server/services/serialization.py
create mode 100644 services/api-server/src/simcore_service_api_server/services/webserver.py
create mode 100644 services/api-server/tests/integration/.gitkeep
create mode 100644 services/api-server/tests/unit/_helpers.py
create mode 100644 services/api-server/tests/unit/conftest.py
create mode 100644 services/api-server/tests/unit/test_api_meta.py
create mode 100644 services/api-server/tests/unit/test_api_user.py
rename services/{api-gateway => api-server}/tests/unit/test_code_syntax.py (100%)
create mode 100644 services/api-server/tests/unit/test_fake_generator.py
create mode 100644 services/api-server/tests/unit/test_jwt.py
create mode 100644 services/api-server/tests/unit/test_security.py
create mode 100644 services/api-server/tests/unit/test_settings.py
rename services/{api-gateway => api-server}/tests/utils/docker-compose.yml (93%)
create mode 100644 services/api-server/tests/utils/init-pg.py
rename services/{api-gateway => api-server}/tools/gen_api.py (99%)
rename services/{api-gateway => api-server}/tools/templates/cruds.py.jinja2 (100%)
rename services/{api-gateway => api-server}/tools/templates/orm.py.jinja2 (100%)
rename services/{api-gateway => api-server}/tools/templates/resource_custom_methods.py.jinja2 (94%)
rename services/{api-gateway => api-server}/tools/templates/resource_standard_methods.py.jinja2 (97%)
create mode 100644 services/api-server/tools/templates/schemas.py.jinja2
rename services/{api-gateway => api-server}/tools/templates/test_endpoints.py.jinja2 (95%)
diff --git a/.codeclimate.yml b/.codeclimate.yml
index d9bd3b34903..32cc5ec7253 100644
--- a/.codeclimate.yml
+++ b/.codeclimate.yml
@@ -78,3 +78,4 @@ exclude_patterns:
- "**/migrations/"
- "**/*.js"
- "**/pytest-simcore/"
+ - "**/sandbox/"
diff --git a/.env-devel b/.env-devel
index 8ca1cb110d0..033700f642c 100644
--- a/.env-devel
+++ b/.env-devel
@@ -47,6 +47,7 @@ TRACING_ZIPKIN_ENDPOINT=http://jaeger:9411
TRAEFIK_SIMCORE_ZONE=internal_simcore_stack
+WEBSERVER_HOST=webserver
WEBSERVER_LOGIN_REGISTRATION_CONFIRMATION_REQUIRED=0
WEBSERVER_LOGIN_REGISTRATION_INVITATION_REQUIRED=0
# python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key())"
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 3c11d09f3d7..c1752821415 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -13,7 +13,7 @@ Makefile @pcrespov, @sanderegg
/scripts/demo @odeimaiz, @pcrespov
/scripts/json-schema-to-openapi-schema @sanderegg
/scripts/template-projects @odeimaiz, @pcrespov
-/services/api-gateway @pcrespov
+/services/api-server @pcrespov
/services/catalog @pcrespov
/services/sidecar @pcrespov, @mguidon
/services/web/client @odeimaiz, @oetiker, @ignapas
diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index a862c5e279f..54c10604e1b 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -56,8 +56,8 @@ jobs:
- name: test
run: ./ci/github/unit-testing/api.bash test
- unit-test-api-gateway:
- name: Unit-testing api-gateway
+ unit-test-api-server:
+ name: Unit-testing api-server
runs-on: ${{ matrix.os }}
strategy:
matrix:
@@ -86,9 +86,9 @@ jobs:
restore-keys: |
${{ runner.os }}-pip-
- name: install
- run: ./ci/github/unit-testing/api-gateway.bash install
+ run: ./ci/github/unit-testing/api-server.bash install
- name: test
- run: ./ci/github/unit-testing/api-gateway.bash test
+ run: ./ci/github/unit-testing/api-server.bash test
- uses: codecov/codecov-action@v1
with:
flags: unittests #optional
@@ -96,12 +96,12 @@ jobs:
run: |
curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
chmod +x ./cc-test-reporter
- ./cc-test-reporter format-coverage -t coverage.py -o codeclimate.unit_api_gateway_coverage.json coverage.xml
+ ./cc-test-reporter format-coverage -t coverage.py -o codeclimate.unit_api_server_coverage.json coverage.xml
- name: upload codeclimate coverage
uses: actions/upload-artifact@v1
with:
- name: unit_api_gateway_coverage
- path: codeclimate.unit_api_gateway_coverage.json
+ name: unit_api_server_coverage
+ path: codeclimate.unit_api_server_coverage.json
unit-test-catalog:
name: Unit-testing catalog
@@ -838,7 +838,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/download-artifact@v1
with:
- name: unit_api_gateway_coverage
+ name: unit_api_server_coverage
- uses: actions/download-artifact@v1
with:
name: unit_catalog_coverage
@@ -873,7 +873,7 @@ jobs:
run: |
mkdir all_coverages
cp \
- unit_api_gateway_coverage/*.json \
+ unit_api_server_coverage/*.json \
unit_catalog_coverage/*.json \
unit_director_coverage/*.json \
unit_sidecar_coverage/*.json \
diff --git a/.pylintrc b/.pylintrc
index 0896b2147e2..dba7f3db517 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -3,7 +3,7 @@
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
-extension-pkg-whitelist=
+extension-pkg-whitelist=pydantic
# Add files or directories to the blacklist. They should be base names, not
# paths.
diff --git a/.travis.yml b/.travis.yml
index 8d619454f95..d45442a6b4e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -107,9 +107,9 @@ jobs:
after_failure:
- unbuffer bash ci/travis/unit-testing/webserver.bash after_failure
- # test python, api-gateway ----------------------------------------------------------------------
+ # test python, api-server ----------------------------------------------------------------------
- stage: build / unit-testing
- name: api-gateway
+ name: api-server
language: python
python:
- "3.6"
@@ -118,17 +118,17 @@ jobs:
- docker
cache: pip
before_install:
- - sudo bash ci/travis/unit-testing/api-gateway.bash before_install
+ - sudo bash ci/travis/unit-testing/api-server.bash before_install
install:
- - unbuffer bash ci/travis/unit-testing/api-gateway.bash install
+ - unbuffer bash ci/travis/unit-testing/api-server.bash install
before_script:
- - unbuffer bash ci/travis/unit-testing/api-gateway.bash before_script
+ - unbuffer bash ci/travis/unit-testing/api-server.bash before_script
script:
- - unbuffer bash ci/travis/unit-testing/api-gateway.bash script
+ - unbuffer bash ci/travis/unit-testing/api-server.bash script
after_success:
- - unbuffer bash ci/travis/unit-testing/api-gateway.bash after_success
+ - unbuffer bash ci/travis/unit-testing/api-server.bash after_success
after_failure:
- - unbuffer bash ci/travis/unit-testing/api-gateway.bash after_failure
+ - unbuffer bash ci/travis/unit-testing/api-server.bash after_failure
# test python, catalog ----------------------------------------------------------------------
- stage: build / unit-testing
diff --git a/Makefile b/Makefile
index 0dff48d7fa5..ef2dfa7fec8 100644
--- a/Makefile
+++ b/Makefile
@@ -29,7 +29,7 @@ $(if $(IS_WIN),$(error Windows is not supported in all recipes. Use WSL instead.
# TODO: read from docker-compose file instead $(shell find $(CURDIR)/services -type f -name 'Dockerfile')
# or $(notdir $(subst /Dockerfile,,$(wildcard services/*/Dockerfile))) ...
SERVICES_LIST := \
- api-gateway \
+ api-server \
catalog \
director \
sidecar \
@@ -46,10 +46,12 @@ export VCS_STATUS_CLIENT:= $(if $(shell git status -s),'modified/untracked','cle
export BUILD_DATE := $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
# api-versions
-export CATALOG_API_VERSION := $(shell cat $(CURDIR)/services/catalog/VERSION)
-export DIRECTOR_API_VERSION := $(shell cat $(CURDIR)/services/director/VERSION)
-export STORAGE_API_VERSION := $(shell cat $(CURDIR)/services/storage/VERSION)
-export WEBSERVER_API_VERSION:= $(shell cat $(CURDIR)/services/web/server/VERSION)
+export API_SERVER_API_VERSION := $(shell cat $(CURDIR)/services/api-server/VERSION)
+export CATALOG_API_VERSION := $(shell cat $(CURDIR)/services/catalog/VERSION)
+export DIRECTOR_API_VERSION := $(shell cat $(CURDIR)/services/director/VERSION)
+export STORAGE_API_VERSION := $(shell cat $(CURDIR)/services/storage/VERSION)
+export WEBSERVER_API_VERSION := $(shell cat $(CURDIR)/services/web/server/VERSION)
+
# swarm stacks
export SWARM_STACK_NAME ?= simcore
@@ -189,7 +191,7 @@ endif
up-devel: .stack-simcore-development.yml .init-swarm $(CLIENT_WEB_OUTPUT) ## Deploys local development stack, qx-compile+watch and ops stack (pass 'make ops_disabled=1 up-...' to disable)
- # Start compile+watch front-end container [front-end]
+ # Start compile+watch front-end container [front-end]
$(MAKE_C) services/web/client down compile-dev flags=--watch
# Deploy stack $(SWARM_STACK_NAME) [back-end]
@docker stack deploy -c $< $(SWARM_STACK_NAME)
@@ -303,7 +305,8 @@ pylint: ## Runs python linter framework's wide
/bin/bash -c "pylint --jobs=0 --rcfile=.pylintrc $(strip $(shell find services packages -iname '*.py' \
-not -path "*egg*" \
-not -path "*migration*" \
- -not -path "*contrib*" \
+ -not -path "*datcore.py" \
+ -not -path "*sandbox*" \
-not -path "*-sdk/python*" \
-not -path "*generated_code*" \
-not -path "*datcore.py" \
diff --git a/ci/github/unit-testing/api-gateway.bash b/ci/github/unit-testing/api-server.bash
similarity index 69%
rename from ci/github/unit-testing/api-gateway.bash
rename to ci/github/unit-testing/api-server.bash
index 37963e2c0fa..0e7d1bfd7c3 100755
--- a/ci/github/unit-testing/api-gateway.bash
+++ b/ci/github/unit-testing/api-server.bash
@@ -5,14 +5,14 @@ IFS=$'\n\t'
install() {
bash ci/helpers/ensure_python_pip.bash
- pushd services/api-gateway; pip3 install -r requirements/ci.txt; popd
+ pushd services/api-server; pip3 install -r requirements/ci.txt; popd
pip list --verbose
}
test() {
- pytest --cov=simcore_service_api_gateway --durations=10 --cov-append \
+ pytest --cov=simcore_service_api_server --durations=10 --cov-append \
--color=yes --cov-report=term-missing --cov-report=xml \
- -v -m "not travis" services/api-gateway/tests/unit
+ -v -m "not travis" services/api-server/tests/unit
}
# Check if the function exists (bash specific)
diff --git a/ci/helpers/install_pylint.bash b/ci/helpers/install_pylint.bash
index c35ab066f02..b4c4beb4c8f 100644
--- a/ci/helpers/install_pylint.bash
+++ b/ci/helpers/install_pylint.bash
@@ -14,7 +14,8 @@ pip3 install "$PYLINT_VERSION"
# Minimal packages to pass linter
pip install \
celery\
- docker
+ docker\
+ pyjwt
echo "INFO:" "$(pylint --version)" "@" "$(command -v pylint)"
diff --git a/ci/travis/unit-testing/api-gateway.bash b/ci/travis/unit-testing/api-server.bash
similarity index 82%
rename from ci/travis/unit-testing/api-gateway.bash
rename to ci/travis/unit-testing/api-server.bash
index cba866ecda5..7be8367b474 100755
--- a/ci/travis/unit-testing/api-gateway.bash
+++ b/ci/travis/unit-testing/api-server.bash
@@ -3,7 +3,7 @@
set -euo pipefail
IFS=$'\n\t'
-FOLDER_CHECKS=(api/ api-gateway packages/ .travis.yml)
+FOLDER_CHECKS=(api/ api-server packages/ .travis.yml)
before_install() {
if bash ci/travis/helpers/test-for-changes.bash "${FOLDER_CHECKS[@]}";
@@ -18,7 +18,7 @@ install() {
if bash ci/travis/helpers/test-for-changes.bash "${FOLDER_CHECKS[@]}";
then
bash ci/helpers/ensure_python_pip.bash
- pushd services/api-gateway; pip3 install -r requirements/ci.txt; popd
+ pushd services/api-server; pip3 install -r requirements/ci.txt; popd
fi
}
@@ -32,11 +32,11 @@ before_script() {
script() {
if bash ci/travis/helpers/test-for-changes.bash "${FOLDER_CHECKS[@]}";
then
- pytest --cov=simcore_service_api_gateway --durations=10 --cov-append \
+ pytest --cov=simcore_service_api_server --durations=10 --cov-append \
--color=yes --cov-report=term-missing --cov-report=xml \
- -v -m "not travis" services/api-gateway/tests/unit
+ -v -m "not travis" services/api-server/tests/unit
else
- echo "No changes detected. Skipping unit-testing of api-gateway."
+ echo "No changes detected. Skipping unit-testing of api-server."
fi
}
diff --git a/packages/postgres-database/src/simcore_postgres_database/cli.py b/packages/postgres-database/src/simcore_postgres_database/cli.py
index 95a26524a80..f86c97035b3 100644
--- a/packages/postgres-database/src/simcore_postgres_database/cli.py
+++ b/packages/postgres-database/src/simcore_postgres_database/cli.py
@@ -11,6 +11,7 @@
from copy import deepcopy
from logging.config import fileConfig
from pathlib import Path
+from typing import Dict
import alembic.command
import click
@@ -116,7 +117,7 @@ def main():
@click.option("--host")
@click.option("--port", type=int)
@click.option("--database", "-d")
-def discover(**cli_inputs):
+def discover(**cli_inputs) -> Dict:
""" Discovers databases and caches configs in ~/.simcore_postgres_database.json (except if --no-cache)"""
# NOTE: Do not add defaults to user, password so we get a chance to ping urls
# TODO: if multiple candidates online, then query user to select
@@ -173,7 +174,7 @@ def _test_swarm():
fg="green",
)
- return
+ return cfg
except Exception as err:
inline_msg = str(err).replace("\n", ". ")
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py b/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py
index cd3ee02e421..55527db8666 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py
@@ -1,4 +1,4 @@
-""" API keys to access public gateway
+""" API keys to access public API
These keys grant the client authorization to the API resources
diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py
index f45d746514a..c14004d7fa5 100644
--- a/packages/postgres-database/tests/conftest.py
+++ b/packages/postgres-database/tests/conftest.py
@@ -3,11 +3,12 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
-import pytest
-import yaml
+from typing import Callable, Coroutine, Union
-import sqlalchemy as sa
import aiopg.sa
+import pytest
+import sqlalchemy as sa
+import yaml
@pytest.fixture(scope="session")
@@ -32,11 +33,8 @@ def postgres_service(docker_services, docker_ip, docker_compose_file) -> str:
return dsn
-from typing import Union, Coroutine, Callable
-
-
@pytest.fixture
-def make_engine(postgres_service):
+def make_engine(postgres_service: str) -> Callable:
dsn = postgres_service
def maker(is_async=True) -> Union[Coroutine, Callable]:
diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py
index a4cfd4bc46b..f0e4b9e18cd 100644
--- a/packages/postgres-database/tests/test_delete_projects_and_users.py
+++ b/packages/postgres-database/tests/test_delete_projects_and_users.py
@@ -85,6 +85,58 @@ async def start():
return loop.run_until_complete(start())
+@pytest.mark.skip(reason="sandbox for dev purposes")
+async def test_insert_user(engine):
+ async with engine.acquire() as conn:
+
+ # execute + scalar
+ res: ResultProxy = await conn.execute(
+ users.insert().values(**random_user(name="FOO"))
+ )
+ assert res.returns_rows
+ assert res.rowcount == 1
+ assert res.keys() == ("id",)
+
+ user_id = await res.scalar()
+ assert isinstance(user_id, int)
+ assert user_id > 0
+
+ # only scalar
+ user2_id: int = await conn.scalar(
+ users.insert().values(**random_user(name="BAR"))
+ )
+ assert isinstance(user2_id, int)
+ assert user2_id == user_id + 1
+
+ # query result
+ res: ResultProxy = await conn.execute(
+ users.select().where(users.c.id == user2_id)
+ )
+ assert res.returns_rows
+ assert res.rowcount == 1
+ assert len(res.keys()) > 1
+
+ # DIFFERENT betwen .first() and fetchone()
+
+ user2: RowProxy = await res.first()
+ # Fetch the first row and then close the result set unconditionally.
+ assert res.closed
+
+ res: ResultProxy = await conn.execute(
+ users.select().where(users.c.id == user2_id)
+ )
+ user2a: RowProxy = await res.fetchone()
+ # If rows are present, the cursor remains open after this is called.
+ assert not res.closed
+ assert user2 == user2a
+
+ user2b: RowProxy = await res.fetchone()
+ # If no more rows, the cursor is automatically closed and None is returned
+ assert user2b is None
+ assert res.closed
+
+
+
async def test_count_users(engine):
async with engine.acquire() as conn:
users_count = await conn.scalar(users.count())
diff --git a/packages/postgres-database/tests/test_groups.py b/packages/postgres-database/tests/test_groups.py
index cb5604614d2..3bdc9551ee2 100644
--- a/packages/postgres-database/tests/test_groups.py
+++ b/packages/postgres-database/tests/test_groups.py
@@ -1,4 +1,5 @@
-# pylint: disable=E1120
+# pylint: disable=no-name-in-module
+# pylint: disable=no-value-for-parameter
import faker
import pytest
diff --git a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py
index 3e9ee90d987..0fd90cd1299 100644
--- a/packages/pytest-simcore/src/pytest_simcore/traefik_service.py
+++ b/packages/pytest-simcore/src/pytest_simcore/traefik_service.py
@@ -48,5 +48,5 @@ async def wait_till_traefik_responsive(api_endpoint: URL):
assert "service" in proxied_service
if "webserver" in proxied_service["service"]:
assert proxied_service["status"] == "enabled"
- elif "api-gateway" in proxied_service["service"]:
+ elif "api-server" in proxied_service["service"]:
assert proxied_service["status"] == "enabled"
diff --git a/scripts/common.Makefile b/scripts/common.Makefile
index f1da5d1701a..3fc94e1ead0 100644
--- a/scripts/common.Makefile
+++ b/scripts/common.Makefile
@@ -92,7 +92,9 @@ info: ## displays basic info
.PHONY: autoformat
-autoformat: ## runs black python formatter on this service's code
+autoformat: ## runs black python formatter on this service's code. Use AFTER make install-*
+ # sort imports
+ @python3 -m isort --atomic -rc $(CURDIR)
# auto formatting with black
@python3 -m black --verbose \
--exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration|client-sdk|generated_code)/" \
diff --git a/scripts/demo/create_portal_markdown.py b/scripts/demo/create_portal_markdown.py
index 48c136f5cde..75cd698039b 100644
--- a/scripts/demo/create_portal_markdown.py
+++ b/scripts/demo/create_portal_markdown.py
@@ -3,6 +3,9 @@
Aims to emulate links
"""
+
+# TODO: extend cli to generate invitations use jinja templates (see folder)
+
import argparse
import json
import logging
@@ -118,13 +121,14 @@ def main(mock_codes):
print("", file=fh)
+ today = datetime.today()
file_path = current_path.parent / CONFIRMATIONS_FILENAME
with _open(file_path) as fh:
print("code,user_id,action,data,created_at", file=fh)
for n, code in enumerate(mock_codes, start=1):
print('%s,1,INVITATION,"{' % code, file=fh)
- print(f'""guest"": ""invitation-{n}"" ,', file=fh)
- print('""host"" : ""support@osparc.io""', file=fh)
+ print(f'""guest"": ""invitation-{today.year:04d}{today.month:02d}{today.day:02d}-{n}"" ,', file=fh)
+ print('""issuer"" : ""support@osparc.io""', file=fh)
print('}",%s' % datetime.now().isoformat(sep=" "), file=fh)
diff --git a/scripts/demo/templates/invitation-codes-{{ deploy }}-{{ datestamp }}.md.jinja2 b/scripts/demo/templates/invitation-codes-{{ deploy }}-{{ datestamp }}.md.jinja2
new file mode 100644
index 00000000000..f7e9306f125
--- /dev/null
+++ b/scripts/demo/templates/invitation-codes-{{ deploy }}-{{ datestamp }}.md.jinja2
@@ -0,0 +1,22 @@
+# Invitations for {{ deploy }}
+
+
+{% for url in invitation_urls %}
+ 1. {url}
+{% endfor %}
+
+
+Every invitation can be identified in the data column as
+
+```json
+{
+ "guest": "invitation-{{ datestamp }}-${NUMBER}" ,
+ "issuer" : "{{ issuer_email }}"
+}
+```
+
+These invitations **will expire on {{ datetime.now() + valid_lifetime }}** if they are not renovated
+
+---
+
+Generated with {{ current_program }} by {{ issuer_email }} on {{ datetime.now() }}
diff --git a/scripts/openapi-generator-cli.bash b/scripts/openapi-generator-cli.bash
index 4a3198fd120..b1072c66353 100755
--- a/scripts/openapi-generator-cli.bash
+++ b/scripts/openapi-generator-cli.bash
@@ -27,6 +27,9 @@ GROUPID=$(stat --format=%g "$PWD")
#PATTERN=s+$PWD+/local+
#CMD=$(echo "$@" | sed $PATTERN)
+# TODO: check SAME digest. Perhaps push into itisfoundation repo?
+# openapitools/openapi-generator-cli v4.2.3 sha256:c90e7f2d63340574bba015ad88a5abb55d5b25ab3d5460c02e14a566574e8d55
+
exec docker run --rm \
--user "$USERID:$GROUPID" \
--volume "$PWD:/local" \
diff --git a/services/api-gateway/.cookiecutterrc b/services/api-gateway/.cookiecutterrc
deleted file mode 100644
index 980023f6e13..00000000000
--- a/services/api-gateway/.cookiecutterrc
+++ /dev/null
@@ -1,20 +0,0 @@
-# This file exists so you can easily regenerate your project.
-#
-# cookiecutter --overwrite-if-exists --config-file=.cookiecutterrc /home/crespo/devp/osparc-simcore/services/api-gateway/../../../cookiecutter-simcore-py-fastapi
-#
-
-default_context:
-
- _extensions: ['jinja2_time.TimeExtension']
- _template: '/home/crespo/devp/osparc-simcore/services/api-gateway/../../../cookiecutter-simcore-py-fastapi'
- distribution_name: 'simcore-service-api-gateway'
- enable_docker_daemon_access: 'False'
- full_name: 'Pedro Crespo'
- github_username: 'pcrespov'
- is_standalone_repo: 'False'
- package_name: 'simcore_service_api_gateway'
- project_name: 'Public API Gateway'
- project_short_description: "Platform's API Gateway for external clients"
- project_slug: 'api-gateway'
- version: '0.1.1'
- year: '2020'
diff --git a/services/api-gateway/.env-devel b/services/api-gateway/.env-devel
deleted file mode 100644
index 4f55c26b4b4..00000000000
--- a/services/api-gateway/.env-devel
+++ /dev/null
@@ -1,13 +0,0 @@
-#
-# Environment variables used to configure this service
-#
-
-# SEE services/api-gateway/src/simcore_service_api_gateway/auth_security.py
-SECRET_KEY=d0d0397de2c85ad26ffd4a0f9643dfe3a0ca3937f99cf3c2e174e11b5ef79880
-
-# SEE services/api-gateway/src/simcore_service_api_gateway/settings.py
-LOGLEVEL=DEBUG
-
-POSTGRES_USER=test
-POSTGRES_PASSWORD=test
-POSTGRES_DB=test
diff --git a/services/api-gateway/Makefile b/services/api-gateway/Makefile
deleted file mode 100644
index 0bf23497921..00000000000
--- a/services/api-gateway/Makefile
+++ /dev/null
@@ -1,63 +0,0 @@
-#
-# Targets for DEVELOPMENT of Public API Gateway
-#
-include ../../scripts/common.Makefile
-
-# Custom variables
-APP_NAME := $(notdir $(CURDIR))
-APP_CLI_NAME := simcore-service-api-gateway
-export APP_VERSION = $(shell cat VERSION)
-
-
-.PHONY: reqs
-reqs: ## compiles pip requirements (.in -> .txt)
- @$(MAKE_C) requirements reqs
-
-
-.PHONY: install-dev install-prod install-ci
-install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode
- # installing in $(subst install-,,$@) mode
- pip-sync requirements/$(subst install-,,$@).txt
-
-
-PHONY: tests-unit tests-integration tests
-tests: tests-unit tests-integration
-
-tests-unit: ## runs unit tests
- # running unit tests
- @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests/unit
-
-tests-integration: ## runs integration tests against local+production images
- # running integration tests local/(service):production images ...
- @export DOCKER_REGISTRY=local; \
- export DOCKER_IMAGE_TAG=production; \
- pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests/integration
-
-
-.PHONY: run-devel down
-run-devel: .env-devel down ## runs app on host with pg fixture for development
- # running current app
- export $(shell grep -v '^#' $< | xargs -d '\n'); \
- docker-compose -f $(CURDIR)/tests/utils/docker-compose.yml up --detach; \
- uvicorn simcore_service_api_gateway.main:the_app --reload --port=8001 --host=0.0.0.0
-
-down: ## stops pg fixture
- # stopping extra services
- -@docker-compose -f $(CURDIR)/tests/utils/docker-compose.yml down
- # killing any process using port 8001
- -@fuser --kill --verbose --namespace tcp 8001
-
-
-.PHONY: build
-build: ## builds docker image (using main services/docker-compose-build.yml)
- @$(MAKE_C) ${REPO_BASE_DIR} target=${APP_NAME} $@
-
-
-.PHONY: replay
-# TODO: replay shall point to online cookiecutter
-replay: .cookiecutterrc ## re-applies cookiecutter
- # Replaying /home/crespo/devp/osparc-simcore/services/api-gateway/../../../cookiecutter-simcore-py-fastapi ...
- @cookiecutter --no-input --overwrite-if-exists \
- --config-file=$< \
- --output-dir="$(abspath $(CURDIR)/..)" \
- "/home/crespo/devp/osparc-simcore/services/api-gateway/../../../cookiecutter-simcore-py-fastapi"
diff --git a/services/api-gateway/VERSION b/services/api-gateway/VERSION
deleted file mode 100644
index 6da28dde76d..00000000000
--- a/services/api-gateway/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-0.1.1
\ No newline at end of file
diff --git a/services/api-gateway/client b/services/api-gateway/client
deleted file mode 160000
index 6e12ed66860..00000000000
--- a/services/api-gateway/client
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 6e12ed6686037d6ab842a91e9110709680e60f51
diff --git a/services/api-gateway/docker/entrypoint.sh b/services/api-gateway/docker/entrypoint.sh
deleted file mode 100755
index 47b831e0fcc..00000000000
--- a/services/api-gateway/docker/entrypoint.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/sh
-set -o errexit
-set -o nounset
-
-IFS=$(printf '\n\t')
-
-INFO="INFO: [$(basename "$0")] "
-WARNING="WARNING: [$(basename "$0")] "
-ERROR="ERROR: [$(basename "$0")] "
-
-# This entrypoint script:
-#
-# - Executes *inside* of the container upon start as --user [default root]
-# - Notice that the container *starts* as --user [default root] but
-# *runs* as non-root user [scu]
-#
-echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..."
-echo User :"$(id "$(whoami)")"
-echo Workdir :"$(pwd)"
-echo scuUser :"$(id scu)"
-
-
-USERNAME=scu
-GROUPNAME=scu
-
-if [ "${SC_BUILD_TARGET}" = "development" ]
-then
- echo "$INFO" "development mode detected..."
- # NOTE: expects docker run ... -v $(pwd):/devel/services/api-gateway
- DEVEL_MOUNT=/devel/services/api-gateway
-
- stat $DEVEL_MOUNT > /dev/null 2>&1 || \
- (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1)
-
- USERID=$(stat --format=%u $DEVEL_MOUNT)
- GROUPID=$(stat --format=%g $DEVEL_MOUNT)
- GROUPNAME=$(getent group "${GROUPID}" | cut --delimiter=: --fields=1)
-
- if [ "$USERID" -eq 0 ]
- then
- echo "$WARNING" Folder mounted owned by root user... adding "$SC_USER_NAME" to root...
- adduser "${SC_USER_NAME}" root
- else
- # take host's credentials in scu
- if [ -z "$GROUPNAME" ]
- then
- echo "$INFO" mounted folder from "$USERID", creating new group my"${SC_USER_NAME}"
- GROUPNAME=my"${SC_USER_NAME}"
- addgroup --gid "$GROUPID" "$GROUPNAME"
- # change group property of files already around
- find / -path /proc -prune -group "$SC_USER_ID" -exec chgrp --no-dereference "$GROUPNAME" {} \;
- else
- echo "$INFO" "mounted folder from $USERID, adding ${SC_USER_NAME} to $GROUPNAME..."
- adduser "$SC_USER_NAME" "$GROUPNAME"
- fi
-
- echo "$INFO changing $SC_USER_NAME $SC_USER_ID:$SC_USER_ID to $USERID:$GROUPID"
- deluser "${SC_USER_NAME}" > /dev/null 2>&1
- if [ "$SC_USER_NAME" = "$GROUPNAME" ]
- then
- addgroup --gid "$GROUPID" "$GROUPNAME"
- fi
- adduser --disabled-password --gecos "" --uid "$USERID" --gid "$GROUPID" --shell /bin/sh "$SC_USER_NAME" --no-create-home
- # change user property of files already around
- find / -path /proc -prune -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \;
- fi
-
- echo "$INFO installing pythong dependencies..."
- cd services/api-gateway || exit 1
- pip install --no-cache-dir -r requirements/dev.txt
- cd - || exit 1
-fi
-
-if [ ${SC_BOOT_MODE} == "debug-ptvsd" ]
-then
- # NOTE: production does NOT pre-installs ptvsd
- pip install --no-cache-dir ptvsd
-fi
-
-
-echo "$INFO Starting $* ..."
-echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")"
-echo " local dir : $(ls -al)"
-
-su --command "$*" "$SC_USER_NAME"
diff --git a/services/api-gateway/requirements/_base.in b/services/api-gateway/requirements/_base.in
deleted file mode 100644
index 923aeb87716..00000000000
--- a/services/api-gateway/requirements/_base.in
+++ /dev/null
@@ -1,14 +0,0 @@
-#
-# Specifies third-party dependencies for 'services/api-gateway/src'
-#
-# NOTE: ALL version constraints MUST be commented
-
-fastapi[all]
-aiopg[sa]
-tenacity
-passlib[bcrypt]
-pyjwt
-
-
-async-exit-stack # not needed when python>=3.7
-async-generator # not needed when python>=3.7
diff --git a/services/api-gateway/requirements/_base.txt b/services/api-gateway/requirements/_base.txt
deleted file mode 100644
index db53cf71332..00000000000
--- a/services/api-gateway/requirements/_base.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-#
-# This file is autogenerated by pip-compile
-# To update, run:
-#
-# pip-compile --output-file=requirements/_base.txt requirements/_base.in
-#
-aiofiles==0.4.0 # via fastapi
-aiopg[sa]==1.0.0 # via -r requirements/_base.in
-aniso8601==7.0.0 # via graphene
-async-exit-stack==1.0.1 # via -r requirements/_base.in, fastapi
-async-generator==1.10 # via -r requirements/_base.in, fastapi
-bcrypt==3.1.7 # via passlib
-certifi==2019.11.28 # via requests
-cffi==1.14.0 # via bcrypt
-chardet==3.0.4 # via requests
-click==7.1.1 # via uvicorn
-dataclasses==0.7 # via pydantic
-dnspython==1.16.0 # via email-validator
-email-validator==1.0.5 # via fastapi
-fastapi[all]==0.52.0 # via -r requirements/_base.in
-graphene==2.1.8 # via fastapi
-graphql-core==2.3.1 # via graphene, graphql-relay
-graphql-relay==2.0.1 # via graphene
-h11==0.9.0 # via uvicorn
-httptools==0.1.1 # via uvicorn
-idna==2.9 # via email-validator, requests
-itsdangerous==1.1.0 # via fastapi
-jinja2==2.11.1 # via fastapi
-markupsafe==1.1.1 # via jinja2
-passlib[bcrypt]==1.7.2 # via -r requirements/_base.in
-promise==2.3 # via graphql-core, graphql-relay
-psycopg2-binary==2.8.4 # via aiopg, sqlalchemy
-pycparser==2.20 # via cffi
-pydantic==1.4 # via fastapi
-pyjwt==1.7.1 # via -r requirements/_base.in
-python-multipart==0.0.5 # via fastapi
-pyyaml==5.3 # via fastapi
-requests==2.23.0 # via fastapi
-rx==1.6.1 # via graphql-core
-six==1.14.0 # via bcrypt, graphene, graphql-core, graphql-relay, python-multipart, tenacity
-sqlalchemy[postgresql_psycopg2binary]==1.3.15 # via aiopg
-starlette==0.13.2 # via fastapi
-tenacity==6.1.0 # via -r requirements/_base.in
-ujson==2.0.2 # via fastapi
-urllib3==1.25.8 # via requests
-uvicorn==0.11.3 # via fastapi
-uvloop==0.14.0 # via uvicorn
-websockets==8.1 # via uvicorn
diff --git a/services/api-gateway/requirements/_test.txt b/services/api-gateway/requirements/_test.txt
deleted file mode 100644
index 215cfd4d0b6..00000000000
--- a/services/api-gateway/requirements/_test.txt
+++ /dev/null
@@ -1,84 +0,0 @@
-#
-# This file is autogenerated by pip-compile
-# To update, run:
-#
-# pip-compile --output-file=requirements/_test.txt requirements/_test.in
-#
-aiofiles==0.4.0 # via -r requirements/_base.txt, fastapi
-aiohttp==3.6.2 # via pytest-aiohttp
-aiopg[sa]==1.0.0 # via -r requirements/_base.txt
-aniso8601==7.0.0 # via -r requirements/_base.txt, graphene
-astroid==2.3.3 # via pylint
-async-exit-stack==1.0.1 # via -r requirements/_base.txt, fastapi
-async-generator==1.10 # via -r requirements/_base.txt, fastapi
-async-timeout==3.0.1 # via aiohttp
-attrs==19.3.0 # via aiohttp, pytest, pytest-docker
-bcrypt==3.1.7 # via -r requirements/_base.txt, passlib
-certifi==2019.11.28 # via -r requirements/_base.txt, requests
-cffi==1.14.0 # via -r requirements/_base.txt, bcrypt
-change-case==0.5.2 # via -r requirements/_test.in
-chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests
-click==7.1.1 # via -r requirements/_base.txt, uvicorn
-codecov==2.0.16 # via -r requirements/_test.in
-coverage==5.0.4 # via codecov, coveralls, pytest-cov
-coveralls==1.11.1 # via -r requirements/_test.in
-dataclasses==0.7 # via -r requirements/_base.txt, pydantic
-dnspython==1.16.0 # via -r requirements/_base.txt, email-validator
-docopt==0.6.2 # via coveralls
-email-validator==1.0.5 # via -r requirements/_base.txt, fastapi
-faker==4.0.2 # via -r requirements/_test.in
-fastapi[all]==0.52.0 # via -r requirements/_base.txt
-graphene==2.1.8 # via -r requirements/_base.txt, fastapi
-graphql-core==2.3.1 # via -r requirements/_base.txt, graphene, graphql-relay
-graphql-relay==2.0.1 # via -r requirements/_base.txt, graphene
-h11==0.9.0 # via -r requirements/_base.txt, uvicorn
-httptools==0.1.1 # via -r requirements/_base.txt, uvicorn
-idna-ssl==1.1.0 # via aiohttp
-idna==2.9 # via -r requirements/_base.txt, email-validator, idna-ssl, requests, yarl
-importlib-metadata==1.5.0 # via pluggy, pytest
-isort==4.3.21 # via pylint
-itsdangerous==1.1.0 # via -r requirements/_base.txt, fastapi
-jinja2==2.11.1 # via -r requirements/_base.txt, -r requirements/_test.in, fastapi
-lazy-object-proxy==1.4.3 # via astroid
-markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2
-mccabe==0.6.1 # via pylint
-more-itertools==8.2.0 # via pytest
-multidict==4.7.5 # via aiohttp, yarl
-packaging==20.3 # via pytest
-passlib[bcrypt]==1.7.2 # via -r requirements/_base.txt
-pluggy==0.13.1 # via pytest
-promise==2.3 # via -r requirements/_base.txt, graphql-core, graphql-relay
-psycopg2-binary==2.8.4 # via -r requirements/_base.txt, aiopg, sqlalchemy
-py==1.8.1 # via pytest
-pycparser==2.20 # via -r requirements/_base.txt, cffi
-pydantic==1.4 # via -r requirements/_base.txt, fastapi
-pyjwt==1.7.1 # via -r requirements/_base.txt
-pylint==2.4.4 # via -r requirements/_test.in
-pyparsing==2.4.6 # via packaging
-pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.8.1 # via -r requirements/_test.in
-pytest-docker==0.7.2 # via -r requirements/_test.in
-pytest-mock==2.0.0 # via -r requirements/_test.in
-pytest-runner==5.2 # via -r requirements/_test.in
-pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-mock
-python-dateutil==2.8.1 # via faker
-python-multipart==0.0.5 # via -r requirements/_base.txt, fastapi
-pyyaml==5.3 # via -r requirements/_base.txt, fastapi
-requests==2.23.0 # via -r requirements/_base.txt, codecov, coveralls, fastapi
-rx==1.6.1 # via -r requirements/_base.txt, graphql-core
-six==1.14.0 # via -r requirements/_base.txt, astroid, bcrypt, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity
-sqlalchemy[postgresql_psycopg2binary]==1.3.15 # via -r requirements/_base.txt, aiopg
-starlette==0.13.2 # via -r requirements/_base.txt, fastapi
-tenacity==6.1.0 # via -r requirements/_base.txt
-text-unidecode==1.3 # via faker
-typed-ast==1.4.1 # via astroid
-typing-extensions==3.7.4.1 # via aiohttp
-ujson==2.0.2 # via -r requirements/_base.txt, fastapi
-urllib3==1.25.8 # via -r requirements/_base.txt, requests
-uvicorn==0.11.3 # via -r requirements/_base.txt, fastapi
-uvloop==0.14.0 # via -r requirements/_base.txt, uvicorn
-wcwidth==0.1.8 # via pytest
-websockets==8.1 # via -r requirements/_base.txt, uvicorn
-wrapt==1.11.2 # via astroid
-yarl==1.4.2 # via aiohttp
-zipp==3.1.0 # via importlib-metadata
diff --git a/services/api-gateway/setup.cfg b/services/api-gateway/setup.cfg
deleted file mode 100644
index 173cc1d88d4..00000000000
--- a/services/api-gateway/setup.cfg
+++ /dev/null
@@ -1,10 +0,0 @@
-[bumpversion]
-current_version = 0.1.1
-commit = True
-tag = True
-
-[bumpversion:file:VERSION]
-
-[bumpversion:file:.cookiecutterrc]
-search = '{current_version}'
-replace = '{new_version}'
diff --git a/services/api-gateway/src/simcore_service_api_gateway/__init__.py b/services/api-gateway/src/simcore_service_api_gateway/__init__.py
deleted file mode 100644
index 0bdd749bf16..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-""" Python package for the simcore_service_api_gateway.
-
-"""
-from .__version__ import __version__
diff --git a/services/api-gateway/src/simcore_service_api_gateway/__main__.py b/services/api-gateway/src/simcore_service_api_gateway/__main__.py
deleted file mode 100644
index d3e676ec728..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/__main__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-""" Main application entry point
-
- `python -m simcore_service_api_gateway ...`
-
-Why does this file exist, and why __main__? For more info, read:
-
-- https://www.python.org/dev/peps/pep-0338/
-- https://docs.python.org/3/using/cmdline.html#cmdoption-m
-"""
-import uvicorn
-
-from simcore_service_api_gateway.application import get_settings
-from simcore_service_api_gateway.main import the_app
-from simcore_service_api_gateway.settings import AppSettings, BootModeEnum
-
-
-def main():
- settings: AppSettings = get_settings(the_app)
- uvicorn.run(
- the_app,
- host=settings.host,
- port=settings.port,
- reload=settings.boot_mode == BootModeEnum.development,
- log_level=settings.log_level_name.lower(),
- )
-
-
-if __name__ == "__main__":
- main()
diff --git a/services/api-gateway/src/simcore_service_api_gateway/application.py b/services/api-gateway/src/simcore_service_api_gateway/application.py
deleted file mode 100644
index 604e2346ee6..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/application.py
+++ /dev/null
@@ -1,106 +0,0 @@
-""" Helpers wrapping or producing FastAPI's app
-
- These helpers are typically used with main.the_app singleton instance
-"""
-import json
-import types
-from pathlib import Path
-from typing import Callable, Dict
-
-import yaml
-from fastapi import FastAPI
-from fastapi.openapi.docs import get_redoc_html
-from fastapi.openapi.utils import get_openapi
-
-from .__version__ import api_version, api_vtag
-from .settings import AppSettings
-
-FAVICON = "https://osparc.io/resource/osparc/favicon.png"
-LOGO = "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg"
-
-
-def _custom_openapi(zelf: FastAPI) -> Dict:
- if not zelf.openapi_schema:
- openapi_schema = get_openapi(
- title=zelf.title,
- version=zelf.version,
- openapi_version=zelf.openapi_version,
- description=zelf.description,
- routes=zelf.routes,
- openapi_prefix=zelf.openapi_prefix,
- )
-
- # ReDoc vendor extensions
- # SEE https://github.com/Redocly/redoc/blob/master/docs/redoc-vendor-extensions.md
- openapi_schema["info"]["x-logo"] = {
- "url": LOGO,
- "altText": "osparc-simcore logo",
- }
-
- #
- # TODO: load code samples add if function is contained in sample
- # TODO: See if openapi-cli does this already
- #
- openapi_schema["paths"]["/meta"]["get"]["x-code-samples"] = [
- {"lang": "python", "source": "print('hello world')",},
- ]
-
- zelf.openapi_schema = openapi_schema
- return zelf.openapi_schema
-
-
-def _setup_redoc(app: FastAPI):
- from fastapi.applications import Request, HTMLResponse
-
- async def redoc_html(_req: Request) -> HTMLResponse:
- return get_redoc_html(
- openapi_url=app.openapi_url,
- title=app.title + " - redoc",
- redoc_favicon_url=FAVICON,
- )
-
- app.add_route("/redoc", redoc_html, include_in_schema=False)
-
-
-def create(settings: AppSettings) -> FastAPI:
- # factory
- app = FastAPI(
- debug=settings.debug,
- title="Public API Gateway",
- description="osparc-simcore Public RESTful API Specifications",
- version=api_version,
- openapi_url=f"/api/{api_vtag}/openapi.json",
- redoc_url=None,
- )
- app.state.settings = settings
-
- app.openapi = types.MethodType(_custom_openapi, app)
-
- _setup_redoc(app)
-
- return app
-
-
-def get_settings(app: FastAPI) -> AppSettings:
- """ Read-only app settings """
- return app.state["settings"].copy()
-
-
-def add_startup_handler(app: FastAPI, startup_event: Callable):
- # TODO: this is different from fastapi_shortcuts
- # Add Callable with and w/o arguments?
- app.router.add_event_handler("startup", startup_event)
-
-
-def add_shutdown_handler(app: FastAPI, shutdown_event: Callable):
- app.router.add_event_handler("shutdown", shutdown_event)
-
-
-def dump_openapi(app: FastAPI, filepath: Path):
- with open(filepath, "wt") as fh:
- if filepath.suffix == ".json":
- json.dump(app.openapi(), fh, indent=2)
- elif filepath.suffix in (".yaml", ".yml"):
- yaml.safe_dump(app.openapi(), fh)
- else:
- raise ValueError("invalid")
diff --git a/services/api-gateway/src/simcore_service_api_gateway/auth.py b/services/api-gateway/src/simcore_service_api_gateway/auth.py
deleted file mode 100644
index ea8b526c4b2..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/auth.py
+++ /dev/null
@@ -1,94 +0,0 @@
-""" This submodule includes responsibilities from authorization server
-
- +--------+ +---------------+
- | |--(A)- Authorization Request ->| Resource |
- | | | Owner | Authorization request
- | |<-(B)-- Authorization Grant ---| |
- | | +---------------+
- | |
- | | +---------------+
- | |--(C)-- Authorization Grant -->| Authorization |
- | Client | | Server | Token request
- | |<-(D)----- Access Token -------| |
- | | +---------------+
- | |
- | | +---------------+
- | |--(E)----- Access Token ------>| Resource |
- | | | Server |
- | |<-(F)--- Protected Resource ---| |
- +--------+ +---------------+
-
- Figure 1: Abstract Protocol Flow
-
-SEE
- - https://oauth.net/2/
- - https://tools.ietf.org/html/rfc6749
-"""
-# TODO: this module shall delegate the auth functionality to a separate service
-
-import logging
-from typing import Optional
-
-from fastapi import Depends, HTTPException, Security, status
-from fastapi.security import OAuth2PasswordBearer, SecurityScopes
-
-from . import crud_users as crud
-from .__version__ import api_vtag
-from .auth_security import get_access_token_data
-from .schemas import TokenData, User, UserInDB
-
-log = logging.getLogger(__name__)
-
-# callable with request as argument -> extracts token from Authentication header
-oauth2_scheme = OAuth2PasswordBearer(
- tokenUrl=f"{api_vtag}/token",
- scopes={
- "me": "Read information about the current user.",
- "projects": "Read projects.",
- "you": "Some other scope",
- },
-)
-
-
-async def get_current_user(
- security_scopes: SecurityScopes, access_token: str = Depends(oauth2_scheme)
-) -> User:
- if security_scopes.scopes:
- authenticate_value = f'Bearer scope="{security_scopes.scope_str}"'
- else:
- authenticate_value = "Bearer"
-
- credentials_exception = HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Could not validate credentials",
- headers={"WWW-Authenticate": authenticate_value},
- )
-
- # decodes and validates jwt-based access token
- token_data: Optional[TokenData] = get_access_token_data(access_token)
- if token_data is None:
- raise credentials_exception
-
- # identify user
- user: Optional[UserInDB] = crud.get_user(username=token_data.username)
- if user is None:
- raise credentials_exception
-
- # validate scope
- for scope in security_scopes.scopes:
- if scope not in token_data.scopes:
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Not enough permissions",
- headers={"WWW-Authenticate": authenticate_value},
- )
- # auto-converst into User??
- return user
-
-
-async def get_current_active_user(
- current_user: User = Security(get_current_user, scopes=["me"])
-):
- if current_user.disabled:
- raise HTTPException(status_code=400, detail="Inactive user")
- return current_user
diff --git a/services/api-gateway/src/simcore_service_api_gateway/auth_security.py b/services/api-gateway/src/simcore_service_api_gateway/auth_security.py
deleted file mode 100644
index 51fa730d366..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/auth_security.py
+++ /dev/null
@@ -1,89 +0,0 @@
-""" Utility functions related with security
-
-"""
-import logging
-import os
-from datetime import datetime, timedelta
-from typing import Dict, List, Optional
-
-import jwt
-from jwt import PyJWTError
-from passlib.context import CryptContext
-from pydantic import ValidationError
-
-from . import crud_users as crud
-from .schemas import TokenData, UserInDB
-
-log = logging.getLogger(__name__)
-
-# PASSWORDS
-
-__pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
-
-
-def verify_password(plain_password: str, hashed_password: str) -> bool:
- return __pwd_context.verify(plain_password, hashed_password)
-
-
-def get_password_hash(password: str) -> str:
- return __pwd_context.hash(password)
-
-
-def authenticate_user(username: str, password: str) -> Optional[UserInDB]:
- user = crud.get_user(username)
- if not user:
- return None
- if not verify_password(password, user.hashed_password):
- return None
- return user
-
-
-# JSON WEB TOKENS (JWT)
-
-__SIGNING_KEY__ = os.environ.get("SECRET_KEY")
-__ALGORITHM__ = "HS256"
-ACCESS_TOKEN_EXPIRE_MINUTES = 30
-
-
-def create_access_token(
- *, subject: str, scopes: List[str] = None, expires_delta: timedelta = None
-) -> str:
- if expires_delta is None:
- expires_delta = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
-
- # The JWT specification says that there's a key sub, with the subject of the token.
- to_encode = {
- "sub": subject,
- "exp": datetime.utcnow() + expires_delta,
- "scopes": scopes or [],
- }
- encoded_jwt = jwt.encode(to_encode, __SIGNING_KEY__, algorithm=__ALGORITHM__)
- return encoded_jwt
-
-
-def decode_token(encoded_jwt: str) -> Dict:
- return jwt.decode(encoded_jwt, __SIGNING_KEY__, algorithms=[__ALGORITHM__])
-
-
-def get_access_token_data(encoded_jwt: str) -> Optional[TokenData]:
- """
- Decodes and validates JWT and returns TokenData
- Returns None, if invalid token
- """
- try:
- # decode JWT [header.payload.signature] and get payload:
- payload: Dict = decode_token(encoded_jwt)
-
- username: str = payload.get("sub")
- if username is None:
- return None
- token_scopes = payload.get("scopes", [])
-
- # validate
- token_data = TokenData(scopes=token_scopes, username=username)
-
- except (PyJWTError, ValidationError):
- # invalid token!
- log.debug("Invalid token", exc_info=True)
- return None
- return token_data
diff --git a/services/api-gateway/src/simcore_service_api_gateway/crud_users.py b/services/api-gateway/src/simcore_service_api_gateway/crud_users.py
deleted file mode 100644
index 3397b442318..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/crud_users.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""
- API layer to access dbs and return schema-based data structures
-"""
-
-from .schemas import UserInDB
-from typing import Optional
-
-fake_users_db = {
- "pcrespov": {
- "username": "pcrespov",
- "full_name": "Pedrito",
- "email": "perico@example.com",
- "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW",
- "disabled": False,
- },
- "alice": {
- "username": "alice",
- "full_name": "Alice Chains",
- "email": "alicechains@example.com",
- "hashed_password": "$2b$12$gSvqqUPvlXP2tfVFaWK1Be7DlH.PKZbv5H8KnzzVgXXbVxpva.pFm",
- "disabled": True,
- },
-}
-
-
-def get_user(username: str) -> Optional[UserInDB]:
- if username in fake_users_db:
- user_dict = fake_users_db[username]
- return UserInDB(**user_dict)
- return None
diff --git a/services/api-gateway/src/simcore_service_api_gateway/db.py b/services/api-gateway/src/simcore_service_api_gateway/db.py
deleted file mode 100644
index 1cc21e3cdfc..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/db.py
+++ /dev/null
@@ -1,98 +0,0 @@
-""" Access to postgres service
- DUMMY!
-"""
-
-import logging
-from typing import Dict, Optional
-
-import aiopg.sa
-import sqlalchemy as sa
-from aiopg.sa import Engine
-from aiopg.sa.connection import SAConnection
-from aiopg.sa.result import ResultProxy, RowProxy
-from fastapi import FastAPI
-from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
-
-from .application import FastAPI, get_settings
-from .settings import AppSettings
-from .utils.fastapi_shortcuts import add_event_on_shutdown, add_event_on_startup
-
-## from .orm.base import Base
-
-log = logging.getLogger(__name__)
-
-
-def pg_retry_policy(logger: Optional[logging.Logger] = None) -> Dict:
- """ Retry policy for postgres requests upon failure """
- logger = logger or logging.getLogger(__name__)
- return dict(
- wait=wait_fixed(5),
- stop=stop_after_attempt(20),
- before_sleep=before_sleep_log(log, logging.WARNING),
- reraise=True,
- )
-
-
-async def setup_engine(app: FastAPI) -> None:
- settings = get_settings(app)
- engine = await aiopg.sa.create_engine(
- settings.postgres_dsn,
- application_name=f"{__name__}_{id(app)}", # unique identifier per app
- minsize=5,
- maxsize=10,
- )
- app.state.engine = engine
-
-
-async def teardown_engine(app: FastAPI) -> None:
- engine = app.state.engine
- engine.close()
- await engine.wait_closed()
-
-
-async def get_cnx(app: FastAPI):
- engine: Engine = app.state.engine
- async with engine.acquire() as conn:
- yield conn
-
-
-def info(app: FastAPI):
- engine = app.state.engine
- for p in "closed driver dsn freesize maxsize minsize name size timeout".split():
- print(f"{p} = {getattr(engine, p)}")
-
-
-def create_tables(settings: AppSettings):
- log.info("creating tables")
- _engine = sa.create_engine(settings.postgres_dsn)
- ## Base.metadata.create_all(bind=engine)
-
-
-# SETUP ------
-
-
-async def start_db(app: FastAPI):
- # TODO: tmp disabled
- log.debug("DUMMY: Initializing db in %s", app)
-
- @retry(**pg_retry_policy(log))
- async def _go():
- await setup_engine(app)
-
- # if False:
- # log.info("Creating db tables (testing mode)")
- # create_tables()
-
-
-def shutdown_db(app: FastAPI):
- # TODO: tmp disabled
- log.debug("DUMMY: Shutting down db in %s", app)
- # await teardown_engine(app)
-
-
-def setup_db(app: FastAPI):
- add_event_on_startup(app, start_db)
- add_event_on_shutdown(app, shutdown_db)
-
-
-__all__ = ("Engine", "ResultProxy", "RowProxy", "SAConnection")
diff --git a/services/api-gateway/src/simcore_service_api_gateway/endpoints_auth.py b/services/api-gateway/src/simcore_service_api_gateway/endpoints_auth.py
deleted file mode 100644
index eb33a1b1ae5..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/endpoints_auth.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import logging
-from io import StringIO
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException
-from fastapi.security import OAuth2PasswordRequestForm
-
-from .auth_security import authenticate_user, create_access_token
-from .schemas import Token, UserInDB
-from .utils.helpers import json_dumps
-
-log = logging.getLogger(__name__)
-
-
-router = APIRouter()
-
-# NOTE: this path has to be the same as simcore_service_api_gateway.auth.oauth2_scheme
-@router.post("/token", response_model=Token)
-async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
- """
- Returns an access-token provided a valid authorization grant
- """
-
- #
- # - This entrypoint is part of the Authorization Server
- # - Implements access point to obtain access-tokens
- #
- # | | +---------------+
- # | |--(C)-- Authorization Grant -->| Authorization |
- # | Client | | Server | Token request
- # | |<-(D)----- Access Token -------| |
- # | | +---------------+
- #
-
- stream = StringIO()
- print("Form Request", "-" * 20, file=stream)
- for attr in "grant_type username password scopes client_id client_secret".split():
- print("-", attr, ":", getattr(form_data, attr), file=stream)
- print("-" * 20, file=stream)
- log.debug(stream.getvalue())
-
- user: Optional[UserInDB] = authenticate_user(form_data.username, form_data.password)
- if not user:
- raise HTTPException(status_code=400, detail="Incorrect username or password")
-
- access_token = create_access_token(subject=user.username, scopes=form_data.scopes)
-
- # NOTE: this reponse is defined in Oath2
- resp_data = {"access_token": access_token, "token_type": "bearer"}
-
- stream = StringIO()
- print("{:-^30}".format("/token response"), file=stream)
- print(json_dumps(resp_data), file=stream)
- print("-" * 30, file=stream)
- log.debug(stream.getvalue())
-
- return resp_data
diff --git a/services/api-gateway/src/simcore_service_api_gateway/endpoints_check.py b/services/api-gateway/src/simcore_service_api_gateway/endpoints_check.py
deleted file mode 100644
index 406c6c1fa99..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/endpoints_check.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from fastapi import APIRouter
-
-from .__version__ import __version__, api_version, api_vtag
-
-router = APIRouter()
-
-
-@router.get("/meta")
-async def get_service_metadata():
- return {
- "name": __name__.split(".")[0],
- "version": api_version,
- # TODO: a way to get first part of the url?? "version_prefix": f"/{api_vtag}",
- # TODO: sync this info
- "released": {api_vtag: api_version},
- }
-
-
-@router.get("/health")
-async def check_service_health():
- # TODO: if not, raise ServiceUnavailable (use diagnostic concept as in webserver)
- return
diff --git a/services/api-gateway/src/simcore_service_api_gateway/endpoints_studies.py b/services/api-gateway/src/simcore_service_api_gateway/endpoints_studies.py
deleted file mode 100644
index 33adcd50a7d..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/endpoints_studies.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from fastapi import APIRouter, Security
-
-from .auth import get_current_active_user
-from .schemas import User
-
-router = APIRouter()
-
-
-@router.get("/studies")
-async def list_studies(
- current_user: User = Security(get_current_active_user, scopes=["projects"])
-):
- return [{"project_id": "Foo", "owner": current_user.username}]
-
-
-@router.get("/studies/{study_id}")
-async def get_study(
- study_id: str,
- current_user: User = Security(get_current_active_user, scopes=["projects"]),
-):
- return [{"project_id": study_id, "owner": current_user.username}]
-
-
-@router.post("/studies")
-async def create_study(
- current_user: User = Security(get_current_active_user, scopes=["projects"])
-):
- return {"project_id": "Foo", "owner": current_user.username}
-
-
-@router.put("/studies/{study_id}")
-async def replace_study(
- study_id: str,
- current_user: User = Security(get_current_active_user, scopes=["projects"]),
-):
- return {"project_id": study_id, "owner": current_user.username}
-
-
-@router.patch("/studies/{study_id}")
-async def update_study(
- study_id: str,
- current_user: User = Security(get_current_active_user, scopes=["projects"]),
-):
- return {"project_id": study_id, "owner": current_user.username}
-
-
-@router.delete("/studies/{study_id}")
-async def delete_study(
- study_id: str,
- current_user: User = Security(get_current_active_user, scopes=["projects"]),
-):
- _data = {"project_id": study_id, "owner": current_user.username}
- return None
diff --git a/services/api-gateway/src/simcore_service_api_gateway/endpoints_user.py b/services/api-gateway/src/simcore_service_api_gateway/endpoints_user.py
deleted file mode 100644
index f1ecd24af79..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/endpoints_user.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from fastapi import APIRouter, Depends
-
-from .auth import get_current_active_user
-from .schemas import User
-
-router = APIRouter()
-
-
-@router.get("/user", response_model=User)
-async def get_my_profile(current_user: User = Depends(get_current_active_user)):
- return current_user
-
-
-@router.patch("/user", response_model=User)
-async def update_my_profile(current_user: User = Depends(get_current_active_user)):
- return current_user
diff --git a/services/api-gateway/src/simcore_service_api_gateway/main.py b/services/api-gateway/src/simcore_service_api_gateway/main.py
deleted file mode 100644
index c2168caa9ad..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/main.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import logging
-import sys
-from pathlib import Path
-
-from fastapi import FastAPI
-
-from . import (
- application,
- endpoints_auth,
- endpoints_check,
- endpoints_studies,
- endpoints_user,
-)
-from .__version__ import api_vtag
-from .db import setup_db
-from .settings import AppSettings
-from .utils.remote_debug import setup_remote_debugging
-
-current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-
-log = logging.getLogger(__name__)
-
-
-def build_app() -> FastAPI:
- """
- Creates a sets up app
- """
- app_settings = AppSettings()
-
- logging.root.setLevel(app_settings.loglevel)
-
- app: FastAPI = application.create(settings=app_settings)
-
- @app.on_event("startup")
- def startup_event(): # pylint: disable=unused-variable
- log.info("Application started")
- setup_remote_debugging()
-
- # ROUTES
- app.include_router(endpoints_check.router)
-
- app.include_router(endpoints_auth.router, tags=["Token"], prefix=f"/{api_vtag}")
- app.include_router(endpoints_user.router, tags=["User"], prefix=f"/{api_vtag}")
- app.include_router(
- endpoints_studies.router, tags=["Studies"], prefix=f"/{api_vtag}"
- )
-
- # SUBMODULES setups
- setup_db(app)
- # NOTE: add new here!
- # ...
-
- @app.on_event("shutdown")
- def shutdown_event(): # pylint: disable=unused-variable
- log.info("Application shutdown")
-
- return app
-
-
-# SINGLETON FastAPI app
-the_app: FastAPI = build_app()
diff --git a/services/api-gateway/src/simcore_service_api_gateway/schemas.py b/services/api-gateway/src/simcore_service_api_gateway/schemas.py
deleted file mode 100644
index f723f8ec0e0..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/schemas.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from typing import List
-
-from pydantic import BaseModel # pylint: disable=no-name-in-module
-
-
-class Token(BaseModel):
- access_token: str
- token_type: str
-
-
-class TokenData(BaseModel):
- username: str = None
- scopes: List[str] = []
-
-
-class User(BaseModel):
- username: str
- email: str = None
- full_name: str = None
-
-
-class UserInDB(User):
- hashed_password: str
- disabled: bool = None
diff --git a/services/api-gateway/src/simcore_service_api_gateway/settings.py b/services/api-gateway/src/simcore_service_api_gateway/settings.py
deleted file mode 100644
index 0890b750fa3..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/settings.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# pylint: disable=no-name-in-module
-
-# NOTE: SEE https://pydantic-docs.helpmanual.io/usage/settings/ for usage
-
-from pydantic import BaseSettings, Field, SecretStr, validator
-from enum import Enum
-from typing import Optional
-from yarl import URL
-import logging
-
-
-class BootModeEnum(str, Enum):
- production = "production"
- development = "development"
-
-
-class AppSettings(BaseSettings):
- # pylint: disable=no-self-use
- # pylint: disable=no-self-argument
-
- # DOCKER
- boot_mode: Optional[BootModeEnum] = Field(None, env="SC_BOOT_MODE")
-
- # LOGGING
- log_level_name: str = Field("DEBUG", env="loglevel")
-
- @validator("log_level_name")
- def match_logging_level(cls, value) -> str:
- try:
- getattr(logging, value.upper())
- except AttributeError:
- raise ValueError(f"{value.upper()} is not a valid level")
- return value.upper()
-
- @property
- def loglevel(self) -> int:
- return getattr(logging, self.log_level_name)
-
- # POSTGRES
- postgres_user: str
- postgres_password: SecretStr
- postgres_db: str
- postgres_host: str = "localhost"
- postgres_port: int = 5432
-
- @property
- def postgres_dsn(self) -> URL:
- return URL.build(
- scheme="postgresql",
- user=self.postgres_user,
- password=self.postgres_password.get_secret_value(),
- host=self.postgres_host,
- port=self.postgres_port,
- path=f"/{self.postgres_db}",
- )
-
- # WEBSERVER
- webserver_host: str = "webserver"
- webserver_port: int = 8080
-
- # SERVICE SERVER (see : https://www.uvicorn.org/settings/)
- host: str = "localhost" # "0.0.0.0" if is_containerized else "127.0.0.1",
- port: int = 8000
-
- debug: bool = False # If True, debug tracebacks should be returned on errors.
-
- class Config:
- env_prefix = ""
- case_sensitive = False
diff --git a/services/api-gateway/src/simcore_service_api_gateway/utils/fastapi_shortcuts.py b/services/api-gateway/src/simcore_service_api_gateway/utils/fastapi_shortcuts.py
deleted file mode 100644
index 450d979745e..00000000000
--- a/services/api-gateway/src/simcore_service_api_gateway/utils/fastapi_shortcuts.py
+++ /dev/null
@@ -1,32 +0,0 @@
-""" Thin wrappers around fastapi interface for convenience
-
- When to add here a function? These are the goals:
- - overcome common mistakes
- - shortcuts to code faster
- - replicates rationale in aiohttp
-
- And these are the non-goals:
- - replace FastAPI interface
-
-"""
-import asyncio
-from functools import partial
-from typing import Callable
-
-from fastapi import FastAPI
-
-
-def _wrap_partial(func: Callable, app: FastAPI) -> Callable:
- if asyncio.iscoroutinefunction(func):
- return asyncio.coroutine(partial(func, app))
- return partial(func, app)
-
-
-def add_event_on_startup(app: FastAPI, func: Callable) -> None:
- callback = _wrap_partial(func, app)
- app.router.add_event_handler("startup", callback)
-
-
-def add_event_on_shutdown(app: FastAPI, func: Callable) -> None:
- callback = _wrap_partial(func, app)
- app.router.add_event_handler("shutdown", callback)
diff --git a/services/api-gateway/tests/unit/conftest.py b/services/api-gateway/tests/unit/conftest.py
deleted file mode 100644
index 60811f0446c..00000000000
--- a/services/api-gateway/tests/unit/conftest.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
-
-import sys
-from pathlib import Path
-
-import pytest
-
-import simcore_service_api_gateway
-
-
-current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-
-
-@pytest.fixture(scope="session")
-def project_slug_dir():
- folder = current_dir.parent.parent
- assert folder.exists()
- assert any(folder.glob("src/simcore_service_api_gateway"))
- return folder
-
-
-@pytest.fixture(scope="session")
-def package_dir():
- dirpath = Path(simcore_service_api_gateway.__file__).resolve().parent
- assert dirpath.exists()
- return dirpath
-
-
-@pytest.fixture(scope="session")
-def osparc_simcore_root_dir(project_slug_dir):
- root_dir = project_slug_dir.parent.parent
- assert (
- root_dir and root_dir.exists()
- ), "Did you renamed or moved the integration folder under api-gateway??"
- assert any(root_dir.glob("services/api-gateway")), (
- "%s not look like rootdir" % root_dir
- )
- return root_dir
diff --git a/services/api-gateway/tests/unit/test_auth_security.py b/services/api-gateway/tests/unit/test_auth_security.py
deleted file mode 100644
index 8a3fca7314e..00000000000
--- a/services/api-gateway/tests/unit/test_auth_security.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from simcore_service_api_gateway.auth_security import verify_password, get_password_hash
-
-
-def test_has_password():
- hashed_pass = get_password_hash("secret")
- assert hashed_pass != "secret"
- assert verify_password("secret", hashed_pass)
diff --git a/services/api-gateway/tests/unit/test_endpoints_check.py b/services/api-gateway/tests/unit/test_endpoints_check.py
deleted file mode 100644
index afff4432ff3..00000000000
--- a/services/api-gateway/tests/unit/test_endpoints_check.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# pylint: disable=unused-variable
-# pylint: disable=unused-argument
-# pylint: disable=redefined-outer-name
-
-import pytest
-from starlette.testclient import TestClient
-
-from simcore_service_api_gateway import application, endpoints_check
-from simcore_service_api_gateway.__version__ import api_version
-from simcore_service_api_gateway.settings import AppSettings
-
-
-@pytest.fixture
-def client(monkeypatch) -> TestClient:
- monkeypatch.setenv("POSTGRES_USER", "test")
- monkeypatch.setenv("POSTGRES_PASSWORD", "test")
- monkeypatch.setenv("POSTGRES_DB", "test")
- monkeypatch.setenv("LOGLEVEL", "debug")
- monkeypatch.setenv("SC_BOOT_MODE", "production")
-
- # app
- test_settings = AppSettings()
- app = application.create(settings=test_settings)
-
- # routes
- app.include_router(endpoints_check.router, tags=["check"])
-
- # test client:
- # Context manager to trigger events: https://fastapi.tiangolo.com/advanced/testing-events/
- with TestClient(app) as cli:
- yield cli
-
-
-def test_read_service_meta(client: TestClient):
- response = client.get("/meta")
- assert response.status_code == 200
- assert response.json()["version"] == api_version
diff --git a/services/api-gateway/tests/unit/test_settings.py b/services/api-gateway/tests/unit/test_settings.py
deleted file mode 100644
index 85d0385238a..00000000000
--- a/services/api-gateway/tests/unit/test_settings.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from simcore_service_api_gateway.settings import AppSettings, BootModeEnum, URL
-
-# import pytest
-import logging
-from pprint import pprint
-
-
-def test_app_settings(monkeypatch):
- monkeypatch.setenv("POSTGRES_USER", "test")
- monkeypatch.setenv("POSTGRES_PASSWORD", "test")
- monkeypatch.setenv("POSTGRES_DB", "test")
- monkeypatch.setenv("LOGLEVEL", "debug")
- monkeypatch.setenv("SC_BOOT_MODE", "production")
-
- settings = AppSettings()
-
- pprint(settings.dict())
- assert settings.boot_mode == BootModeEnum.production
- assert settings.postgres_dsn == URL("postgresql://test:test@localhost:5432/test")
- assert settings.loglevel == logging.DEBUG
diff --git a/services/api-server/.env-devel b/services/api-server/.env-devel
new file mode 100644
index 00000000000..7b46d0f94f3
--- /dev/null
+++ b/services/api-server/.env-devel
@@ -0,0 +1,24 @@
+#
+# Environment variables used to configure this service
+#
+
+# SEE services/api-server/src/simcore_service_api_server/auth_security.py
+SECRET_KEY=d0d0397de2c85ad26ffd4a0f9643dfe3a0ca3937f99cf3c2e174e11b5ef79880
+
+# SEE services/api-server/src/simcore_service_api_server/settings.py
+LOG_LEVEL=DEBUG
+
+POSTGRES_USER=test
+POSTGRES_PASSWORD=test
+POSTGRES_DB=test
+POSTGRES_HOST=localhost
+
+# Enables debug
+SC_BOOT_MODE=debug-ptvsd
+
+
+# webserver
+WEBSERVER_ENABLED=1
+WEBSERVER_HOST=webserver
+# Take from general .env-devel
+WEBSERVER_SESSION_SECRET_KEY=REPLACE ME with a key of at least length 32.
diff --git a/services/api-server/.gitignore b/services/api-server/.gitignore
new file mode 100644
index 00000000000..3168e0a15ca
--- /dev/null
+++ b/services/api-server/.gitignore
@@ -0,0 +1,4 @@
+# outputs from makefile
+client
+docker-compose.yml
+.env
diff --git a/services/api-gateway/Dockerfile b/services/api-server/Dockerfile
similarity index 67%
rename from services/api-gateway/Dockerfile
rename to services/api-server/Dockerfile
index 42bd8b20907..172ae927177 100644
--- a/services/api-gateway/Dockerfile
+++ b/services/api-server/Dockerfile
@@ -2,14 +2,21 @@ ARG PYTHON_VERSION="3.6.10"
FROM python:${PYTHON_VERSION}-slim as base
#
# USAGE:
-# cd sercices/api-gateway
-# docker build -f Dockerfile -t api-gateway:prod --target production ../../
-# docker run api-gateway:prod
+# cd sercices/api-server
+# docker build -f Dockerfile -t api-server:prod --target production ../../
+# docker run api-server:prod
#
# REQUIRED: context expected at ``osparc-simcore/`` folder because we need access to osparc-simcore/packages
LABEL maintainer=pcrespov
+RUN set -eux; \
+ apt-get update; \
+ apt-get install -y gosu; \
+ rm -rf /var/lib/apt/lists/*; \
+# verify that the binary works
+ gosu nobody true
+
# simcore-user uid=8004(scu) gid=8004(scu) groups=8004(scu)
ENV SC_USER_ID=8004 \
SC_USER_NAME=scu \
@@ -36,7 +43,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
# those from our virtualenv.
ENV PATH="${VIRTUAL_ENV}/bin:$PATH"
-EXPOSE 8000
+EXPOSE 8001
EXPOSE 3000
# -------------------------- Build stage -------------------
@@ -48,11 +55,12 @@ FROM base as build
ENV SC_BUILD_TARGET=build
-RUN apt-get update
-RUN apt-get install -y --no-install-recommends \
- build-essential \
- gcc
+RUN apt-get update &&\
+ apt-get install -y --no-install-recommends \
+ build-essential
+# NOTE: python virtualenv is used here such that installed
+# packages may be moved to production image easily by copying the venv
RUN python -m venv ${VIRTUAL_ENV}
RUN pip install --upgrade --no-cache-dir \
@@ -64,7 +72,7 @@ WORKDIR /build
# install base 3rd party dependencies
# NOTE: copies to /build to avoid overwriting later which would invalidate this layer
-COPY --chown=scu:scu services/api-gateway/requirements/_base.txt .
+COPY --chown=scu:scu services/api-server/requirements/_base.txt .
RUN pip --no-cache-dir install -r _base.txt
@@ -72,16 +80,16 @@ RUN pip --no-cache-dir install -r _base.txt
# CI in master buils & pushes this target to speed-up image build
#
# + /build
-# + services/api-gateway [scu:scu] WORKDIR
+# + services/api-server [scu:scu] WORKDIR
#
FROM build as cache
ENV SC_BUILD_TARGET cache
COPY --chown=scu:scu packages /build/packages
-COPY --chown=scu:scu services/api-gateway /build/services/api-gateway
+COPY --chown=scu:scu services/api-server /build/services/api-server
-WORKDIR /build/services/api-gateway
+WORKDIR /build/services/api-server
RUN pip --no-cache-dir install -r requirements/prod.txt &&\
pip --no-cache-dir list -v
@@ -92,7 +100,7 @@ RUN pip --no-cache-dir install -r requirements/prod.txt &&\
# Runs as scu (non-root user)
#
# + /home/scu $HOME = WORKDIR
-# + services/api-gateway [scu:scu]
+# + services/api-server [scu:scu]
#
FROM base as production
@@ -104,17 +112,17 @@ ENV PYTHONOPTIMIZE=TRUE
WORKDIR /home/scu
COPY --chown=scu:scu --from=cache ${VIRTUAL_ENV} ${VIRTUAL_ENV}
-COPY --chown=scu:scu services/api-gateway/docker services/api-gateway/docker
-
+COPY --chown=scu:scu services/api-server/docker services/api-server/docker
+RUN chmod +x services/api-server/docker/*.sh
HEALTHCHECK --interval=30s \
--timeout=20s \
--start-period=30s \
--retries=3 \
- CMD ["python3", "services/api-gateway/docker/healthcheck.py", "http://localhost:8000/"]
+ CMD ["python3", "services/api-server/docker/healthcheck.py", "http://localhost:8000/"]
-ENTRYPOINT [ "/bin/sh", "services/api-gateway/docker/entrypoint.sh" ]
-CMD ["/bin/sh", "services/api-gateway/docker/boot.sh"]
+ENTRYPOINT [ "/bin/sh", "services/api-server/docker/entrypoint.sh" ]
+CMD ["/bin/sh", "services/api-server/docker/boot.sh"]
# --------------------------Development stage -------------------
@@ -131,5 +139,7 @@ ENV SC_BUILD_TARGET=development
WORKDIR /devel
-ENTRYPOINT ["/bin/sh", "services/api-gateway/docker/entrypoint.sh"]
-CMD ["/bin/sh", "services/api-gateway/docker/boot.sh"]
+RUN chown -R scu:scu ${VIRTUAL_ENV}
+
+ENTRYPOINT ["/bin/sh", "services/api-server/docker/entrypoint.sh"]
+CMD ["/bin/sh", "services/api-server/docker/boot.sh"]
diff --git a/services/api-server/Makefile b/services/api-server/Makefile
new file mode 100644
index 00000000000..f14a4727c80
--- /dev/null
+++ b/services/api-server/Makefile
@@ -0,0 +1,141 @@
+#
+# Targets for DEVELOPMENT of Public API Server
+#
+include ../../scripts/common.Makefile
+
+# Custom variables
+APP_NAME := $(notdir $(CURDIR))
+APP_CLI_NAME := simcore-service-$(APP_NAME)
+export APP_VERSION = $(shell cat VERSION)
+SRC_DIR := $(abspath $(CURDIR)/src/$(subst -,_,$(APP_CLI_NAME)))
+
+.PHONY: reqs
+reqs: ## compiles pip requirements (.in -> .txt)
+ @$(MAKE_C) requirements reqs
+
+
+.PHONY: install-dev install-prod install-ci
+install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode
+ # installing in $(subst install-,,$@) mode
+ pip-sync requirements/$(subst install-,,$@).txt
+
+
+.PHONY: tests-unit tests-integration tests
+tests: tests-unit tests-integration
+
+tests-unit: ## runs unit tests
+ # running unit tests
+ @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests/unit
+
+tests-integration: ## runs integration tests against local+production images
+ # running integration tests local/(service):production images ...
+ @export DOCKER_REGISTRY=local; \
+ export DOCKER_IMAGE_TAG=production; \
+ pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests/integration
+
+
+# DEVELOPMENT TOOLS ########
+
+.env:
+ cp .env-devel $@
+
+docker-compose.yml:
+ cp $(CURDIR)/tests/utils/docker-compose.yml $@
+
+.PHONY: run-devel down
+run-devel: .env docker-compose.yml down ## runs app on host with pg fixture for development [for development]
+ # Starting db (under $<)
+ docker-compose up --detach
+ # start app (under $<)
+ uvicorn simcore_service_api_server.__main__:the_app \
+ --reload --reload-dir $(SRC_DIR) \
+ --port=8000 --host=0.0.0.0
+
+.PHONY: db-tables
+db-tables: .env-devel ## upgrades and creates tables [for development]
+ # Upgrading and creating tables
+ export $(shell grep -v '^#' $< | xargs -d '\n'); \
+ python3 tests/utils/init-pg.py
+
+.PHONY: db-migration
+db-migration: .env-devel ## runs discover and upgrade on running pg-db [for development]
+ # Creating tables
+ export $(shell grep -v '^#' $< | xargs -d '\n'); \
+ sc-pg discover && sc-pg upgrade
+
+down: docker-compose.yml ## stops pg fixture
+ # stopping extra services
+ -@docker-compose -f $< down
+ # killing any process using port 8000
+ -@fuser --kill --verbose --namespace tcp 8000
+
+######################
+
+
+.PHONY: build
+build: ## builds docker image (using main services/docker-compose-build.yml)
+ @$(MAKE_C) ${REPO_BASE_DIR} target=${APP_NAME} $@
+
+
+# GENERATION python client -------------------------------------------------
+.PHONY: python-client generator-help
+# SEE https://openapi-generator.tech/docs/usage#generate
+# SEE https://openapi-generator.tech/docs/generators/python
+
+# NOTE: assumes this repo exists
+GIT_USER_ID := ITISFoundation
+GIT_REPO_ID := osparc-simcore-python-client
+
+SCRIPTS_DIR := $(abspath $(CURDIR)/../../scripts)
+GENERATOR_NAME := python
+
+# TODO: put instead to additional-props.yaml and --config=openapi-generator/python-config.yaml
+ADDITIONAL_PROPS := \
+ generateSourceCodeOnly=false\
+ hideGenerationTimestamp=true\
+ library=urllib3\
+ packageName=osparc\
+ packageUrl=https://github.com/$(GIT_USER_ID)/${GIT_REPO_ID}.git\
+ packageVersion=$(APP_VERSION)\
+ projectName=osparc-simcore-python-api
+ADDITIONAL_PROPS := $(foreach prop,$(ADDITIONAL_PROPS),$(strip $(prop)))
+
+null :=
+space := $(null) #
+comma := ,
+
+# TODO: fix this, shall be generated upon start when flag is provided
+
+
+
+# TODO: code_samples still added by hand!
+client:
+ # cloning $(GIT_USER_ID)/$(GIT_REPO_ID) -> $@
+ git clone git@github.com:$(GIT_USER_ID)/$(GIT_REPO_ID).git $@
+ cd client; git checkout -b "upgrade-${APP_VERSION}"
+
+
+python-client: client ## runs python client generator
+ # download openapi.json
+ curl -O http://localhost:8000/api/v0/openapi.json
+
+ cd $(CURDIR); \
+ $(SCRIPTS_DIR)/openapi-generator-cli.bash generate \
+ --generator-name=$(GENERATOR_NAME) \
+ --git-user-id=$(GIT_USER_ID)\
+ --git-repo-id=$(GIT_REPO_ID)\
+ --http-user-agent="osparc-api/{packageVersion}/{language}"\
+ --input-spec=/local/openapi.json \
+ --output=/local/client \
+ --additional-properties=$(subst $(space),$(comma),$(strip $(ADDITIONAL_PROPS)))\
+ --package-name=osparc\
+ --release-note="Updated to $(APP_VERSION)"
+
+
+
+
+generator-help: ## help on client-api generator
+ # generate help
+ @$(SCRIPTS_DIR)/openapi-generator-cli.bash help generate
+ # generator config help
+ @$(SCRIPTS_DIR)/openapi-generator-cli.bash config-help -g $(GENERATOR_NAME)
diff --git a/services/api-gateway/README.md b/services/api-server/README.md
similarity index 53%
rename from services/api-gateway/README.md
rename to services/api-server/README.md
index 0f60933bad4..4386afbe3a3 100644
--- a/services/api-gateway/README.md
+++ b/services/api-server/README.md
@@ -1,18 +1,18 @@
-# api-gateway
+# api-server
-[![image-size]](https://microbadger.com/images/itisfoundation/api-gateway. "More on itisfoundation/api-gateway.:staging-latest image")
+[![image-size]](https://microbadger.com/images/itisfoundation/api-server. "More on itisfoundation/api-server.:staging-latest image")
-[![image-badge]](https://microbadger.com/images/itisfoundation/api-gateway "More on Public API Gateway image in registry")
-[![image-version]](https://microbadger.com/images/itisfoundation/api-gateway "More on Public API Gateway image in registry")
-[![image-commit]](https://microbadger.com/images/itisfoundation/api-gateway "More on Public API Gateway image in registry")
+[![image-badge]](https://microbadger.com/images/itisfoundation/api-server "More on Public API Server image in registry")
+[![image-version]](https://microbadger.com/images/itisfoundation/api-server "More on Public API Server image in registry")
+[![image-commit]](https://microbadger.com/images/itisfoundation/api-server "More on Public API Server image in registry")
-Platform's API Gateway for external clients
+Platform's public API server
-[image-size]:https://img.shields.io/microbadger/image-size/itisfoundation/api-gateway./staging-latest.svg?label=api-gateway.&style=flat
-[image-badge]:https://images.microbadger.com/badges/image/itisfoundation/api-gateway.svg
-[image-version]https://images.microbadger.com/badges/version/itisfoundation/api-gateway.svg
-[image-commit]:https://images.microbadger.com/badges/commit/itisfoundation/api-gateway.svg
+[image-size]:https://img.shields.io/microbadger/image-size/itisfoundation/api-server./staging-latest.svg?label=api-server.&style=flat
+[image-badge]:https://images.microbadger.com/badges/image/itisfoundation/api-server.svg
+[image-version]:https://images.microbadger.com/badges/version/itisfoundation/api-server.svg
+[image-commit]:https://images.microbadger.com/badges/commit/itisfoundation/api-server.svg
@@ -21,3 +21,8 @@ Platform's API Gateway for external clients
- [Design patterns for modern web APIs](https://blog.feathersjs.com/design-patterns-for-modern-web-apis-1f046635215) by D. Luecke
- [API Design Guide](https://cloud.google.com/apis/design/) by Google Cloud
+
+
+## Acknowledgments
+
+ Many of the ideas in this design were taken from the **excellent** work at https://github.com/nsidnev/fastapi-realworld-example-app by *Nik Sidnev* using the **extraordinary** [fastapi](https://fastapi.tiangolo.com/) package by *Sebastian Ramirez*.
diff --git a/services/api-server/VERSION b/services/api-server/VERSION
new file mode 100644
index 00000000000..9325c3ccda9
--- /dev/null
+++ b/services/api-server/VERSION
@@ -0,0 +1 @@
+0.3.0
\ No newline at end of file
diff --git a/services/api-gateway/docker/boot.sh b/services/api-server/docker/boot.sh
similarity index 61%
rename from services/api-gateway/docker/boot.sh
rename to services/api-server/docker/boot.sh
index cd5cf8506e3..5045365b640 100755
--- a/services/api-gateway/docker/boot.sh
+++ b/services/api-server/docker/boot.sh
@@ -8,16 +8,19 @@ INFO="INFO: [$(basename "$0")] "
# BOOTING application ---------------------------------------------
echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..."
-echo " User :$(id "$(whoami)")"
-echo " Workdir :$(pwd)"
+echo "$INFO" "User :$(id "$(whoami)")"
+echo "$INFO" "Workdir : $(pwd)"
-if [ "${SC_BUILD_TARGET}" = "development" ]
-then
+if [ "${SC_BUILD_TARGET}" = "development" ]; then
echo "$INFO" "Environment :"
- printenv | sed 's/=/: /' | sed 's/^/ /' | sort
+ printenv | sed 's/=/: /' | sed 's/^/ /' | sort
echo "$INFO" "Python :"
python --version | sed 's/^/ /'
command -v python | sed 's/^/ /'
+
+ cd services/api-server || exit 1
+ pip --quiet --no-cache-dir install -r requirements/dev.txt
+ cd - || exit 1
echo "$INFO" "PIP :"
pip list | sed 's/^/ /'
fi
@@ -27,7 +30,7 @@ if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]
then
# NOTE: ptvsd is programmatically enabled inside of the service
# this way we can have reload in place as well
- exec uvicorn simcore_service_api_gateway.main:the_app --reload --host 0.0.0.0
+ exec uvicorn simcore_service_api_server.__main__:the_app --reload --host 0.0.0.0
else
- exec simcore-service-api-gateway
+ exec simcore-service-api-server
fi
diff --git a/services/api-server/docker/entrypoint.sh b/services/api-server/docker/entrypoint.sh
new file mode 100755
index 00000000000..c1bfc513856
--- /dev/null
+++ b/services/api-server/docker/entrypoint.sh
@@ -0,0 +1,76 @@
+#!/bin/sh
+set -o errexit
+set -o nounset
+
+IFS=$(printf '\n\t')
+
+INFO="INFO: [$(basename "$0")] "
+WARNING="WARNING: [$(basename "$0")] "
+ERROR="ERROR: [$(basename "$0")] "
+
+# This entrypoint script:
+#
+# - Executes *inside* of the container upon start as --user [default root]
+# - Notice that the container *starts* as --user [default root] but
+# *runs* as non-root user [scu]
+#
+echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..."
+echo "$INFO" "User :$(id "$(whoami)")"
+echo "$INFO" "Workdir : $(pwd)"
+echo "$INFO" "User : $(id scu)"
+echo "$INFO" "python : $(command -v python)"
+echo "$INFO" "pip : $(command -v pip)"
+
+USERNAME=scu
+GROUPNAME=scu
+
+if [ "${SC_BUILD_TARGET}" = "development" ]; then
+ echo "$INFO" "development mode detected..."
+ # NOTE: expects docker run ... -v $(pwd):$DEVEL_MOUNT
+ DEVEL_MOUNT=/devel/services/api-server
+
+ stat $DEVEL_MOUNT >/dev/null 2>&1 ||
+ (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1)
+
+ echo "$INFO" "setting correct user id/group id..."
+ HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}")
+ HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}")
+ CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1)
+ if [ "$HOST_USERID" -eq 0 ]; then
+ echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..."
+ adduser "$SC_USER_NAME" root
+ else
+ echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..."
+ # take host's credentials in $SC_USER_NAME
+ if [ -z "$CONT_GROUPNAME" ]; then
+ echo "$WARNING" "Creating new group grp$SC_USER_NAME"
+ CONT_GROUPNAME=grp$SC_USER_NAME
+ addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME"
+ else
+ echo "$INFO" "group already exists"
+ fi
+ echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..."
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME"
+
+ echo "$WARNING" "Changing ownership [this could take some time]"
+ echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)"
+ usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME"
+
+ echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME"
+ find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \;
+ # change user property of files already around
+ echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME"
+ find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \;
+ fi
+fi
+
+if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then
+  # NOTE: production does NOT pre-install ptvsd
+ pip install --no-cache-dir ptvsd
+fi
+
+echo "$INFO Starting $* ..."
+echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")"
+echo " local dir : $(ls -al)"
+
+exec gosu "$SC_USER_NAME" "$@"
diff --git a/services/api-gateway/docker/healthcheck.py b/services/api-server/docker/healthcheck.py
similarity index 99%
rename from services/api-gateway/docker/healthcheck.py
rename to services/api-server/docker/healthcheck.py
index 93c59bb7826..23a3ba3ec11 100644
--- a/services/api-gateway/docker/healthcheck.py
+++ b/services/api-server/docker/healthcheck.py
@@ -18,7 +18,6 @@
import os
import sys
-
from urllib.request import urlopen
SUCCESS, UNHEALTHY = 0, 1
diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json
new file mode 100644
index 00000000000..cc38013fa9e
--- /dev/null
+++ b/services/api-server/openapi.json
@@ -0,0 +1,296 @@
+{
+ "openapi": "3.0.2",
+ "info": {
+ "title": "Public API Server",
+ "description": "**osparc-simcore Public RESTful API Specifications**\n## Python Client\n- Github [repo](https://github.com/ITISFoundation/osparc-simcore-python-client)\n- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``\n",
+ "version": "0.3.0",
+ "x-logo": {
+ "url": "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg",
+ "altText": "osparc-simcore logo"
+ }
+ },
+ "paths": {
+ "/v0/meta": {
+ "get": {
+ "tags": [
+ "meta"
+ ],
+ "summary": "Get Service Metadata",
+ "operationId": "get_service_metadata",
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Meta"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v0/me": {
+ "get": {
+ "tags": [
+ "users"
+ ],
+ "summary": "Get My Profile",
+ "operationId": "get_my_profile",
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Profile"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "HTTPBasic": []
+ }
+ ]
+ },
+ "put": {
+ "tags": [
+ "users"
+ ],
+ "summary": "Update My Profile",
+ "operationId": "update_my_profile",
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ProfileUpdate"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Profile"
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "HTTPBasic": []
+ }
+ ]
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "Groups": {
+ "title": "Groups",
+ "required": [
+ "me",
+ "all"
+ ],
+ "type": "object",
+ "properties": {
+ "me": {
+ "$ref": "#/components/schemas/UsersGroup"
+ },
+ "organizations": {
+ "title": "Organizations",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/UsersGroup"
+ },
+ "default": []
+ },
+ "all": {
+ "$ref": "#/components/schemas/UsersGroup"
+ }
+ }
+ },
+ "HTTPValidationError": {
+ "title": "HTTPValidationError",
+ "type": "object",
+ "properties": {
+ "errors": {
+ "title": "Errors",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ValidationError"
+ }
+ }
+ }
+ },
+ "Meta": {
+ "title": "Meta",
+ "required": [
+ "name",
+ "version"
+ ],
+ "type": "object",
+ "properties": {
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "version": {
+ "title": "Version",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$",
+ "type": "string"
+ },
+ "released": {
+ "title": "Released",
+ "type": "object",
+ "additionalProperties": {
+ "type": "string",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$"
+ },
+ "description": "Maps every route's path tag with a released version"
+ }
+ },
+ "example": {
+ "name": "simcore_service_foo",
+ "version": "2.4.45",
+ "released": {
+ "v1": "1.3.4",
+ "v2": "2.4.45"
+ }
+ }
+ },
+ "Profile": {
+ "title": "Profile",
+ "required": [
+ "login",
+ "role"
+ ],
+ "type": "object",
+ "properties": {
+ "first_name": {
+ "title": "First Name",
+ "type": "string",
+ "example": "James"
+ },
+ "last_name": {
+ "title": "Last Name",
+ "type": "string",
+ "example": "Maxwell"
+ },
+ "login": {
+ "title": "Login",
+ "type": "string",
+ "format": "email"
+ },
+ "role": {
+ "title": "Role",
+ "enum": [
+ "ANONYMOUS",
+ "GUEST",
+ "USER",
+ "TESTER"
+ ],
+ "type": "string"
+ },
+ "groups": {
+ "$ref": "#/components/schemas/Groups"
+ },
+ "gravatar_id": {
+ "title": "Gravatar Id",
+ "maxLength": 40,
+ "type": "string",
+ "description": "Hash value of email to retrieve an avatar image from https://www.gravatar.com"
+ }
+ }
+ },
+ "ProfileUpdate": {
+ "title": "ProfileUpdate",
+ "type": "object",
+ "properties": {
+ "first_name": {
+ "title": "First Name",
+ "type": "string",
+ "example": "James"
+ },
+ "last_name": {
+ "title": "Last Name",
+ "type": "string",
+ "example": "Maxwell"
+ }
+ }
+ },
+ "UsersGroup": {
+ "title": "UsersGroup",
+ "required": [
+ "gid",
+ "label"
+ ],
+ "type": "object",
+ "properties": {
+ "gid": {
+ "title": "Gid",
+ "type": "string"
+ },
+ "label": {
+ "title": "Label",
+ "type": "string"
+ },
+ "description": {
+ "title": "Description",
+ "type": "string"
+ }
+ }
+ },
+ "ValidationError": {
+ "title": "ValidationError",
+ "required": [
+ "loc",
+ "msg",
+ "type"
+ ],
+ "type": "object",
+ "properties": {
+ "loc": {
+ "title": "Location",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "msg": {
+ "title": "Message",
+ "type": "string"
+ },
+ "type": {
+ "title": "Error Type",
+ "type": "string"
+ }
+ }
+ }
+ },
+ "securitySchemes": {
+ "HTTPBasic": {
+ "type": "http",
+ "scheme": "basic"
+ }
+ }
+ }
+}
diff --git a/services/api-gateway/requirements/Makefile b/services/api-server/requirements/Makefile
similarity index 100%
rename from services/api-gateway/requirements/Makefile
rename to services/api-server/requirements/Makefile
diff --git a/services/api-server/requirements/_base.in b/services/api-server/requirements/_base.in
new file mode 100644
index 00000000000..89ed918052c
--- /dev/null
+++ b/services/api-server/requirements/_base.in
@@ -0,0 +1,22 @@
+#
+# Specifies third-party dependencies for 'services/api-server/src'
+#
+# NOTE: ALL version constraints MUST be commented
+
+-r ../../../packages/postgres-database/requirements/_base.in
+
+fastapi[all]
+aiopg[sa]
+tenacity
+passlib[bcrypt]
+loguru
+pydantic[dotenv]
+cryptography
+httpx
+
+# TODO: check alternative https://github.com/latchset/jwcrypto/
+pyjwt>=1.7.1 # Vulnerable SEE https://auth0.com/blog/critical-vulnerabilities-in-json-web-token-libraries/?_ga=2.21160507.1609921856.1592236287-1918774871.1591379535
+
+
+async-exit-stack # not needed when python>=3.7
+async-generator # not needed when python>=3.7
diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt
new file mode 100644
index 00000000000..8bed8f166a8
--- /dev/null
+++ b/services/api-server/requirements/_base.txt
@@ -0,0 +1,65 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --output-file=requirements/_base.txt requirements/_base.in
+#
+aiocontextvars==0.2.2 # via loguru
+aiofiles==0.5.0 # via fastapi
+aiopg[sa]==1.0.0 # via -r requirements/_base.in
+aniso8601==7.0.0 # via graphene
+async-exit-stack==1.0.1 # via -r requirements/_base.in, fastapi
+async-generator==1.10 # via -r requirements/_base.in, fastapi
+bcrypt==3.1.7 # via passlib
+certifi==2020.4.5.2 # via httpx, requests
+cffi==1.14.0 # via bcrypt, cryptography
+chardet==3.0.4 # via httpx, requests
+click==7.1.2 # via uvicorn
+contextvars==2.4 # via aiocontextvars, sniffio
+cryptography==2.9.2 # via -r requirements/_base.in
+dataclasses==0.7 # via pydantic
+dnspython==1.16.0 # via email-validator
+email-validator==1.1.1 # via fastapi
+fastapi[all]==0.57.0 # via -r requirements/_base.in
+graphene==2.1.8 # via fastapi
+graphql-core==2.3.2 # via graphene, graphql-relay
+graphql-relay==2.0.1 # via graphene
+h11==0.9.0 # via httpcore, uvicorn
+h2==3.2.0 # via httpcore
+hpack==3.0.0 # via h2
+hstspreload==2020.6.9 # via httpx
+httpcore==0.9.1 # via httpx
+httptools==0.1.1 # via uvicorn
+httpx==0.13.3 # via -r requirements/_base.in
+hyperframe==5.2.0 # via h2
+idna==2.9 # via email-validator, httpx, requests, yarl
+immutables==0.14 # via contextvars
+itsdangerous==1.1.0 # via fastapi
+jinja2==2.11.2 # via fastapi
+loguru==0.5.1 # via -r requirements/_base.in
+markupsafe==1.1.1 # via jinja2
+multidict==4.7.6 # via yarl
+orjson==3.1.0 # via fastapi
+passlib[bcrypt]==1.7.2 # via -r requirements/_base.in
+promise==2.3 # via graphql-core, graphql-relay
+psycopg2-binary==2.8.5 # via aiopg, sqlalchemy
+pycparser==2.20 # via cffi
+pydantic[dotenv]==1.5.1 # via -r requirements/_base.in, fastapi
+pyjwt==1.7.1 # via -r requirements/_base.in
+python-dotenv==0.13.0 # via pydantic
+python-multipart==0.0.5 # via fastapi
+pyyaml==5.3.1 # via fastapi
+requests==2.23.0 # via fastapi
+rfc3986==1.4.0 # via httpx
+rx==1.6.1 # via graphql-core
+six==1.15.0 # via bcrypt, cryptography, graphene, graphql-core, graphql-relay, python-multipart, tenacity
+sniffio==1.1.0 # via httpcore, httpx
+sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiopg
+starlette==0.13.4 # via fastapi
+tenacity==6.2.0 # via -r requirements/_base.in
+ujson==3.0.0 # via fastapi
+urllib3==1.25.9 # via requests
+uvicorn==0.11.5 # via fastapi
+uvloop==0.14.0 # via uvicorn
+websockets==8.1 # via uvicorn
+yarl==1.4.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in
diff --git a/services/api-gateway/requirements/_test.in b/services/api-server/requirements/_test.in
similarity index 66%
rename from services/api-gateway/requirements/_test.in
rename to services/api-server/requirements/_test.in
index 7aa64304254..9e44fc7dc40 100644
--- a/services/api-gateway/requirements/_test.in
+++ b/services/api-server/requirements/_test.in
@@ -1,12 +1,12 @@
#
-# Specifies dependencies required to run 'services/api-gateway/test'
+# Specifies dependencies required to run 'services/api-server/test'
# both for unit and integration tests!!
#
# frozen specs
-r _base.txt
-# 'services/api-gateway/tests/unit' dependencies
+# 'services/api-server/tests/unit' dependencies
# testing
pytest
@@ -15,9 +15,14 @@ pytest-cov
pytest-docker
pytest-mock
pytest-runner
+asgi_lifespan
# fixtures
-Faker
+faker
+
+# db migration
+alembic
+docker
# tools
pylint
@@ -27,3 +32,4 @@ codecov
# scripts/templates
change_case
jinja2
+ptvsd
diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt
new file mode 100644
index 00000000000..cacef8fad5c
--- /dev/null
+++ b/services/api-server/requirements/_test.txt
@@ -0,0 +1,107 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --output-file=requirements/_test.txt requirements/_test.in
+#
+aiocontextvars==0.2.2 # via -r requirements/_base.txt, loguru
+aiofiles==0.5.0 # via -r requirements/_base.txt, fastapi
+aiohttp==3.6.2 # via pytest-aiohttp
+aiopg[sa]==1.0.0 # via -r requirements/_base.txt
+alembic==1.4.2 # via -r requirements/_test.in
+aniso8601==7.0.0 # via -r requirements/_base.txt, graphene
+asgi-lifespan==1.0.1 # via -r requirements/_test.in
+astroid==2.4.2 # via pylint
+async-exit-stack==1.0.1 # via -r requirements/_base.txt, asgi-lifespan, fastapi
+async-generator==1.10 # via -r requirements/_base.txt, fastapi
+async-timeout==3.0.1 # via aiohttp
+attrs==19.3.0 # via aiohttp, pytest, pytest-docker
+bcrypt==3.1.7 # via -r requirements/_base.txt, passlib
+certifi==2020.4.5.2 # via -r requirements/_base.txt, httpx, requests
+cffi==1.14.0 # via -r requirements/_base.txt, bcrypt, cryptography
+change-case==0.5.2 # via -r requirements/_test.in
+chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, httpx, requests
+click==7.1.2 # via -r requirements/_base.txt, uvicorn
+codecov==2.1.6 # via -r requirements/_test.in
+contextvars==2.4 # via -r requirements/_base.txt, aiocontextvars, sniffio
+coverage==4.5.4 # via codecov, coveralls, pytest-cov
+coveralls==2.0.0 # via -r requirements/_test.in
+cryptography==2.9.2 # via -r requirements/_base.txt
+dataclasses==0.7 # via -r requirements/_base.txt, pydantic
+dnspython==1.16.0 # via -r requirements/_base.txt, email-validator
+docker==4.2.1 # via -r requirements/_test.in
+docopt==0.6.2 # via coveralls
+email-validator==1.1.1 # via -r requirements/_base.txt, fastapi
+faker==4.1.0 # via -r requirements/_test.in
+fastapi[all]==0.57.0 # via -r requirements/_base.txt
+graphene==2.1.8 # via -r requirements/_base.txt, fastapi
+graphql-core==2.3.2 # via -r requirements/_base.txt, graphene, graphql-relay
+graphql-relay==2.0.1 # via -r requirements/_base.txt, graphene
+h11==0.9.0 # via -r requirements/_base.txt, httpcore, uvicorn
+h2==3.2.0 # via -r requirements/_base.txt, httpcore
+hpack==3.0.0 # via -r requirements/_base.txt, h2
+hstspreload==2020.6.9 # via -r requirements/_base.txt, httpx
+httpcore==0.9.1 # via -r requirements/_base.txt, httpx
+httptools==0.1.1 # via -r requirements/_base.txt, uvicorn
+httpx==0.13.3 # via -r requirements/_base.txt
+hyperframe==5.2.0 # via -r requirements/_base.txt, h2
+idna-ssl==1.1.0 # via aiohttp
+idna==2.9 # via -r requirements/_base.txt, email-validator, httpx, requests, yarl
+immutables==0.14 # via -r requirements/_base.txt, contextvars
+importlib-metadata==1.6.1 # via pluggy, pytest
+isort==4.3.21 # via pylint
+itsdangerous==1.1.0 # via -r requirements/_base.txt, fastapi
+jinja2==2.11.2 # via -r requirements/_base.txt, -r requirements/_test.in, fastapi
+lazy-object-proxy==1.4.3 # via astroid
+loguru==0.5.1 # via -r requirements/_base.txt
+mako==1.1.3 # via alembic
+markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2, mako
+mccabe==0.6.1 # via pylint
+more-itertools==8.4.0 # via pytest
+multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl
+orjson==3.1.0 # via -r requirements/_base.txt, fastapi
+packaging==20.4 # via pytest
+passlib[bcrypt]==1.7.2 # via -r requirements/_base.txt
+pluggy==0.13.1 # via pytest
+promise==2.3 # via -r requirements/_base.txt, graphql-core, graphql-relay
+psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy
+ptvsd==4.3.2 # via -r requirements/_test.in
+py==1.8.1 # via pytest
+pycparser==2.20 # via -r requirements/_base.txt, cffi
+pydantic[dotenv]==1.5.1 # via -r requirements/_base.txt, fastapi
+pyjwt==1.7.1 # via -r requirements/_base.txt
+pylint==2.5.3 # via -r requirements/_test.in
+pyparsing==2.4.7 # via packaging
+pytest-aiohttp==0.3.0 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
+pytest-docker==0.7.2 # via -r requirements/_test.in
+pytest-mock==3.1.1 # via -r requirements/_test.in
+pytest-runner==5.2 # via -r requirements/_test.in
+pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-mock
+python-dateutil==2.8.1 # via alembic, faker
+python-dotenv==0.13.0 # via -r requirements/_base.txt, pydantic
+python-editor==1.0.4 # via alembic
+python-multipart==0.0.5 # via -r requirements/_base.txt, fastapi
+pyyaml==5.3.1 # via -r requirements/_base.txt, fastapi
+requests==2.23.0 # via -r requirements/_base.txt, codecov, coveralls, docker, fastapi
+rfc3986==1.4.0 # via -r requirements/_base.txt, httpx
+rx==1.6.1 # via -r requirements/_base.txt, graphql-core
+six==1.15.0 # via -r requirements/_base.txt, astroid, bcrypt, cryptography, docker, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity, websocket-client
+sniffio==1.1.0 # via -r requirements/_base.txt, asgi-lifespan, httpcore, httpx
+sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, aiopg, alembic
+starlette==0.13.4 # via -r requirements/_base.txt, fastapi
+tenacity==6.2.0 # via -r requirements/_base.txt
+text-unidecode==1.3 # via faker
+toml==0.10.1 # via pylint
+typed-ast==1.4.1 # via astroid
+typing-extensions==3.7.4.2 # via aiohttp
+ujson==3.0.0 # via -r requirements/_base.txt, fastapi
+urllib3==1.25.9 # via -r requirements/_base.txt, requests
+uvicorn==0.11.5 # via -r requirements/_base.txt, fastapi
+uvloop==0.14.0 # via -r requirements/_base.txt, uvicorn
+wcwidth==0.2.4 # via pytest
+websocket-client==0.57.0 # via docker
+websockets==8.1 # via -r requirements/_base.txt, uvicorn
+wrapt==1.12.1 # via astroid
+yarl==1.4.2 # via -r requirements/_base.txt, aiohttp
+zipp==3.1.0 # via importlib-metadata
diff --git a/services/api-gateway/requirements/ci.txt b/services/api-server/requirements/ci.txt
similarity index 81%
rename from services/api-gateway/requirements/ci.txt
rename to services/api-server/requirements/ci.txt
index ad6e1a65628..f388a6d8850 100644
--- a/services/api-gateway/requirements/ci.txt
+++ b/services/api-server/requirements/ci.txt
@@ -1,4 +1,4 @@
-# Shortcut to install all packages for the contigous integration (CI) of 'services/api-gateway'
+# Shortcut to install all packages for the continuous integration (CI) of 'services/api-server'
#
# - As ci.txt but w/ tests
#
@@ -11,6 +11,7 @@
# installs this repo's packages
../../packages/pytest-simcore/
+../../packages/postgres-database/
# installs current package
.
diff --git a/services/api-gateway/requirements/dev.txt b/services/api-server/requirements/dev.txt
similarity index 90%
rename from services/api-gateway/requirements/dev.txt
rename to services/api-server/requirements/dev.txt
index 2558f95c147..692cfdd6917 100644
--- a/services/api-gateway/requirements/dev.txt
+++ b/services/api-server/requirements/dev.txt
@@ -1,4 +1,4 @@
-# Shortcut to install all packages needed to develop 'services/api-gateway'
+# Shortcut to install all packages needed to develop 'services/api-server'
#
# - As ci.txt but with current and repo packages in develop (edit) mode
#
@@ -11,6 +11,7 @@
# installs this repo's packages
-e ../../packages/pytest-simcore/
+-e ../../packages/postgres-database/[migration]
# installs current package
-e .
diff --git a/services/api-gateway/requirements/prod.txt b/services/api-server/requirements/prod.txt
similarity index 55%
rename from services/api-gateway/requirements/prod.txt
rename to services/api-server/requirements/prod.txt
index 0b7f35cdfa2..12498862a7c 100644
--- a/services/api-gateway/requirements/prod.txt
+++ b/services/api-server/requirements/prod.txt
@@ -1,4 +1,4 @@
-# Shortcut to install 'services/api-gateway' for production
+# Shortcut to install 'services/api-server' for production
#
# - As ci.txt but w/o tests
#
@@ -9,5 +9,8 @@
# installs base requirements
-r _base.txt
+# installs this repo's packages
+../../packages/postgres-database/
+
# installs current package
.
diff --git a/services/api-gateway/tests/unit/test_client_sdk.py b/services/api-server/sandbox/_test_client_sdk.py
similarity index 91%
rename from services/api-gateway/tests/unit/test_client_sdk.py
rename to services/api-server/sandbox/_test_client_sdk.py
index d5b713d2a1c..f2a17a19081 100644
--- a/services/api-gateway/tests/unit/test_client_sdk.py
+++ b/services/api-server/sandbox/_test_client_sdk.py
@@ -1,18 +1,24 @@
-# pylint: disable=unused-variable
-# pylint: disable=unused-argument
-# pylint: disable=redefined-outer-name
-# pylint: disable=protected-access
-
+# pylint: skip-file
+# fmt: off
+# simcore_api_sdk.abc.py
+import abc as _abc
+import json
from pprint import pprint
-from typing import Dict, List
+from typing import Any, Dict, List, Optional
+import aiohttp
+# DEV ---------------------------------------------------------------------
+import attr
import pytest
+# simcore_api_sdk/v0/me_api.py
+from attr import NOTHING
from starlette.testclient import TestClient
+from yarl import URL
-from simcore_service_api_gateway import application, endpoints_check
-from simcore_service_api_gateway.__version__ import api_vtag
-from simcore_service_api_gateway.settings import AppSettings
+from simcore_service_api_server import application, endpoints_check
+from simcore_service_api_server.__version__ import api_vtag
+from simcore_service_api_server.settings import AppSettings
@pytest.fixture
@@ -36,19 +42,6 @@ def client(monkeypatch) -> TestClient:
yield cli
-# DEV ---------------------------------------------------------------------
-import attr
-import aiohttp
-
-from yarl import URL
-from typing import Optional, Any
-import json
-
-
-# simcore_api_sdk.abc.py
-import abc as _abc
-
-
@attr.s(auto_attribs=True)
class ApiResponse:
status: int
@@ -100,9 +93,6 @@ async def _make_request(
# simcore_api_sdk/v0/__init__.py
# from ._openapi import ApiSession
-# simcore_api_sdk/v0/me_api.py
-from attr import NOTHING
-
class MeAPI(API):
async def get(self):
@@ -176,7 +166,7 @@ async def test_client_sdk():
async with ApiSession(api_key="1234", api_secret="secret") as api:
# GET /me is a special resource that is unique
- me: Dict = await api.me.get()
+ me: Profile = await api.me.get()
pprint(me)
# can update SOME entries
@@ -206,7 +196,7 @@ async def test_client_sdk():
prj: Dict = await api.studies.get("1234")
# POST /studies
- new_prj: Dict = await api.studies.create()
+ new_prj: Study = await api.studies.create()
# PUT or PATCH /studies/{prj_id}
# this is a patch
diff --git a/services/api-server/sandbox/_test_schemas.py b/services/api-server/sandbox/_test_schemas.py
new file mode 100644
index 00000000000..7482c3aba47
--- /dev/null
+++ b/services/api-server/sandbox/_test_schemas.py
@@ -0,0 +1,21 @@
+from typing import Optional
+
+import sqlalchemy as sa
+from aiopg.sa.engine import Engine
+from aiopg.sa.result import ResultProxy, RowProxy
+
+import simcore_api_server.model.pg_tables as tbl
+from simcore_api_server.schemas import UserInDB
+
+
+async def test_row_proxy_into_model(engine: Engine):
+    # test how RowProxy converts into UserInDB
+
+ with engine.acquire() as conn:
+ stmt = sa.select([tbl.users,]).where(tbl.users.c.id == 1)
+
+ res: ResultProxy = await conn.execute(stmt)
+ row: Optional[RowProxy] = await res.fetchone()
+
+ user = UserInDB.from_orm(row)
+ assert user
diff --git a/services/api-server/sandbox/api-key-auth.py b/services/api-server/sandbox/api-key-auth.py
new file mode 100644
index 00000000000..824fdb5f8e4
--- /dev/null
+++ b/services/api-server/sandbox/api-key-auth.py
@@ -0,0 +1,38 @@
+# pylint: skip-file
+# fmt: off
+
+import uvicorn
+from fastapi import Depends, FastAPI, HTTPException, Security
+from fastapi.security.api_key import APIKeyHeader
+from starlette.status import HTTP_403_FORBIDDEN
+
+API_KEY = "1234567asdfgh"
+API_KEY_NAME = "access_token"  # name of the header carrying the access token
+
+
+def get_active_user(
+ api_key: str = Security(APIKeyHeader(name=API_KEY_NAME, scheme_name="ApiKeyAuth"))
+) -> str:
+ # the api_key is a jwt created upon login
+ #
+ # - decode jwt
+ # - authenticate user
+
+ if api_key != API_KEY:
+ raise HTTPException(
+ status_code=HTTP_403_FORBIDDEN, detail="Invalid credentials"
+ )
+
+    return api_key  # NOTE: stand-in; should be the user id decoded from the jwt
+
+
+app = FastAPI()
+
+
+@app.get("/foo")
+def foo(user_id: str = Depends(get_active_user)):
+ return f"hi {user_id}"
+
+
+if __name__ == "__main__":
+ uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/services/api-server/sandbox/get_app_state.py b/services/api-server/sandbox/get_app_state.py
new file mode 100644
index 00000000000..342971bd1dd
--- /dev/null
+++ b/services/api-server/sandbox/get_app_state.py
@@ -0,0 +1,24 @@
+import uvicorn
+from fastapi import Depends, FastAPI
+from fastapi.applications import State
+from fastapi.requests import Request
+
+app = FastAPI(title="get_app_state")
+
+# Dependencies WITH arguments
+def _get_app(request: Request) -> FastAPI:
+ return request.app
+
+
+def _get_app_state(request: Request) -> State:
+ return request.app.state
+
+
+@app.get("/app")
+async def get_server_ip(my_app: FastAPI = Depends(_get_app)):
+ assert my_app == app
+ return my_app.title
+
+
+if __name__ == "__main__":
+ uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/services/api-server/sandbox/model_conversions.py b/services/api-server/sandbox/model_conversions.py
new file mode 100644
index 00000000000..0c2c4dfc86c
--- /dev/null
+++ b/services/api-server/sandbox/model_conversions.py
@@ -0,0 +1,91 @@
+from pprint import pprint
+from typing import List
+
+import attr
+from pydantic import BaseModel, ValidationError, constr
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.dialects.postgresql import ARRAY
+from sqlalchemy.ext.declarative import declarative_base
+
+# https://pydantic-docs.helpmanual.io/usage/models/#orm-mode-aka-arbitrary-class-instances
+
+Base = declarative_base()
+
+
+class CompanyOrm(Base):
+ __tablename__ = "companies"
+ id = Column(Integer, primary_key=True, nullable=False)
+ public_key = Column(String(20), index=True, nullable=False, unique=True)
+ name = Column(String(63), unique=True)
+ domains = Column(ARRAY(String(255)))
+
+
+class Bar(BaseModel):
+ apple = "x"
+ banana = "y"
+
+
+class CompanyModel(BaseModel):
+ id: int
+ public_key: constr(max_length=20)
+ name: constr(max_length=63)
+ # NO DOMAINS!
+ other_value: int = 33
+
+ foo: Bar = Bar()
+
+ class Config:
+ orm_mode = True
+
+
+@attr.s(auto_attribs=True)
+class Company:
+ id: int
+ name: str
+ public_key: str = 55
+
+
+if __name__ == "__main__":
+
+ co_orm = CompanyOrm(
+ id=123,
+ public_key="foobar",
+ name="Testing",
+ domains=["example.com", "foobar.com"],
+ )
+ pprint(co_orm)
+
+ print("-" * 30)
+
+ co_model = CompanyModel.from_orm(co_orm)
+
+ print(co_model.__fields_set__)
+ assert "other_value" not in co_model.__fields_set__
+ assert "foo" not in co_model.__fields_set__
+
+ print("-" * 30)
+ assert "other_value" in co_model.__fields__
+
+ pprint(co_model)
+ pprint(co_model.dict())
+ # co_model.json()
+
+ print("-" * 30)
+ pprint(co_model.schema())
+ # co_model.schema_json() ->
+
+ print("-" * 30)
+ print(co_model.__config__)
+
+ # CAN convert from attr type! ORM is everything with attributes?
+ obj = Company(22, "pedro", "foo")
+
+ import pdb
+
+ pdb.set_trace()
+ co_model.from_orm(obj)
+
+ try:
+ co_model.parse_obj(obj)
+ except ValidationError as ee:
+ print("obj has to be a dict!")
diff --git a/services/api-server/sandbox/pydantic-settings.py b/services/api-server/sandbox/pydantic-settings.py
new file mode 100644
index 00000000000..77bfdbeb8ca
--- /dev/null
+++ b/services/api-server/sandbox/pydantic-settings.py
@@ -0,0 +1,51 @@
+## https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support
+import os
+from pathlib import Path
+
+from pydantic import BaseSettings, SecretStr
+
+env_path = Path(".env-ignore")
+
+env_path.write_text(
+ """
+# ignore comment
+ENVIRONMENT="production"
+REDIS_ADDRESS=localhost:6379
+MEANING_OF_LIFE=4000000
+MY_VAR='Hello world'
+POSTGRES_USER=test
+POSTGRES_PASSWORD=test
+POSTGRES_DB=test
+"""
+)
+
+
+os.environ["MEANING_OF_LIFE"] = "42"
+
+
+class PostgresSettings(BaseSettings):
+ user: str
+ password: SecretStr
+ db: str
+
+ class Config:
+ env_file = env_path
+ env_prefix = "POSTGRES_"
+
+
+class Settings(BaseSettings):
+ environment: str
+ meaning_of_life: int = 33
+
+ pg = PostgresSettings()
+
+ class Config:
+ env_file = env_path
+
+
+settings = Settings()
+
+print(settings.json())
+assert settings.meaning_of_life == 42
+assert settings.environment == "production"
+assert settings.pg.password.get_secret_value() == "test"
diff --git a/services/api-server/sandbox/simple_app.py b/services/api-server/sandbox/simple_app.py
new file mode 100644
index 00000000000..7fccb34353c
--- /dev/null
+++ b/services/api-server/sandbox/simple_app.py
@@ -0,0 +1,45 @@
+# pylint: skip-file
+# fmt: off
+
+import json
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple
+
+import uvicorn
+from fastapi import Depends, FastAPI
+from fastapi.requests import Request
+from pydantic import BaseModel, Field
+
+app = FastAPI(title="My app")
+
+
+def _get_app(request: Request) -> FastAPI:
+ return request.app
+
+
+def get_my_user_id(app: FastAPI):
+ return 3
+
+
+class ItemFOO(BaseModel):
+ name: str
+ description: str = None
+ price: float
+ tax: Optional[float] = Field(None, description="description tax")
+
+
+@app.post("/studies/{study_id}")
+async def get_studies(q: int, study_id: int, body: List[ItemFOO]) -> ItemFOO:
+
+ return body
+
+
+def dump_oas():
+ Path("openapi-ignore.json").write_text(json.dumps(app.openapi(), indent=2))
+
+
+app.add_event_handler("startup", dump_oas)
+
+if __name__ == "__main__":
+
+ uvicorn.run("simple_app:app", reload=True, port=8002)
diff --git a/services/api-server/setup.cfg b/services/api-server/setup.cfg
new file mode 100644
index 00000000000..c9e439cbcf3
--- /dev/null
+++ b/services/api-server/setup.cfg
@@ -0,0 +1,7 @@
+[bumpversion]
+current_version = 0.3.0
+commit = True
+message = services/api-server version: {current_version} → {new_version}
+tag = False
+
+[bumpversion:file:VERSION]
diff --git a/services/api-gateway/setup.py b/services/api-server/setup.py
similarity index 88%
rename from services/api-gateway/setup.py
rename to services/api-server/setup.py
index ffab5621e7e..40c1176ae9b 100644
--- a/services/api-gateway/setup.py
+++ b/services/api-server/setup.py
@@ -28,10 +28,10 @@ def read_reqs(reqs_path: Path):
setup(
- name="simcore-service-api-gateway",
+ name="simcore-service-api-server",
version=version,
author="Pedro Crespo (pcrespov)",
- description="Platform's API Gateway for external clients",
+ description="Platform's API Server for external clients",
classifiers=[
"Development Status :: 1 - Planning",
"License :: OSI Approved :: MIT License",
@@ -50,7 +50,7 @@ def read_reqs(reqs_path: Path):
extras_require={"test": test_requirements},
entry_points={
"console_scripts": [
- "simcore-service-api-gateway = simcore_service_api_gateway.__main__:main",
+ "simcore-service-api-server = simcore_service_api_server.__main__:main",
],
},
)
diff --git a/services/api-server/src/simcore_service_api_server/__init__.py b/services/api-server/src/simcore_service_api_server/__init__.py
new file mode 100644
index 00000000000..f3df2ac8ec8
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/__init__.py
@@ -0,0 +1,4 @@
+""" Python package for the simcore_service_api_server.
+
+"""
+from .__version__ import __version__
diff --git a/services/api-server/src/simcore_service_api_server/__main__.py b/services/api-server/src/simcore_service_api_server/__main__.py
new file mode 100644
index 00000000000..76d1c20a3c6
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/__main__.py
@@ -0,0 +1,35 @@
+""" Main application entry point
+
+ `python -m simcore_service_api_server ...`
+
+"""
+import sys
+from pathlib import Path
+
+import uvicorn
+from fastapi import FastAPI
+
+from simcore_service_api_server.core.application import init_app
+from simcore_service_api_server.core.settings import AppSettings, BootModeEnum
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
+# SINGLETON FastAPI app
+the_app: FastAPI = init_app()
+
+
+def main():
+ cfg: AppSettings = the_app.state.settings
+ uvicorn.run(
+ "simcore_service_api_server.__main__:the_app",
+ host=cfg.host,
+ port=cfg.port,
+ reload=cfg.boot_mode == BootModeEnum.development,
+ reload_dirs=[current_dir,],
+ log_level=cfg.log_level_name.lower(),
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/services/api-gateway/src/simcore_service_api_gateway/__version__.py b/services/api-server/src/simcore_service_api_server/__version__.py
similarity index 68%
rename from services/api-gateway/src/simcore_service_api_gateway/__version__.py
rename to services/api-server/src/simcore_service_api_server/__version__.py
index 4c9375a1028..18fd260abe2 100644
--- a/services/api-gateway/src/simcore_service_api_gateway/__version__.py
+++ b/services/api-server/src/simcore_service_api_server/__version__.py
@@ -1,8 +1,8 @@
-""" Current version of the simcore_service_api_gateway application
+""" Current version of the simcore_service_api_server application
"""
import pkg_resources
-__version__ = pkg_resources.get_distribution("simcore_service_api_gateway").version
+__version__ = pkg_resources.get_distribution("simcore_service_api_server").version
major, minor, patch = __version__.split(".")
diff --git a/services/api-gateway/src/simcore_service_api_gateway/utils/__init__.py b/services/api-server/src/simcore_service_api_server/api/__init__.py
similarity index 100%
rename from services/api-gateway/src/simcore_service_api_gateway/utils/__init__.py
rename to services/api-server/src/simcore_service_api_server/api/__init__.py
diff --git a/services/api-gateway/tests/integration/.gitkeep b/services/api-server/src/simcore_service_api_server/api/dependencies/__init__.py
similarity index 100%
rename from services/api-gateway/tests/integration/.gitkeep
rename to services/api-server/src/simcore_service_api_server/api/dependencies/__init__.py
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py
new file mode 100644
index 00000000000..9bcb8e6a962
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py
@@ -0,0 +1,46 @@
+from typing import Optional
+
+from fastapi import Depends, HTTPException, Security, status
+from fastapi.security.api_key import APIKeyHeader
+
+from ...db.repositories.users import UsersRepository
+from ...models.schemas.tokens import TokenData
+from ...services.jwt import get_access_token_data
+from .database import get_repository
+
+# Declaration of security scheme:
+# - Adds components.securitySchemes['APiKey'] to openapi.yaml
+# - callable with request as argument -> extracts token from Authentication header
+#
+
+
+API_KEY_NAME = "APIKey"
+api_key_scheme = APIKeyHeader(name=API_KEY_NAME)
+
+
+async def get_current_user_id(
+ access_token: str = Security(api_key_scheme),
+ users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
+) -> int:
+ def _create_credentials_exception(msg: str):
+
+ return HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=msg,
+ headers={"WWW-Authenticate": API_KEY_NAME},
+ )
+
+ # decodes and validates jwt-based access token
+ token_data: Optional[TokenData] = get_access_token_data(access_token)
+ if token_data is None:
+ raise _create_credentials_exception("Could not validate credentials")
+
+ # identify user
+ identified = await users_repo.any_user_with_id(token_data.user_id)
+ if not identified:
+ raise _create_credentials_exception("Could not validate credentials")
+
+ return token_data.user_id
+
+
+get_active_user_id = get_current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py
new file mode 100644
index 00000000000..809298acf33
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py
@@ -0,0 +1,48 @@
+from fastapi import Depends, HTTPException, Security, status
+from fastapi.security import HTTPBasic, HTTPBasicCredentials
+
+from ...db.repositories.api_keys import ApiKeysRepository
+from ...db.repositories.users import UsersRepository
+from .database import get_repository
+
+# SEE https://swagger.io/docs/specification/authentication/basic-authentication/
+basic_scheme = HTTPBasic()
+
+
+def _create_exception():
+ _unauthorized_headers = {
+ "WWW-Authenticate": f'Basic realm="{basic_scheme.realm}"'
+ if basic_scheme.realm
+ else "Basic"
+ }
+ return HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Invalid API credentials",
+ headers=_unauthorized_headers,
+ )
+
+
+async def get_current_user_id(
+ credentials: HTTPBasicCredentials = Security(basic_scheme),
+ apikeys_repo: ApiKeysRepository = Depends(get_repository(ApiKeysRepository)),
+) -> int:
+ user_id = await apikeys_repo.get_user_id(
+ api_key=credentials.username, api_secret=credentials.password
+ )
+ if not user_id:
+ raise _create_exception()
+ return user_id
+
+
+async def get_active_user_email(
+ user_id: int = Depends(get_current_user_id),
+ users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
+) -> str:
+ email = await users_repo.get_email_from_user_id(user_id)
+ if not email:
+ raise _create_exception()
+ return email
+
+
+# alias
+get_active_user_id = get_current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py
new file mode 100644
index 00000000000..806ae19c519
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py
@@ -0,0 +1,100 @@
+""" This submodule includes responsibilities from authorization server
+
+ +--------+ +---------------+
+ | |--(A)- Authorization Request ->| Resource |
+ | | | Owner | Authorization request
+ | |<-(B)-- Authorization Grant ---| |
+ | | +---------------+
+ | |
+ | | +---------------+
+ | |--(C)-- Authorization Grant -->| Authorization |
+ | Client | | Server | Token request
+ | |<-(D)----- Access Token -------| |
+ | | +---------------+
+ | |
+ | | +---------------+
+ | |--(E)----- Access Token ------>| Resource |
+ | | | Server |
+ | |<-(F)--- Protected Resource ---| |
+ +--------+ +---------------+
+
+ Figure 1: Abstract Protocol Flow
+
+SEE
+ - https://oauth.net/2/
+ - https://tools.ietf.org/html/rfc6749
+"""
+# TODO: this module shall delegate the auth functionality to a separate service
+
+from typing import Optional
+
+from fastapi import Depends, HTTPException, Security, status
+from fastapi.security import OAuth2PasswordBearer, SecurityScopes
+from loguru import logger
+
+from ...__version__ import api_vtag
+from ...db.repositories.users import UsersRepository
+from ...models.schemas.tokens import TokenData
+from ...services.jwt import get_access_token_data
+from .database import get_repository
+
+# Declaration of security scheme:
+# - Adds components.securitySchemes['OAuth2PasswordBearer'] to openapi.yaml
+# - callable with request as argument -> extracts token from Authentication header
+#
+# TODO: check organization of scopes in other APIs
+oauth2_scheme = OAuth2PasswordBearer(
+ tokenUrl=f"{api_vtag}/token",
+ scopes={"read": "Read-only access", "write": "Write access"},
+)
+
+
+async def get_current_user_id(
+ security_scopes: SecurityScopes,
+ access_token: str = Depends(oauth2_scheme),
+ users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
+) -> int:
+ """
+ access_token: extracted access_token from request header
+ security_scopes: iterable with all REQUIRED scopes to run operation
+ """
+
+ def _create_credentials_exception(msg: str):
+ authenticate_value = "Bearer"
+ if security_scopes.scopes:
+ authenticate_value += f' scope="{security_scopes.scope_str}"'
+
+ return HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail=msg,
+ headers={"WWW-Authenticate": authenticate_value},
+ )
+
+ # decodes and validates jwt-based access token
+ token_data: Optional[TokenData] = get_access_token_data(access_token)
+ if token_data is None:
+ raise _create_credentials_exception("Could not validate credentials")
+
+ # identify user
+ identified = await users_repo.any_user_with_id(token_data.user_id)
+ if not identified:
+ raise _create_credentials_exception("Could not validate credentials")
+
+ # Checks whether user has ALL required scopes for this call
+ for required_scope in security_scopes.scopes:
+ if required_scope not in token_data.scopes:
+ logger.debug(
+ "Access denied. Client is missing required scope '{}' ", required_scope
+ )
+ raise _create_credentials_exception(
+ "Missing required scope for this operation"
+ )
+
+ return token_data.user_id
+
+
+async def get_active_user_id(
+ current_user_id: int = Security(get_current_user_id, scopes=["read"])
+) -> int:
+ # FIXME: Adds read scope. rename properly and activate scopes
+ return current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py b/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py
new file mode 100644
index 00000000000..84cbbf6d0c6
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py
@@ -0,0 +1,3 @@
+from .auth_basic import get_active_user_email, get_active_user_id
+
+__all__ = ["get_active_user_id", "get_active_user_email"]
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/database.py b/services/api-server/src/simcore_service_api_server/api/dependencies/database.py
new file mode 100644
index 00000000000..96903570029
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/database.py
@@ -0,0 +1,21 @@
+from typing import AsyncGenerator, Callable, Type
+
+from aiopg.sa import Engine
+from fastapi import Depends
+from fastapi.requests import Request
+
+from ...db.repositories.base import BaseRepository
+
+
+def _get_db_engine(request: Request) -> Engine:
+ return request.app.state.engine
+
+
+def get_repository(repo_type: Type[BaseRepository]) -> Callable:
+ async def _get_repo(
+ engine: Engine = Depends(_get_db_engine),
+ ) -> AsyncGenerator[BaseRepository, None]:
+ async with engine.acquire() as conn:
+ yield repo_type(conn)
+
+ return _get_repo
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
new file mode 100644
index 00000000000..dd387f12c9a
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
@@ -0,0 +1,55 @@
+import json
+import time
+from typing import Dict, Optional
+
+from cryptography.fernet import Fernet
+from fastapi import Depends, HTTPException, status
+from fastapi.requests import Request
+from httpx import AsyncClient
+
+from ...core.settings import AppSettings, WebServerSettings
+from .authentication import get_active_user_email
+
+UNAVAILBLE_MSG = "backend service is disabled or unreachable"
+
+
+def _get_settings(request: Request) -> WebServerSettings:
+ app_settings: AppSettings = request.app.state.settings
+ return app_settings.webserver
+
+
+def _get_encrypt(request: Request) -> Optional[Fernet]:
+ return getattr(request.app.state, "webserver_fernet", None)
+
+
+def get_webserver_client(request: Request) -> AsyncClient:
+ client = getattr(request.app.state, "webserver_client", None)
+ if not client:
+ raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE, detail=UNAVAILBLE_MSG)
+ return client
+
+
+def get_session_cookie(
+ identity: str = Depends(get_active_user_email),
+ settings: WebServerSettings = Depends(_get_settings),
+ fernet: Optional[Fernet] = Depends(_get_encrypt),
+) -> Dict:
+ # Based on aiohttp_session and aiohttp_security
+ # SEE services/web/server/tests/unit/with_dbs/test_login.py
+
+ if fernet is None:
+ raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE, detail=UNAVAILBLE_MSG)
+
+ # builds session cookie
+ cookie_name = settings.session_name
+ cookie_data = json.dumps(
+ {
+ "created": int(time.time()), # now
+ "session": {"AIOHTTP_SECURITY": identity},
+ "path": "/",
+ # extras? e.g. expiration
+ }
+ ).encode("utf-8")
+ encrypted_cookie_data = fernet.encrypt(cookie_data).decode("utf-8")
+
+ return {cookie_name: encrypted_cookie_data}
diff --git a/services/api-gateway/tools/templates/schemas.py.jinja2 b/services/api-server/src/simcore_service_api_server/api/errors/__init__.py
similarity index 100%
rename from services/api-gateway/tools/templates/schemas.py.jinja2
rename to services/api-server/src/simcore_service_api_server/api/errors/__init__.py
diff --git a/services/api-server/src/simcore_service_api_server/api/errors/http_error.py b/services/api-server/src/simcore_service_api_server/api/errors/http_error.py
new file mode 100644
index 00000000000..c5032293589
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/errors/http_error.py
@@ -0,0 +1,7 @@
+from fastapi import HTTPException
+from starlette.requests import Request
+from starlette.responses import JSONResponse
+
+
+async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse:
+ return JSONResponse({"errors": [exc.detail]}, status_code=exc.status_code)
diff --git a/services/api-server/src/simcore_service_api_server/api/errors/validation_error.py b/services/api-server/src/simcore_service_api_server/api/errors/validation_error.py
new file mode 100644
index 00000000000..a8901794a8c
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/errors/validation_error.py
@@ -0,0 +1,26 @@
+from typing import Union
+
+from fastapi.exceptions import RequestValidationError
+from fastapi.openapi.constants import REF_PREFIX
+from fastapi.openapi.utils import validation_error_response_definition
+from pydantic import ValidationError
+from starlette.requests import Request
+from starlette.responses import JSONResponse
+from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY
+
+
+async def http422_error_handler(
+ _: Request, exc: Union[RequestValidationError, ValidationError],
+) -> JSONResponse:
+ return JSONResponse(
+ {"errors": exc.errors()}, status_code=HTTP_422_UNPROCESSABLE_ENTITY,
+ )
+
+
+validation_error_response_definition["properties"] = {
+ "errors": {
+ "title": "Errors",
+ "type": "array",
+ "items": {"$ref": "{0}ValidationError".format(REF_PREFIX)},
+ },
+}
diff --git a/services/api-server/src/simcore_service_api_server/api/root.py b/services/api-server/src/simcore_service_api_server/api/root.py
new file mode 100644
index 00000000000..cab791b02c1
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/root.py
@@ -0,0 +1,15 @@
+from fastapi import APIRouter
+
+from .routes import health, meta, users
+
+router = APIRouter()
+router.include_router(health.router)
+router.include_router(meta.router, tags=["meta"], prefix="/meta")
+
+# TODO: keeps for oauth or apikey schemes
+# router.include_router(authentication.router, tags=["authentication"], prefix="/users")
+
+router.include_router(users.router, tags=["users"], prefix="/me")
+
+## TODO: disables studies for the moment
+# router.include_router(studies.router, tags=["studies"], prefix="/studies")
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/__init__.py b/services/api-server/src/simcore_service_api_server/api/routes/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/authentication/__init__.py b/services/api-server/src/simcore_service_api_server/api/routes/authentication/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py b/services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py
new file mode 100644
index 00000000000..385887dbbbe
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py
@@ -0,0 +1,15 @@
+# FIXME: Until tests
+# pylint: skip-file
+#
+
+from fastapi import APIRouter
+
+router = APIRouter()
+
+
+@router.post("/login", response_model=UserInResponse, name="auth:login")
+async def login(
+ user_login: UserInLogin = Body(..., embed=True, alias="user"),
+ users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
+) -> UserInResponse:
+ pass
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py b/services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py
new file mode 100644
index 00000000000..1502be49133
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py
@@ -0,0 +1,77 @@
+from io import StringIO
+from typing import Optional
+
+from fastapi import APIRouter, Depends, HTTPException
+from fastapi.security import OAuth2PasswordRequestForm
+from loguru import logger
+
+from ....db.repositories.users import UsersRepository
+from ....models.schemas.tokens import Token, TokenData
+from ....services.jwt import create_access_token
+from ....services.serialization import json_dumps
+from ...dependencies.database import get_repository
+
+router = APIRouter()
+
+
+def _compose_msg(*, fd=None, rd=None) -> str:
+ assert not (fd ^ rd), "Mutally exclusive" # nosec
+
+ stream = StringIO()
+
+ if fd:
+ print("Form Request", "-" * 20, file=stream)
+ for (
+ attr
+ ) in "grant_type username password scopes client_id client_secret".split():
+ print("-", attr, ":", getattr(fd, attr), file=stream)
+ print("-" * 20, file=stream)
+ elif rd:
+ print("{:-^30}".format("/token response"), file=stream)
+ print(json_dumps(rd), file=stream)
+ print("-" * 30, file=stream)
+
+ return stream.getvalue()
+
+
# NOTE: this path has to be the same as simcore_service_api_server.auth.oauth2_scheme
@router.post("/token", response_model=Token)
async def login_for_access_token(
    form_data: OAuth2PasswordRequestForm = Depends(),
    users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
):
    """
    Returns an access-token provided a valid authorization grant

    Raises HTTPException 400 when the credentials resolve to no user.
    """

    #
    # - This entrypoint is part of the Authorization Server
    # - Implements access point to obtain access-tokens
    #
    # | | +---------------+
    # | |--(C)-- Authorization Grant -->| Authorization |
    # | Client | | Server | Token request
    # | |<-(D)----- Access Token -------| |
    # | | +---------------+
    #

    logger.debug(_compose_msg(fd=form_data))

    # BUGFIX: UsersRepository.get_user_id takes (api_key, api_secret); the
    # previous call passed user=/password= keywords and raised TypeError.
    # The OAuth2 form's username/password carry the api key/secret pair.
    user_id: Optional[int] = await users_repo.get_user_id(
        api_key=form_data.username, api_secret=form_data.password
    )

    # TODO: check is NOT banned

    if not user_id:
        raise HTTPException(status_code=400, detail="Incorrect username or password")

    # FIXME: expiration disabled since for the moment we do NOT have any renewal mechanisms in place!!!
    # BUGFIX: TokenData is a pydantic model and rejects positional arguments;
    # `TokenData(user_id)` raised TypeError on every successful login.
    access_token = create_access_token(TokenData(user_id=user_id), expires_in_mins=None)

    # NOTE: this response shape is defined by OAuth2
    resp_data = {"access_token": access_token, "token_type": "bearer"}

    logger.debug(_compose_msg(rd=resp_data))

    return resp_data
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/health.py b/services/api-server/src/simcore_service_api_server/api/routes/health.py
new file mode 100644
index 00000000000..d8b50c5f504
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/routes/health.py
@@ -0,0 +1,8 @@
+from fastapi import APIRouter
+
+router = APIRouter()
+
+
@router.get("/", include_in_schema=False)
async def check_service_health():
    """Liveness probe: replies with a constant payload whenever the service is up."""
    smiley = ":-)"
    return smiley
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/meta.py b/services/api-server/src/simcore_service_api_server/api/routes/meta.py
new file mode 100644
index 00000000000..903ce9666b1
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/routes/meta.py
@@ -0,0 +1,15 @@
+from fastapi import APIRouter
+
+from ...__version__ import __version__, api_version, api_vtag
+from ...models.schemas.meta import Meta
+
+router = APIRouter()
+
+
@router.get("", response_model=Meta)
async def get_service_metadata():
    """Return this service's distribution name, current version and the map of
    released API versions."""
    service_name = __name__.split(".")[0]
    released_versions = {api_vtag: api_version}
    return Meta(name=service_name, version=api_version, released=released_versions)
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies.py b/services/api-server/src/simcore_service_api_server/api/routes/studies.py
new file mode 100644
index 00000000000..953e096a331
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/routes/studies.py
@@ -0,0 +1,50 @@
from fastapi import APIRouter, Security

from ..dependencies.authentication import get_active_user_id

router = APIRouter()

# NOTE(review): every handler below is a stub that echoes its inputs; each
# carries a TODO to delegate to the web-server API instead.
# Scope convention: "read" for GET, "write" for mutating verbs.


@router.get("")
async def list_studies(user_id: int = Security(get_active_user_id, scopes=["read"])):
    """List studies for the authenticated user (stub)."""
    # TODO: Replace code by calls to web-server api
    return [{"project_id": "Foo", "owner": user_id}]


@router.get("/{study_id}")
async def get_study(
    study_id: str, user_id: int = Security(get_active_user_id, scopes=["read"]),
):
    """Get one study by id (stub; note it currently returns a one-element list)."""
    # TODO: Replace code by calls to web-server api
    return [{"project_id": study_id, "owner": user_id}]


@router.post("")
async def create_study(user_id: int = Security(get_active_user_id, scopes=["write"])):
    """Create a study (stub)."""
    # TODO: Replace code by calls to web-server api
    return {"project_id": "Foo", "owner": user_id}


@router.put("/{study_id}")
async def replace_study(
    study_id: str, user_id: int = Security(get_active_user_id, scopes=["write"]),
):
    """Replace a study wholesale (stub)."""
    # TODO: Replace code by calls to web-server api
    return {"project_id": study_id, "owner": user_id}


@router.patch("/{study_id}")
async def update_study(
    study_id: str, user_id: int = Security(get_active_user_id, scopes=["write"]),
):
    """Partially update a study (stub)."""
    # TODO: Replace code by calls to web-server api
    return {"project_id": study_id, "owner": user_id}


@router.delete("/{study_id}")
async def delete_study(
    study_id: str, user_id: int = Security(get_active_user_id, scopes=["write"]),
):
    """Delete a study; responds with no content (stub)."""
    # TODO: Replace code by calls to web-server api
    _data = {"project_id": study_id, "owner": user_id}
    return None
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/users.py b/services/api-server/src/simcore_service_api_server/api/routes/users.py
new file mode 100644
index 00000000000..b1076647bf1
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/api/routes/users.py
@@ -0,0 +1,64 @@
+from typing import Dict
+
+from fastapi import APIRouter, Depends, HTTPException, Security
+from httpx import AsyncClient, Response, StatusCode
+from loguru import logger
+
+# SEE: https://www.python-httpx.org/async/
+# TODO: path mapping and operation
+# TODO: if fails, raise for status and translates to service unavailable if fails
+#
+from pydantic import ValidationError
+from starlette import status
+
+from ...models.schemas.profiles import Profile, ProfileUpdate
+from ..dependencies.webserver import get_session_cookie, get_webserver_client
+
+router = APIRouter()
+
+
@router.get("", response_model=Profile)
async def get_my_profile(
    client: AsyncClient = Depends(get_webserver_client),
    session_cookies: Dict = Depends(get_session_cookie),
) -> Profile:
    """Fetch the caller's profile from the web-server service.

    Raises:
        HTTPException(503) when the web-server answers with a 5xx status;
        otherwise the web-server's status/reason is forwarded as-is.
        ValidationError (re-raised) when the payload does not fit Profile.
    """
    resp = await client.get("/v0/me", cookies=session_cookies)

    if resp.status_code == status.HTTP_200_OK:
        data = resp.json()["data"]
        try:
            # FIXME: temporary patch until web-API is reviewed
            data["role"] = data["role"].upper()
            profile = Profile.parse_obj(data)
            return profile
        except ValidationError:
            logger.exception("webserver response invalid")
            raise

    elif StatusCode.is_server_error(resp.status_code):
        # BUGFIX: message read "webserver failed :{}" — normalized to match
        # update_my_profile's wording.
        logger.error("webserver failed: {}", resp.reason_phrase)
        raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)

    raise HTTPException(resp.status_code, resp.reason_phrase)
+
+
@router.put("", response_model=Profile)
async def update_my_profile(
    profile_update: ProfileUpdate,
    client: AsyncClient = Depends(get_webserver_client),
    session_cookies: Dict = Security(get_session_cookie, scopes=["write"]),
) -> Profile:
    """Update the caller's first/last name via the web-server and return the
    refreshed profile.

    Raises HTTPException(503) when the web-server replies with any error status.
    """
    # FIXME: replace by patch
    # TODO: improve. from patch -> put, we need to ensure it has a default in place
    # PUT sends the full payload, so unset names are coerced to empty strings
    profile_update.first_name = profile_update.first_name or ""
    profile_update.last_name = profile_update.last_name or ""
    resp: Response = await client.put(
        "/v0/me", json=profile_update.dict(), cookies=session_cookies
    )

    if StatusCode.is_error(resp.status_code):
        logger.error("webserver failed: {}", resp.reason_phrase)
        raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)

    # re-read so the response reflects the state actually stored
    profile = await get_my_profile(client, session_cookies)
    return profile
diff --git a/services/api-server/src/simcore_service_api_server/core/__init__.py b/services/api-server/src/simcore_service_api_server/core/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py
new file mode 100644
index 00000000000..5f452974dd4
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/core/application.py
@@ -0,0 +1,61 @@
+import sys
+from typing import Optional
+
+from fastapi import FastAPI
+from fastapi.exceptions import RequestValidationError
+from loguru import logger
+from starlette.exceptions import HTTPException
+
+from ..__version__ import api_version, api_vtag
+from ..api.errors.http_error import http_error_handler
+from ..api.errors.validation_error import http422_error_handler
+from ..api.root import router as api_router
+from ..api.routes.health import router as health_router
+from .events import create_start_app_handler, create_stop_app_handler
+from .openapi import override_openapi_method, use_route_names_as_operation_ids
+from .redoc import create_redoc_handler
+from .settings import AppSettings
+
+
def init_app(settings: Optional[AppSettings] = None) -> FastAPI:
    """Create and fully configure the FastAPI application.

    settings: pre-built AppSettings; when None they are parsed from the
        environment via AppSettings.create_default().
    """
    if settings is None:
        settings = AppSettings.create_default()

    # NOTE(review): loguru keeps its default stderr sink, so adding a second
    # one here may duplicate log lines — confirm intended.
    logger.add(sys.stderr, level=settings.loglevel)

    app = FastAPI(
        debug=settings.debug,
        title="Public API Server",
        description="osparc-simcore Public RESTful API Specifications",
        version=api_version,
        openapi_url=f"/api/{api_vtag}/openapi.json",
        docs_url="/dev/docs",
        redoc_url=None,  # default disabled, see below
    )

    logger.debug(settings)
    # expose settings to event handlers and request dependencies
    app.state.settings = settings

    # patch app.openapi() so the schema carries ReDoc vendor extensions
    override_openapi_method(app)

    app.add_event_handler("startup", create_start_app_handler(app))
    app.add_event_handler("shutdown", create_stop_app_handler(app))

    app.add_exception_handler(HTTPException, http_error_handler)
    app.add_exception_handler(RequestValidationError, http422_error_handler)

    # Routing

    # healthcheck at / and at /v0/
    app.include_router(health_router)

    # docs
    redoc_html = create_redoc_handler(app)
    app.add_route("/docs", redoc_html, include_in_schema=False)

    # api under /v*
    app.include_router(api_router, prefix=f"/{api_vtag}")

    # MUST run after all routes have been added
    use_route_names_as_operation_ids(app)

    return app
diff --git a/services/api-server/src/simcore_service_api_server/core/errors.py b/services/api-server/src/simcore_service_api_server/core/errors.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/core/events.py b/services/api-server/src/simcore_service_api_server/core/events.py
new file mode 100644
index 00000000000..1a1fd646857
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/core/events.py
@@ -0,0 +1,40 @@
+from typing import Callable
+
+from fastapi import FastAPI
+from loguru import logger
+
+from ..db.events import close_db_connection, connect_to_db
+from ..services.remote_debug import setup_remote_debugging
+from ..services.webserver import close_webserver, setup_webserver
+from .settings import BootModeEnum
+
+
def create_start_app_handler(app: FastAPI) -> Callable:
    """Build the FastAPI startup event handler.

    On startup it enables remote debugging (debug boot mode only) and brings
    up the postgres and web-server connections when they are enabled.
    """

    async def start_app() -> None:
        logger.info("Application started")
        settings = app.state.settings

        # remote debugger is forced on only in the debug boot mode
        setup_remote_debugging(
            force_enabled=(settings.boot_mode == BootModeEnum.debug)
        )

        if settings.postgres.enabled:
            await connect_to_db(app)

        if settings.webserver.enabled:
            setup_webserver(app)

    return start_app
+
+
def create_stop_app_handler(app: FastAPI) -> Callable:
    """Build the FastAPI shutdown event handler.

    The handler is wrapped in @logger.catch so teardown errors are logged
    instead of propagating out of the shutdown sequence.
    """

    @logger.catch
    async def stop_app() -> None:
        logger.info("Application stopping")
        settings = app.state.settings
        if settings.postgres.enabled:
            await close_db_connection(app)
        if settings.webserver.enabled:
            await close_webserver(app)

    return stop_app
diff --git a/services/api-server/src/simcore_service_api_server/core/openapi.py b/services/api-server/src/simcore_service_api_server/core/openapi.py
new file mode 100644
index 00000000000..f1c3df7b1a7
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/core/openapi.py
@@ -0,0 +1,65 @@
+import json
+import types
+from pathlib import Path
+from typing import Dict
+
+import yaml
+from fastapi import FastAPI
+from fastapi.openapi.utils import get_openapi
+from fastapi.routing import APIRoute
+from loguru import logger
+
+from .redoc import add_vendor_extensions, compose_long_description
+
+
def override_openapi_method(app: FastAPI):
    """Monkey-patch app.openapi so the generated schema gets the composed
    long description and the ReDoc vendor extensions (built lazily, cached)."""
    # TODO: test openapi(*) member does not change interface

    def _custom_openapi_method(zelf: FastAPI, openapi_prefix: str = "") -> Dict:
        """ Overrides FastAPI.openapi member function
        returns OAS schema with vendor extensions
        """
        # build once; subsequent calls return the cached schema
        if not zelf.openapi_schema:

            desc = compose_long_description(zelf.description)
            openapi_schema = get_openapi(
                title=zelf.title,
                version=zelf.version,
                openapi_version=zelf.openapi_version,
                description=desc,
                routes=zelf.routes,
                openapi_prefix=openapi_prefix,
                tags=zelf.openapi_tags,
            )

            add_vendor_extensions(openapi_schema)

            zelf.openapi_schema = openapi_schema
        return zelf.openapi_schema

    # bind as a method so `zelf` receives the app instance
    app.openapi = types.MethodType(_custom_openapi_method, app)
+
+
def use_route_names_as_operation_ids(app: FastAPI) -> None:
    """
    Overrides default operation_ids assigning the same name as the handler functions

    MUST be called only after all routes have been added.

    PROS: auto-generated client has one-to-one correspondence and human readable names
    CONS: highly coupled. Changes in server handler names will change client
    """
    api_routes = (r for r in app.routes if isinstance(r, APIRoute))
    for api_route in api_routes:
        api_route.operation_id = api_route.name
+
+
def dump_openapi(app: FastAPI, filepath: Path):
    """Write the app's OpenAPI schema to *filepath*.

    The format is chosen from the file extension: .json, .yaml or .yml.

    Raises:
        ValueError: if the extension is not one of the supported formats.
    """
    logger.info("Dumping openapi specs as {}", filepath)
    # explicit utf-8 keeps the dump identical across platform locales
    with open(filepath, "wt", encoding="utf-8") as fh:
        if filepath.suffix == ".json":
            json.dump(app.openapi(), fh, indent=2)
        elif filepath.suffix in (".yaml", ".yml"):
            yaml.safe_dump(app.openapi(), fh)
        else:
            # BUGFIX: was a bare ValueError("invalid") — name the problem
            raise ValueError(
                f"Unsupported extension '{filepath.suffix}': expected .json, .yaml or .yml"
            )
diff --git a/services/api-server/src/simcore_service_api_server/core/redoc.py b/services/api-server/src/simcore_service_api_server/core/redoc.py
new file mode 100644
index 00000000000..45ff4d407db
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/core/redoc.py
@@ -0,0 +1,51 @@
+from typing import Callable, Dict
+
+from fastapi import FastAPI
+from fastapi.applications import HTMLResponse, Request
+from fastapi.openapi.docs import get_redoc_html
+
+# from ..__version__ import api_vtag
+
+FAVICON = "https://osparc.io/resource/osparc/favicon.png"
+LOGO = "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg"
+PYTHON_CODE_SAMPLES_BASE_URL = "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore-python-client/master/code_samples"
+
+
def compose_long_description(description: str) -> str:
    """Prepend python-client pointers to *description* for the OpenAPI docs page."""
    doc_lines = [
        f"**{description}**",
        "## Python Library",
        "- Documentation (https://itisfoundation.github.io/osparc-simcore-python-client/#/)",
        "- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``",
    ]
    return "\n".join(doc_lines) + "\n"
+
+
def add_vendor_extensions(openapi_schema: Dict):
    """Attach ReDoc vendor extensions to *openapi_schema* (mutated in place)."""
    # SEE https://github.com/Redocly/redoc/blob/master/docs/redoc-vendor-extensions.md
    x_logo = {"url": LOGO, "altText": "osparc-simcore logo"}
    openapi_schema["info"]["x-logo"] = x_logo

    #
    # TODO: load code samples add if function is contained in sample
    # TODO: See if openapi-cli does this already
    # TODO: check that all url are available before exposing
    # openapi_schema["paths"][f"/{api_vtag}/meta"]["get"]["x-code-samples"] = [
    #     {
    #         "lang": "python",
    #         "source": {"$ref": f"{PYTHON_CODE_SAMPLES_BASE_URL}/meta/get.py"},
    #     },
    # ]
+
+
def create_redoc_handler(app: FastAPI) -> Callable:
    """Return an async request handler that renders the ReDoc page for *app*."""

    async def _serve_redoc(_req: Request) -> HTMLResponse:
        page_title = app.title + " - redoc"
        return get_redoc_html(
            openapi_url=app.openapi_url,
            title=page_title,
            redoc_favicon_url=FAVICON,
        )

    return _serve_redoc
diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py
new file mode 100644
index 00000000000..8fd229c62b3
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/core/settings.py
@@ -0,0 +1,107 @@
+import logging
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseSettings, Field, SecretStr, validator
+from yarl import URL
+
+
class BootModeEnum(str, Enum):
    # values expected in the SC_BOOT_MODE env var (see AppSettings.boot_mode)
    debug = "debug-ptvsd"
    production = "production"
    development = "development"


class _CommonConfig:
    # shared pydantic BaseSettings config: env vars matched case-insensitively,
    # with a local .env file honored
    case_sensitive = False
    env_file = ".env"
+
+
class WebServerSettings(BaseSettings):
    """Connection settings for the web-server service (env prefix WEBSERVER_)."""

    enabled: bool = Field(
        True, description="Enables/Disables connection with webserver service"
    )
    host: str
    port: int = 8080
    session_secret_key: SecretStr
    session_name: str = "osparc.WEBAPI_SESSION"
    vtag: str = "v0"

    @property
    def base_url(self) -> str:
        """Base URL of the web-server including its API version tag."""
        # FIXME: httpx.client does not consider vtag
        return f"http://{self.host}:{self.port}/{self.vtag}"

    class Config(_CommonConfig):
        env_prefix = "WEBSERVER_"
+
+
class PostgresSettings(BaseSettings):
    """Connection settings for postgres (env prefix POSTGRES_)."""

    enabled: bool = Field(
        True, description="Enables/Disables connection with postgres service"
    )
    user: str
    password: SecretStr
    db: str
    host: str
    port: int = 5432

    # connection-pool bounds passed to aiopg's create_engine (see db/events.py)
    minsize: int = 10
    maxsize: int = 10

    @property
    def dsn(self) -> URL:
        """Full postgres connection URL; reveals the password — do not log."""
        return URL.build(
            scheme="postgresql",
            user=self.user,
            password=self.password.get_secret_value(),
            host=self.host,
            port=self.port,
            path=f"/{self.db}",
        )

    class Config(_CommonConfig):
        env_prefix = "POSTGRES_"
+
+
class AppSettings(BaseSettings):
    """Top-level application settings, parsed from the environment / .env file."""

    @classmethod
    def create_default(cls) -> "AppSettings":
        # This call triggers parsers
        return cls(postgres=PostgresSettings(), webserver=WebServerSettings())

    # pylint: disable=no-self-use
    # pylint: disable=no-self-argument

    # DOCKER
    boot_mode: Optional[BootModeEnum] = Field(..., env="SC_BOOT_MODE")

    # LOGGING
    log_level_name: str = Field("DEBUG", env="LOG_LEVEL")

    @validator("log_level_name")
    def match_logging_level(cls, value: str) -> str:
        """Normalize to upper-case and ensure it names a real logging level.

        BUGFIX: the previous check only required `logging` to expose an
        attribute with that name, so e.g. LOG_LEVEL=BASIC_FORMAT passed
        validation and later made `loglevel` return a string.
        """
        name = value.upper()
        if not isinstance(getattr(logging, name, None), int):
            raise ValueError(f"{name} is not a valid level")
        return name

    @property
    def loglevel(self) -> int:
        """Numeric logging level (e.g. logging.DEBUG) for log sinks."""
        return getattr(logging, self.log_level_name)

    # POSTGRES
    postgres: PostgresSettings

    # WEB-SERVER SERVICE
    webserver: WebServerSettings

    # SERVICE SERVER (see : https://www.uvicorn.org/settings/)
    host: str = "0.0.0.0"  # "0.0.0.0" if is_containerized else "127.0.0.1",
    port: int = 8000

    debug: bool = False  # If True, debug tracebacks should be returned on errors.

    class Config(_CommonConfig):
        env_prefix = ""
diff --git a/services/api-server/src/simcore_service_api_server/db/__init__.py b/services/api-server/src/simcore_service_api_server/db/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/db/errors.py b/services/api-server/src/simcore_service_api_server/db/errors.py
new file mode 100644
index 00000000000..bb3ef669024
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/db/errors.py
@@ -0,0 +1,2 @@
# db-layer lookup failure, distinct from transport errors like DatabaseError
class EntityDoesNotExist(Exception):
    """Raised when entity was not found in database."""
diff --git a/services/api-server/src/simcore_service_api_server/db/events.py b/services/api-server/src/simcore_service_api_server/db/events.py
new file mode 100644
index 00000000000..8523967ddb4
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/db/events.py
@@ -0,0 +1,54 @@
+import logging
+from io import StringIO
+
+from aiopg.sa import Engine, create_engine
+from fastapi import FastAPI
+from loguru import logger
+from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
+
+from ..core.settings import PostgresSettings
+
# engine attributes summarized by _compose_info_on_engine below
ENGINE_ATTRS = "closed driver dsn freesize maxsize minsize name size timeout".split()


# tenacity policy for the initial db connection: retry every 5s, up to
# 20 attempts, logging a warning before each sleep, then re-raise
pg_retry_policy = dict(
    wait=wait_fixed(5),
    stop=stop_after_attempt(20),
    before_sleep=before_sleep_log(logger, logging.WARNING),
    reraise=True,
)
+
+
def _compose_info_on_engine(app: FastAPI) -> str:
    """Summarize the configured db engine's attributes as a one-line string."""
    engine = app.state.engine
    attr_pairs = [f"{attr}={getattr(engine, attr)}" for attr in ENGINE_ATTRS]
    return "Setup engine: " + "; ".join(attr_pairs) + "; "
+
+
@retry(**pg_retry_policy)
async def connect_to_db(app: FastAPI) -> None:
    """Create the aiopg engine and store it on app.state.engine.

    Retried per pg_retry_policy (fixed 5s wait, up to 20 attempts) so the
    service survives postgres coming up after it.
    """
    # BUGFIX: log message typo "Connenting" -> "Connecting"
    logger.debug("Connecting db ...")

    cfg: PostgresSettings = app.state.settings.postgres
    engine: Engine = await create_engine(
        str(cfg.dsn),
        application_name=f"{__name__}_{id(app)}",  # unique identifier per app
        minsize=cfg.minsize,
        maxsize=cfg.maxsize,
    )
    logger.debug("Connected to {}", engine.dsn)
    app.state.engine = engine

    logger.debug(_compose_info_on_engine(app))
+
+
async def close_db_connection(app: FastAPI) -> None:
    """Close the engine created by connect_to_db and wait for all its
    connections to be released."""
    logger.debug("Disconnecting db ...")

    engine: Engine = app.state.engine
    engine.close()
    await engine.wait_closed()
    logger.debug("Disconnected from {}", engine.dsn)
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/__init__.py b/services/api-server/src/simcore_service_api_server/db/repositories/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py b/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py
new file mode 100644
index 00000000000..7602e872daf
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py
@@ -0,0 +1,38 @@
+from typing import Optional
+
+import sqlalchemy as sa
+from loguru import logger
+from psycopg2 import DatabaseError
+
+from .. import tables as tbl
+from .base import BaseRepository
+
+# from ...models.domain.users import User, UserInDB
+
+# For psycopg2 errors SEE https://www.psycopg.org/docs/errors.html#sqlstate-exception-classes
+
+
class ApiKeysRepository(BaseRepository):
    """Read-only queries over the api_keys table.

    NOTE(review): duplicates UsersRepository.get_user_id/any_user_with_id
    (db/repositories/users.py) except for the DatabaseError handling —
    consider keeping a single implementation.
    """

    async def get_user_id(self, api_key: str, api_secret: str) -> Optional[int]:
        """Resolve the user id owning the given key/secret pair, or None."""
        stmt = sa.select([tbl.api_keys.c.user_id,]).where(
            sa.and_(
                tbl.api_keys.c.api_key == api_key,
                tbl.api_keys.c.api_secret == api_secret,
            )
        )

        try:
            user_id: Optional[int] = await self.connection.scalar(stmt)

        except DatabaseError as err:
            # db failures are treated as "no such user" and only logged at
            # debug level — deliberate best-effort, auth simply fails
            logger.debug(f"Failed to get user id: {err}")
            user_id = None

        return user_id

    async def any_user_with_id(self, user_id: int) -> bool:
        """True when at least one api-key row belongs to user_id."""
        # FIXME: shall identify api_key or api_secret instead
        stmt = sa.select([tbl.api_keys.c.user_id,]).where(
            tbl.api_keys.c.user_id == user_id
        )
        return (await self.connection.scalar(stmt)) is not None
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/base.py b/services/api-server/src/simcore_service_api_server/db/repositories/base.py
new file mode 100644
index 00000000000..81f04c0f7b5
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/db/repositories/base.py
@@ -0,0 +1,15 @@
+from aiopg.sa.connection import SAConnection
+
+
class BaseRepository:
    """
    Repositories are pulled at every request
    All queries to db within that request use same connection
    """

    def __init__(self, conn: SAConnection) -> None:
        # conn: request-scoped aiopg connection injected by the db dependency
        self._conn = conn

    @property
    def connection(self) -> SAConnection:
        """The request-scoped database connection."""
        return self._conn
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/users.py b/services/api-server/src/simcore_service_api_server/db/repositories/users.py
new file mode 100644
index 00000000000..f10ab46e081
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/db/repositories/users.py
@@ -0,0 +1,107 @@
+import hashlib
+from typing import List, Optional
+
+import sqlalchemy as sa
+from aiopg.sa.result import RowProxy
+
+from ...models.schemas.profiles import Profile
+from ..tables import GroupType, api_keys, groups, user_to_groups, users
+from .base import BaseRepository
+
+
class UsersRepository(BaseRepository):
    """Queries over users, groups and api_keys backing the /me endpoints."""

    async def get_user_id(self, api_key: str, api_secret: str) -> Optional[int]:
        """Resolve the user id owning the given api key/secret pair, or None."""
        stmt = sa.select([api_keys.c.user_id,]).where(
            sa.and_(api_keys.c.api_key == api_key, api_keys.c.api_secret == api_secret,)
        )
        user_id: Optional[int] = await self.connection.scalar(stmt)
        return user_id

    async def any_user_with_id(self, user_id: int) -> bool:
        """True when at least one api-key row belongs to user_id."""
        # FIXME: shall identify api_key or api_secret instead
        stmt = sa.select([api_keys.c.user_id,]).where(api_keys.c.user_id == user_id)
        return (await self.connection.scalar(stmt)) is not None

    async def get_email_from_user_id(self, user_id: int) -> Optional[str]:
        """Look up a user's email by id, or None when unknown."""
        stmt = sa.select([users.c.email,]).where(users.c.id == user_id)
        email: Optional[str] = await self.connection.scalar(stmt)
        return email

    # TEMPORARY ----
    async def get_profile_from_userid(self, user_id: int) -> Optional[Profile]:
        """Assemble a Profile from the user row and all its group memberships.

        Returns None when the user has no group membership rows at all.
        """
        # one row per (user, group); use_labels prefixes columns with their
        # table name (users_email, groups_gid, ...), disambiguating c.name
        stmt = (
            sa.select(
                [
                    users.c.email,
                    users.c.role,
                    users.c.name,
                    users.c.primary_gid,
                    groups.c.gid,
                    groups.c.name,
                    groups.c.description,
                    groups.c.type,
                ],
                use_labels=True,
            )
            .select_from(
                users.join(
                    user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid),
                    users.c.id == user_to_groups.c.uid,
                )
            )
            .where(users.c.id == user_id)
            .order_by(sa.asc(groups.c.name))
        )

        # all user_group combinations but only the group changes
        result = await self.connection.execute(stmt)
        user_groups: List[RowProxy] = await result.fetchall()

        if not user_groups:
            return None

        # get the primary group and the all group
        # NOTE(review): if no row matches the primary gid / EVERYONE type,
        # these remain {} and the lookups below raise KeyError — confirm the
        # schema guarantees both rows always exist.
        user_primary_group = all_group = {}
        other_groups = []
        for user_group in user_groups:
            if user_group["users_primary_gid"] == user_group["groups_gid"]:
                user_primary_group = user_group
            elif user_group["groups_type"] == GroupType.EVERYONE:
                all_group = user_group
            else:
                other_groups.append(user_group)

        # users.name is split on "." into first/last name; the appended ""
        # guards the no-dot case for parts[1]
        parts = user_primary_group["users_name"].split(".") + [""]
        return Profile.parse_obj(
            {
                "login": user_primary_group["users_email"],
                "first_name": parts[0],
                "last_name": parts[1],
                "role": user_primary_group["users_role"].name.capitalize(),
                "gravatar_id": gravatar_hash(user_primary_group["users_email"]),
                "groups": {
                    "me": {
                        "gid": user_primary_group["groups_gid"],
                        "label": user_primary_group["groups_name"],
                        "description": user_primary_group["groups_description"],
                    },
                    "organizations": [
                        {
                            "gid": group["groups_gid"],
                            "label": group["groups_name"],
                            "description": group["groups_description"],
                        }
                        for group in other_groups
                    ],
                    "all": {
                        "gid": all_group["groups_gid"],
                        "label": all_group["groups_name"],
                        "description": all_group["groups_description"],
                    },
                },
            }
        )
+
+
def gravatar_hash(email: str) -> str:
    """Return the gravatar identifier (MD5 of the lowercased email) for *email*."""
    normalized = email.lower().encode("utf-8")
    return hashlib.md5(normalized).hexdigest()  # nosec
diff --git a/services/api-server/src/simcore_service_api_server/db/tables.py b/services/api-server/src/simcore_service_api_server/db/tables.py
new file mode 100644
index 00000000000..d1d60558155
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/db/tables.py
@@ -0,0 +1,16 @@
+from simcore_postgres_database.models.api_keys import api_keys
+from simcore_postgres_database.models.groups import GroupType, groups, user_to_groups
+from simcore_postgres_database.models.users import UserRole, UserStatus, users
+
# all re-exported tables share this single sqlalchemy MetaData instance
metadata = api_keys.metadata

# re-exported names: one import point for the postgres tables/enums this
# service uses
__all__ = [
    "api_keys",
    "users",
    "groups",
    "user_to_groups",
    "metadata",
    "UserStatus",
    "UserRole",
    "GroupType",
]
diff --git a/services/api-server/src/simcore_service_api_server/models/__init__.py b/services/api-server/src/simcore_service_api_server/models/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/models/domain/__init__.py b/services/api-server/src/simcore_service_api_server/models/domain/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/models/domain/api_keys.py b/services/api-server/src/simcore_service_api_server/models/domain/api_keys.py
new file mode 100644
index 00000000000..d0452bb54ad
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/domain/api_keys.py
@@ -0,0 +1,15 @@
+from pydantic import BaseModel, Field, SecretStr
+
+
class ApiKey(BaseModel):
    """An api key/secret pair; the secret is masked in reprs (SecretStr)."""

    api_key: str
    api_secret: SecretStr


class ApiKeyInDB(ApiKey):
    """Database-row representation of an api key (table: api_keys)."""

    id_: int = Field(0, alias="id")  # populated/serialized under "id"
    display_name: str
    user_id: int

    class Config:
        orm_mode = True  # allows parsing directly from db row objects
diff --git a/services/api-server/src/simcore_service_api_server/models/domain/groups.py b/services/api-server/src/simcore_service_api_server/models/domain/groups.py
new file mode 100644
index 00000000000..1843e236012
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/domain/groups.py
@@ -0,0 +1,15 @@
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
class UsersGroup(BaseModel):
    """A single group a user belongs to."""

    gid: str
    label: str
    description: Optional[str] = None


class Groups(BaseModel):
    """All group memberships of a user."""

    me: UsersGroup  # the user's primary group
    organizations: Optional[List[UsersGroup]] = []
    all_: UsersGroup = Field(..., alias="all")  # serialized under the key "all"
diff --git a/services/api-server/src/simcore_service_api_server/models/domain/users.py b/services/api-server/src/simcore_service_api_server/models/domain/users.py
new file mode 100644
index 00000000000..5b35d3fc388
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/domain/users.py
@@ -0,0 +1,32 @@
+from pydantic import BaseModel, EmailStr, Field
+
+from simcore_postgres_database.models.users import UserRole, UserStatus
+
+from .groups import Groups
+
+
class UserBase(BaseModel):
    """Name fields shared by user representations."""

    first_name: str
    last_name: str


class User(UserBase):
    """API-facing user enriched with groups and avatar info."""

    login: EmailStr
    role: str
    groups: Groups
    gravatar_id: str  # md5 of the lowercased email (see db.repositories.users)


class UserInDB(BaseModel):
    """Database-row representation of a user (table: users)."""

    id_: int = Field(0, alias="id")  # populated/serialized under "id"
    name: str
    email: str
    password_hash: str
    primary_gid: int
    status: UserStatus
    role: UserRole

    # TODO: connect name <-> first_name, last_name

    class Config:
        orm_mode = True  # allows parsing directly from db row objects
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/__init__.py b/services/api-server/src/simcore_service_api_server/models/schemas/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py b/services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py
new file mode 100644
index 00000000000..8201495e219
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py
@@ -0,0 +1,12 @@
+from pydantic import BaseModel
+
+from ..domain.api_keys import ApiKey
+
+
class ApiKeyInLogin(ApiKey):
    """Request body for api-key based login (same fields as ApiKey)."""

    pass


class ApiKeyInResponse(BaseModel):
    """Response for api-key based login: display name plus an access token."""

    display_name: str
    token: str
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/meta.py b/services/api-server/src/simcore_service_api_server/models/schemas/meta.py
new file mode 100644
index 00000000000..dd23eced796
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/schemas/meta.py
@@ -0,0 +1,27 @@
+from typing import Dict, Optional
+
+from pydantic import BaseModel, Field, constr
+
+# TODO: review this RE
+# use https://www.python.org/dev/peps/pep-0440/#version-scheme
+# or https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
+#
+VERSION_RE = r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$"
+VersionStr = constr(regex=VERSION_RE)
+
+
class Meta(BaseModel):
    """Service metadata returned by the /meta route (see api/routes/meta.py)."""

    name: str  # package/distribution name of the service
    version: VersionStr  # current implementation version
    released: Optional[Dict[str, VersionStr]] = Field(
        None, description="Maps every route's path tag with a released version"
    )

    class Config:
        schema_extra = {
            "example": {
                "name": "simcore_service_foo",
                "version": "2.4.45",
                "released": {"v1": "1.3.4", "v2": "2.4.45"},
            }
        }
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py b/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py
new file mode 100644
index 00000000000..5ab3bdb8184
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py
@@ -0,0 +1,38 @@
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, EmailStr, Field
+
+from ..domain.groups import Groups
+
+
+class ProfileCommon(BaseModel):
+ first_name: Optional[str] = Field(None, example="James")
+ last_name: Optional[str] = Field(None, example="Maxwell")
+
+
+class ProfileUpdate(ProfileCommon):
+ pass
+
+
+# from simcore_postgres_database.models.users import UserRole
+class UserRoleEnum(str, Enum):
+ # TODO: build from UserRole! or assert Role == UserRole
+ ANONYMOUS = "ANONYMOUS"
+ GUEST = "GUEST"
+ USER = "USER"
+ TESTER = "TESTER"
+
+
+class Profile(ProfileCommon):
+ login: EmailStr
+ role: UserRoleEnum
+ groups: Optional[Groups] = None
+ gravatar_id: Optional[str] = Field(
+ None,
+ description="Hash value of email to retrieve an avatar image from https://www.gravatar.com",
+ max_length=40,
+ )
+
+ class Config:
+ schema_extra = {}
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/tokens.py b/services/api-server/src/simcore_service_api_server/models/schemas/tokens.py
new file mode 100644
index 00000000000..89fb7692f24
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/schemas/tokens.py
@@ -0,0 +1,25 @@
+from datetime import datetime
+from typing import List
+
+from pydantic import BaseModel
+
+
+class JWTMeta(BaseModel):
+ exp: datetime
+ sub: str
+
+
+class JWTUser(BaseModel):
+ username: str
+
+
+class Token(BaseModel):
+ access_token: str
+ token_type: str
+
+
+class TokenData(BaseModel):
+ """ application data encoded in the JWT """
+
+ user_id: int
+ scopes: List[str] = []
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/users.py b/services/api-server/src/simcore_service_api_server/models/schemas/users.py
new file mode 100644
index 00000000000..32e51ae900f
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/models/schemas/users.py
@@ -0,0 +1,12 @@
+from pydantic import BaseModel
+
+from ..domain.users import User
+
+
+class UserInResponse(User):
+ pass
+
+
+class UserInUpdate(BaseModel):
+ first_name: str
+ last_name: str
diff --git a/services/api-server/src/simcore_service_api_server/services/__init__.py b/services/api-server/src/simcore_service_api_server/services/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/src/simcore_service_api_server/services/jwt.py b/services/api-server/src/simcore_service_api_server/services/jwt.py
new file mode 100644
index 00000000000..c3aed9445ce
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/services/jwt.py
@@ -0,0 +1,66 @@
+""" Utility functions related with security
+
+"""
+import os
+from datetime import datetime, timedelta
+from typing import Dict, Optional
+
+import jwt
+from jwt import PyJWTError
+from loguru import logger
+from pydantic import ValidationError
+
+from ..models.schemas.tokens import TokenData
+
+# JSON WEB TOKENS (JWT) --------------------------------------------------------------
+
+__SIGNING_KEY__ = os.environ.get("SECRET_KEY")
+__ALGORITHM__ = "HS256"
+ACCESS_TOKEN_EXPIRE_MINUTES = 30
+
+
+def create_access_token(
+ data: TokenData, *, expires_in_mins: Optional[int] = ACCESS_TOKEN_EXPIRE_MINUTES
+) -> str:
+ """
+ To disable expiration, set 'expires_in_mins' to None
+ """
+ # JWT specs define "Claim Names" for the encoded payload
+ # SEE https://tools.ietf.org/html/rfc7519#section-4
+ payload = {
+ "sub": data.user_id,
+ "scopes": data.scopes or [],
+ }
+
+ if expires_in_mins is not None:
+ exp = datetime.utcnow() + timedelta(minutes=expires_in_mins)
+ payload["exp"] = exp
+
+ encoded_jwt = jwt.encode(payload, __SIGNING_KEY__, algorithm=__ALGORITHM__)
+ return encoded_jwt
+
+
+def get_access_token_data(encoded_jwt: str) -> Optional[TokenData]:
+ """
+ Decodes and validates JWT and returns TokenData
+ Returns None, if invalid token
+ """
+ try:
+ # decode JWT [header.payload.signature] and get payload:
+ payload: Dict = jwt.decode(
+ encoded_jwt, __SIGNING_KEY__, algorithms=[__ALGORITHM__]
+ )
+
+ token_data = TokenData(
+        user_id=payload.get("sub"), scopes=payload.get("scopes", [])
+ )
+
+ except PyJWTError:
+ logger.debug("Invalid token", exc_info=True)
+ return None
+
+ except ValidationError:
+ logger.warning("Token data corrupted? Check payload -> TokenData conversion")
+ return None
+
+ return token_data
diff --git a/services/api-gateway/src/simcore_service_api_gateway/utils/remote_debug.py b/services/api-server/src/simcore_service_api_server/services/remote_debug.py
similarity index 76%
rename from services/api-gateway/src/simcore_service_api_gateway/utils/remote_debug.py
rename to services/api-server/src/simcore_service_api_server/services/remote_debug.py
index e7b8377dd46..67b9b279d44 100644
--- a/services/api-gateway/src/simcore_service_api_gateway/utils/remote_debug.py
+++ b/services/api-server/src/simcore_service_api_server/services/remote_debug.py
@@ -1,12 +1,12 @@
""" Setup remote debugger with Python Tools for Visual Studio (PTVSD)
"""
-import logging
+
import os
-REMOTE_DEBUG_PORT = 3000
+from loguru import logger
-log = logging.getLogger(__name__)
+REMOTE_DEBUG_PORT = 3000
def setup_remote_debugging(force_enabled=False, *, boot_mode=None):
@@ -16,7 +16,7 @@ def setup_remote_debugging(force_enabled=False, *, boot_mode=None):
boot_mode = boot_mode or os.environ.get("SC_BOOT_MODE")
if boot_mode == "debug-ptvsd" or force_enabled:
try:
- log.debug("Enabling attach ptvsd ...")
+ logger.debug("Enabling attach ptvsd ...")
#
# SEE https://github.com/microsoft/ptvsd#enabling-debugging
#
@@ -30,9 +30,9 @@ def setup_remote_debugging(force_enabled=False, *, boot_mode=None):
"Cannot enable remote debugging. Please install ptvsd first"
)
- log.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT)
+ logger.info(f"Remote debugging enabled: listening port {REMOTE_DEBUG_PORT}")
else:
- log.debug("Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode)
+ logger.debug(f"Booting without remote debugging since SC_BOOT_MODE={boot_mode}")
__all__ = ["setup_remote_debugging"]
diff --git a/services/api-gateway/src/simcore_service_api_gateway/utils/helpers.py b/services/api-server/src/simcore_service_api_server/services/security.py
similarity index 55%
rename from services/api-gateway/src/simcore_service_api_gateway/utils/helpers.py
rename to services/api-server/src/simcore_service_api_server/services/security.py
index 7317f50d7ed..2dea4396d82 100644
--- a/services/api-gateway/src/simcore_service_api_gateway/utils/helpers.py
+++ b/services/api-server/src/simcore_service_api_server/services/security.py
@@ -1,29 +1,21 @@
-import json
import subprocess # nosec
-from datetime import datetime
from subprocess import CalledProcessError, CompletedProcess # nosec
-from typing import Dict
+from passlib.context import CryptContext
-def to_bool(s: str) -> bool:
- return s.lower() in ["true", "1", "yes"]
+__pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
-def jsoncoverter(obj):
- if isinstance(obj, datetime):
- return obj.__str__()
- if isinstance(obj, bytes):
- return str(obj)
- return obj
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+ return __pwd_context.verify(plain_password, hashed_password)
-def json_dumps(obj: Dict) -> str:
- return json.dumps(obj, indent=2, default=jsoncoverter)
+def get_password_hash(password: str) -> str:
+ return __pwd_context.hash(password)
def create_secret_key() -> str:
# NOTICE that this key is reset when server is restarted!
-
try:
proc: CompletedProcess = subprocess.run( # nosec
"openssl rand -hex 32", check=True, shell=True
diff --git a/services/api-server/src/simcore_service_api_server/services/serialization.py b/services/api-server/src/simcore_service_api_server/services/serialization.py
new file mode 100644
index 00000000000..b4266fbf491
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/services/serialization.py
@@ -0,0 +1,19 @@
+import json
+from datetime import datetime
+from typing import Dict
+
+
+def to_bool(s: str) -> bool:
+ return s.lower() in ["true", "1", "yes"]
+
+
+def _json_converter(obj):
+    if isinstance(obj, datetime):
+        return obj.__str__()
+    if isinstance(obj, bytes):
+        return str(obj)
+    return obj
+
+
+def json_dumps(obj: Dict) -> str:
+    return json.dumps(obj, indent=2, default=_json_converter)
diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py
new file mode 100644
index 00000000000..74631f5a33a
--- /dev/null
+++ b/services/api-server/src/simcore_service_api_server/services/webserver.py
@@ -0,0 +1,52 @@
+import base64
+
+from cryptography import fernet
+from fastapi import FastAPI
+from httpx import AsyncClient
+from loguru import logger
+
+from ..core.settings import WebServerSettings
+
+
+def _get_secret_key(settings: WebServerSettings):
+ secret_key_bytes = settings.session_secret_key.get_secret_value().encode("utf-8")
+ while len(secret_key_bytes) < 32:
+ secret_key_bytes += secret_key_bytes
+ secret_key = secret_key_bytes[:32]
+
+ if isinstance(secret_key, str):
+ pass
+ elif isinstance(secret_key, (bytes, bytearray)):
+ secret_key = base64.urlsafe_b64encode(secret_key)
+ return secret_key
+
+
+def setup_webserver(app: FastAPI) -> None:
+ settings: WebServerSettings = app.state.settings.webserver
+
+ # normalize & encrypt
+ secret_key = _get_secret_key(settings)
+ app.state.webserver_fernet = fernet.Fernet(secret_key)
+
+ # init client
+ logger.debug(f"Setup webserver at {settings.base_url}...")
+
+ client = AsyncClient(base_url=settings.base_url)
+ app.state.webserver_client = client
+
+ # TODO: raise if attribute already exists
+ # TODO: ping?
+
+
+async def close_webserver(app: FastAPI) -> None:
+ try:
+ client: AsyncClient = app.state.webserver_client
+ await client.aclose()
+ del app.state.webserver_client
+ except AttributeError:
+ pass
+ logger.debug("Webserver closed successfully")
+
+
+def get_webserver_client(app: FastAPI) -> AsyncClient:
+ return app.state.webserver_client
diff --git a/services/api-server/tests/integration/.gitkeep b/services/api-server/tests/integration/.gitkeep
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-server/tests/unit/_helpers.py b/services/api-server/tests/unit/_helpers.py
new file mode 100644
index 00000000000..2ba444489a6
--- /dev/null
+++ b/services/api-server/tests/unit/_helpers.py
@@ -0,0 +1,65 @@
+from pprint import pformat
+from typing import Dict
+
+import faker
+import passlib.hash
+from aiopg.sa.result import RowProxy
+
+import simcore_service_api_server.db.tables as orm
+from simcore_service_api_server.db.repositories.base import BaseRepository
+from simcore_service_api_server.db.repositories.users import UsersRepository
+from simcore_service_api_server.models.domain.api_keys import ApiKeyInDB
+
+fake = faker.Faker()
+
+
+def _hash_it(password: str) -> str:
+ return passlib.hash.sha256_crypt.using(rounds=1000).hash(password)
+
+
+# TODO: this should be generated from the metadata in orm.users table
+def random_user(**overrides) -> Dict:
+ data = dict(
+ name=fake.name(),
+ email=fake.email(),
+ password_hash=_hash_it("secret"),
+ status=orm.UserStatus.ACTIVE,
+ created_ip=fake.ipv4(),
+ )
+
+    password = overrides.pop("password", None)
+ if password:
+ overrides["password_hash"] = _hash_it(password)
+
+ data.update(overrides)
+ return data
+
+
+class RWUsersRepository(UsersRepository):
+ # pylint: disable=no-value-for-parameter
+
+ async def create(self, **user) -> int:
+ values = random_user(**user)
+ user_id = await self.connection.scalar(orm.users.insert().values(**values))
+
+ print("Created user ", pformat(values), f"with user_id={user_id}")
+ return user_id
+
+
+class RWApiKeysRepository(BaseRepository):
+ # pylint: disable=no-value-for-parameter
+
+ async def create(self, name: str, *, api_key: str, api_secret: str, user_id: int):
+ values = dict(
+ display_name=name, user_id=user_id, api_key=api_key, api_secret=api_secret,
+ )
+ _id = await self.connection.scalar(orm.api_keys.insert().values(**values))
+
+ # check inserted
+ row: RowProxy = await (
+ await self.connection.execute(
+ orm.api_keys.select().where(orm.api_keys.c.id == _id)
+ )
+ ).first()
+
+ return ApiKeyInDB.from_orm(row)
diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py
new file mode 100644
index 00000000000..b69ea0e2666
--- /dev/null
+++ b/services/api-server/tests/unit/conftest.py
@@ -0,0 +1,246 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+import os
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+from typing import Callable, Coroutine, Dict, Union
+
+import aiopg.sa
+import pytest
+import sqlalchemy as sa
+import yaml
+from asgi_lifespan import LifespanManager
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from httpx import AsyncClient
+
+import simcore_postgres_database.cli as pg_cli
+import simcore_service_api_server
+from _helpers import RWApiKeysRepository, RWUsersRepository
+from simcore_postgres_database.models.base import metadata
+from simcore_service_api_server.models.domain.api_keys import ApiKeyInDB
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
+## TEST_ENVIRON ---
+
+
+@pytest.fixture(scope="session")
+def environment() -> Dict:
+ env = {
+ "WEBSERVER_HOST": "webserver",
+ "WEBSERVER_SESSION_SECRET_KEY": "REPLACE ME with a key of at least length 32.",
+
+ "POSTGRES_HOST": "localhost",
+ "POSTGRES_USER": "test",
+ "POSTGRES_PASSWORD": "test",
+ "POSTGRES_DB": "test",
+
+ "LOG_LEVEL": "debug",
+
+ "SC_BOOT_MODE": "production",
+ }
+ return env
+
+
+## FOLDER LAYOUT ---
+
+
+@pytest.fixture(scope="session")
+def project_slug_dir():
+ folder = current_dir.parent.parent
+ assert folder.exists()
+ assert any(folder.glob("src/simcore_service_api_server"))
+ return folder
+
+
+@pytest.fixture(scope="session")
+def package_dir():
+ dirpath = Path(simcore_service_api_server.__file__).resolve().parent
+ assert dirpath.exists()
+ return dirpath
+
+
+@pytest.fixture(scope="session")
+def osparc_simcore_root_dir(project_slug_dir):
+ root_dir = project_slug_dir.parent.parent
+ assert (
+ root_dir and root_dir.exists()
+    ), "Did you rename or move the integration folder under api-server?"
+ assert any(root_dir.glob("services/api-server")), (
+ "%s not look like rootdir" % root_dir
+ )
+ return root_dir
+
+
+@pytest.fixture(scope="session")
+def tests_dir() -> Path:
+ tdir = (current_dir / "..").resolve()
+ assert tdir.exists()
+ assert tdir.name == "tests"
+ return tdir
+
+
+@pytest.fixture(scope="session")
+def tests_utils_dir(tests_dir: Path) -> Path:
+ utils_dir = (tests_dir / "utils").resolve()
+ assert utils_dir.exists()
+ return utils_dir
+
+
+## POSTGRES & APP ---
+
+
+@pytest.fixture(scope="session")
+def docker_compose_file(environment, tests_utils_dir, tmpdir_factory) -> Path:
+ # Overrides fixture in https://github.com/avast/pytest-docker
+
+ # NOTE: do not forget to add the current environ here, otherwise docker-compose fails
+ environ = dict(os.environ)
+ environ.update(environment)
+
+ src_path = tests_utils_dir / "docker-compose.yml"
+    assert src_path.exists()
+
+ dst_path = Path(str(tmpdir_factory.mktemp("config").join("docker-compose.yml")))
+
+ shutil.copy(src_path, dst_path.parent)
+ assert dst_path.exists()
+
+ # configs
+ subprocess.run(
+ f'docker-compose --file "{src_path}" config > "{dst_path}"',
+ shell=True,
+ check=True,
+ env=environ,
+ )
+
+ return dst_path
+
+
+@pytest.fixture(scope="session")
+def postgres_service(docker_services, docker_ip, docker_compose_file: Path) -> Dict:
+
+ # check docker-compose's environ is resolved properly
+ config = yaml.safe_load(docker_compose_file.read_text())
+ environ = config["services"]["postgres"]["environment"]
+
+ # builds DSN
+ config = dict(
+ user=environ["POSTGRES_USER"],
+ password=environ["POSTGRES_PASSWORD"],
+ host=docker_ip,
+ port=docker_services.port_for("postgres", 5432),
+ database=environ["POSTGRES_DB"],
+ )
+
+ dsn = "postgresql://{user}:{password}@{host}:{port}/{database}".format(**config)
+
+ def _create_checker() -> Callable:
+ def is_postgres_responsive() -> bool:
+ try:
+ engine = sa.create_engine(dsn)
+ conn = engine.connect()
+ conn.close()
+ except sa.exc.OperationalError:
+ return False
+ return True
+
+ return is_postgres_responsive
+
+ # Wait until service is responsive.
+ docker_services.wait_until_responsive(
+ check=_create_checker(), timeout=30.0, pause=0.1,
+ )
+
+ config["dsn"] = dsn
+ return config
+
+
+@pytest.fixture(scope="session")
+def make_engine(postgres_service: Dict) -> Callable:
+ dsn = postgres_service["dsn"] # session scope freezes dsn
+
+ def maker(is_async=True) -> Union[Coroutine, Callable]:
+ return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn)
+
+ return maker
+
+
+@pytest.fixture
+def apply_migration(postgres_service: Dict, make_engine) -> None:
+ kwargs = postgres_service.copy()
+ kwargs.pop("dsn")
+ pg_cli.discover.callback(**kwargs)
+ pg_cli.upgrade.callback("head")
+ yield
+ pg_cli.downgrade.callback("base")
+ pg_cli.clean.callback()
+
+ # FIXME: deletes all because downgrade is not reliable!
+ engine = make_engine(False)
+ metadata.drop_all(engine)
+
+
+@pytest.fixture
+def app(monkeypatch, environment, apply_migration) -> FastAPI:
+ # patching environs
+ for key, value in environment.items():
+ monkeypatch.setenv(key, value)
+
+ from simcore_service_api_server.core.application import init_app
+
+ app = init_app()
+ return app
+
+
+@pytest.fixture
+async def initialized_app(app: FastAPI) -> FastAPI:
+ async with LifespanManager(app):
+ yield app
+
+
+@pytest.fixture
+async def client(loop, initialized_app: FastAPI) -> AsyncClient:
+ async with AsyncClient(
+ app=initialized_app,
+ base_url="http://testserver",
+ headers={"Content-Type": "application/json"},
+ ) as client:
+ yield client
+
+
+@pytest.fixture
+def sync_client(app: FastAPI) -> TestClient:
+ # test client:
+ # Context manager to trigger events: https://fastapi.tiangolo.com/advanced/testing-events/
+ with TestClient(app) as cli:
+ yield cli
+
+
+## FAKE DATA ---
+
+
+@pytest.fixture
+async def test_user_id(loop, initialized_app) -> int:
+ # WARNING: created but not deleted upon tear-down, i.e. this is for one use!
+ async with initialized_app.state.engine.acquire() as conn:
+ user_id = await RWUsersRepository(conn).create(
+ email="test@test.com", password="password", name="username",
+ )
+ return user_id
+
+
+@pytest.fixture
+async def test_api_key(loop, initialized_app, test_user_id) -> ApiKeyInDB:
+ # WARNING: created but not deleted upon tear-down, i.e. this is for one use!
+ async with initialized_app.state.engine.acquire() as conn:
+ apikey = await RWApiKeysRepository(conn).create(
+ "test-api-key", api_key="key", api_secret="secret", user_id=test_user_id
+ )
+ return apikey
diff --git a/services/api-server/tests/unit/test_api_meta.py b/services/api-server/tests/unit/test_api_meta.py
new file mode 100644
index 00000000000..d8df761a822
--- /dev/null
+++ b/services/api-server/tests/unit/test_api_meta.py
@@ -0,0 +1,19 @@
+# pylint: disable=unused-variable
+# pylint: disable=unused-argument
+# pylint: disable=redefined-outer-name
+# from fastapi.testclient import TestClient
+
+from httpx import AsyncClient
+
+from simcore_service_api_server.__version__ import api_version, api_vtag
+from simcore_service_api_server.models.schemas.meta import Meta
+
+
+async def test_read_service_meta(client: AsyncClient):
+ response = await client.get(f"{api_vtag}/meta")
+
+ assert response.status_code == 200
+ assert response.json()["version"] == api_version
+
+ meta = Meta(**response.json())
+ assert meta.version == api_version
diff --git a/services/api-server/tests/unit/test_api_user.py b/services/api-server/tests/unit/test_api_user.py
new file mode 100644
index 00000000000..49464d08f66
--- /dev/null
+++ b/services/api-server/tests/unit/test_api_user.py
@@ -0,0 +1,51 @@
+# pylint: disable=unused-variable
+# pylint: disable=unused-argument
+# pylint: disable=redefined-outer-name
+
+# from starlette.testclient import TestClient
+
+import pytest
+from httpx import AsyncClient
+from starlette import status
+
+from simcore_service_api_server.__version__ import api_vtag
+from simcore_service_api_server.models.domain.api_keys import ApiKeyInDB
+from simcore_service_api_server.models.schemas.profiles import Profile
+
+
+@pytest.fixture
+def auth(test_api_key: ApiKeyInDB):
+ ## https://requests.readthedocs.io/en/master/user/authentication/
+ # TODO: https://requests-oauthlib.readthedocs.io/en/latest/oauth2_workflow.html#backend-application-flow
+
+ auth = (test_api_key.api_key, test_api_key.api_secret.get_secret_value())
+ return auth
+
+
+@pytest.mark.skip(reason="fixture under dev")
+async def test_get_profile(client: AsyncClient, auth):
+
+ resp = await client.get(f"/{api_vtag}/meta", auth=auth)
+ assert resp.status_code == status.HTTP_200_OK
+
+ resp = await client.get(f"/{api_vtag}/me", auth=auth)
+ assert resp.status_code == status.HTTP_200_OK
+
+ # validates response
+ profile = Profile(**resp.json())
+ assert profile.first_name == "James"
+
+
+@pytest.mark.skip(reason="fixture under dev")
+async def test_patch_profile(client: AsyncClient, auth):
+
+ resp = await client.patch(
+ f"/{api_vtag}/me",
+        json={"first_name": "Oliver", "last_name": "Heaviside"},
+ auth=auth,
+ )
+ assert resp.status_code == status.HTTP_200_OK, resp.text
+
+ profile = Profile(**resp.json())
+ assert profile.first_name == "Oliver"
+ assert profile.last_name == "Heaviside"
diff --git a/services/api-gateway/tests/unit/test_code_syntax.py b/services/api-server/tests/unit/test_code_syntax.py
similarity index 100%
rename from services/api-gateway/tests/unit/test_code_syntax.py
rename to services/api-server/tests/unit/test_code_syntax.py
index 9e6576bb072..78a6d0c3efa 100644
--- a/services/api-gateway/tests/unit/test_code_syntax.py
+++ b/services/api-server/tests/unit/test_code_syntax.py
@@ -5,11 +5,11 @@
import os
import re
from pathlib import Path
-from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
-
import pytest
+from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
+
@pytest.fixture
def pylintrc(osparc_simcore_root_dir):
diff --git a/services/api-server/tests/unit/test_fake_generator.py b/services/api-server/tests/unit/test_fake_generator.py
new file mode 100644
index 00000000000..b9c791bd74b
--- /dev/null
+++ b/services/api-server/tests/unit/test_fake_generator.py
@@ -0,0 +1,18 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+import pytest
+from pydantic import BaseModel
+
+
+@pytest.mark.skip(reason="TODO")
+def create_fake(model: BaseModel):
+ from faker import Faker
+
+ fake = Faker()
+
+ # for field in model.__fie
+ # navigate fields
+ # if example cls.Config.schema_extra.get("example")
+ # type
+ # default or some value in fake
diff --git a/services/api-server/tests/unit/test_jwt.py b/services/api-server/tests/unit/test_jwt.py
new file mode 100644
index 00000000000..5f13d17d4b4
--- /dev/null
+++ b/services/api-server/tests/unit/test_jwt.py
@@ -0,0 +1,35 @@
+# pylint: disable=unused-variable
+# pylint: disable=unused-argument
+# pylint: disable=redefined-outer-name
+
+import importlib
+
+import pytest
+
+from simcore_service_api_server.models.schemas.tokens import TokenData
+from simcore_service_api_server.services.jwt import (
+ create_access_token,
+ get_access_token_data,
+)
+
+
+@pytest.fixture()
+def mock_secret_key(monkeypatch):
+ monkeypatch.setenv("SECRET_KEY", "your-256-bit-secret")
+
+ import simcore_service_api_server.services.jwt
+
+ importlib.reload(simcore_service_api_server.services.jwt)
+
+
+def test_access_token_data(mock_secret_key):
+
+ data = TokenData(user_id=33, scopes=[])
+ jwt = create_access_token(data, expires_in_mins=None)
+
+ # checks jwt against https://jwt.io/#debugger-io
+ # assert jwt == b"ey ...
+
+ received_data = get_access_token_data(jwt)
+
+ assert data == received_data
diff --git a/services/api-server/tests/unit/test_security.py b/services/api-server/tests/unit/test_security.py
new file mode 100644
index 00000000000..325b001b963
--- /dev/null
+++ b/services/api-server/tests/unit/test_security.py
@@ -0,0 +1,14 @@
+# pylint: disable=unused-variable
+# pylint: disable=unused-argument
+# pylint: disable=redefined-outer-name
+
+from simcore_service_api_server.services.security import (
+ get_password_hash,
+ verify_password,
+)
+
+
+def test_has_password():
+ hashed_pass = get_password_hash("secret")
+ assert hashed_pass != "secret"
+ assert verify_password("secret", hashed_pass)
diff --git a/services/api-server/tests/unit/test_settings.py b/services/api-server/tests/unit/test_settings.py
new file mode 100644
index 00000000000..8263d46ae03
--- /dev/null
+++ b/services/api-server/tests/unit/test_settings.py
@@ -0,0 +1,36 @@
+# import pytest
+import logging
+from pprint import pprint
+
+from simcore_service_api_server.core.settings import (
+ URL,
+ AppSettings,
+ BootModeEnum,
+ PostgresSettings,
+ WebServerSettings,
+)
+
+
+# bring .env-devel in here
+def test_min_environ_for_settings(monkeypatch):
+ monkeypatch.setenv("WEBSERVER_HOST", "production_webserver")
+ monkeypatch.setenv("WEBSERVER_SESSION_SECRET_KEY", "REPLACE ME with a key of at least length 32.")
+
+ monkeypatch.setenv("POSTGRES_HOST", "production_postgres")
+ monkeypatch.setenv("POSTGRES_USER", "test")
+ monkeypatch.setenv("POSTGRES_PASSWORD", "test")
+ monkeypatch.setenv("POSTGRES_DB", "simcoredb")
+
+ monkeypatch.setenv("SC_BOOT_MODE", "production")
+
+ # NOTE: pg and weberver settings parse environ NOW!
+ settings = AppSettings(postgres=PostgresSettings(), webserver=WebServerSettings())
+
+ pprint(settings.dict())
+
+ assert settings.boot_mode == BootModeEnum.production
+ assert settings.loglevel == logging.DEBUG
+
+ assert settings.postgres.dsn == URL(
+ "postgresql://test:test@production_postgres:5432/simcoredb"
+ )
diff --git a/services/api-gateway/tests/utils/docker-compose.yml b/services/api-server/tests/utils/docker-compose.yml
similarity index 93%
rename from services/api-gateway/tests/utils/docker-compose.yml
rename to services/api-server/tests/utils/docker-compose.yml
index a5afea1eca1..51f193277fc 100644
--- a/services/api-gateway/tests/utils/docker-compose.yml
+++ b/services/api-server/tests/utils/docker-compose.yml
@@ -5,7 +5,7 @@ services:
environment:
- POSTGRES_USER=${POSTGRES_USER:-test}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-test}
- - POSTGRES_DB=${POSTGRES_PASSWORD:-test}
+ - POSTGRES_DB=${POSTGRES_DB:-test}
- POSTGRES_HOST=${POSTGRES_HOST:-localhost}
- POSTGRES_PORT=${POSTGRES_PORT:-5432}
ports:
diff --git a/services/api-server/tests/utils/init-pg.py b/services/api-server/tests/utils/init-pg.py
new file mode 100644
index 00000000000..a239a266f47
--- /dev/null
+++ b/services/api-server/tests/utils/init-pg.py
@@ -0,0 +1,108 @@
+# pylint: disable=no-value-for-parameter
+# TODO: reuse in auto and manual testing!
+
+import asyncio
+import os
+from typing import Dict
+from uuid import uuid4
+
+import aiopg.sa
+import faker
+import sqlalchemy as sa
+import yaml
+
+import simcore_postgres_database.cli as pg_cli
+import simcore_service_api_server.db.tables as pg
+
+DSN_FORMAT = "postgresql://{user}:{password}@{host}:{port}/{database}"
+
+default_db_settings = dict(
+ user=os.environ.get("POSTGRES_USER", "test"),
+ password=os.environ.get("POSTGRES_PASSWORD", "test"),
+ host=os.environ.get("POSTGRES_HOST", "localhost"),
+ port=os.environ.get("POSTGRES_PORT", 5432),
+    database=os.environ.get("POSTGRES_DB", "test"),
+)
+default_dsn = DSN_FORMAT.format(**default_db_settings)
+
+fake = faker.Faker()
+
+
+def load_db_config() -> Dict:
+ # TODO:
+ with open("docker-compose-resolved.yaml") as fh:
+ config = yaml.safe_load(fh)
+ environ = config["services"]["postgres"]["environment"]
+
+ return dict(
+ user=environ["POSTGRES_USER"],
+ password=environ["POSTGRES_PASSWORD"],
+ host="localhost",
+ port=5432,
+ database=environ["POSTGRES_DB"],
+ )
+
+
+def init_tables(dsn: str = default_dsn):
+ engine = sa.create_engine(dsn)
+ meta = pg.metadata
+ meta.drop_all(engine)
+ # meta.create_all(engine, tables=[pg.api_keys, pg.users])
+
+
+def random_user(**overrides):
+ data = dict(
+ name=fake.name(),
+ email=fake.email(),
+ password_hash=fake.numerify(text="#" * 5),
+ status=pg.UserStatus.ACTIVE,
+ created_ip=fake.ipv4(),
+ )
+ data.update(overrides)
+ return data
+
+
+def random_api_key(**overrides):
+ data = dict(
+ user_id=1, display_name=fake.word(), api_key=uuid4(), api_secret=uuid4(),
+ )
+ data.update(overrides)
+ return data
+
+
+async def fill_tables(dsn: str = default_dsn):
+ async with aiopg.sa.create_engine(dsn) as engine:
+ async with engine.acquire() as conn:
+ uid: int = await conn.scalar(
+ pg.users.insert().values(**random_user(name="me", email="me@bar.foo"))
+ )
+
+ await conn.scalar(
+ pg.api_keys.insert().values(
+ **random_api_key(
+ display_name="test key",
+ user_id=uid,
+ api_key="key",
+ api_secret="secret",
+ )
+ )
+ )
+
+
+async def main():
+
+ # discover
+ settings = pg_cli.discover.callback(**default_db_settings)
+ dsn: str = DSN_FORMAT.format(**settings)
+
+ # upgrade
+ pg_cli.upgrade.callback("head")
+
+ # FIXME: if already there, it will fail
+ await fill_tables(dsn)
+
+
+if __name__ == "__main__":
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(main())
+ loop.stop()
diff --git a/services/api-gateway/tools/gen_api.py b/services/api-server/tools/gen_api.py
similarity index 99%
rename from services/api-gateway/tools/gen_api.py
rename to services/api-server/tools/gen_api.py
index fa8f1e68777..f2daac24a13 100644
--- a/services/api-gateway/tools/gen_api.py
+++ b/services/api-server/tools/gen_api.py
@@ -15,7 +15,7 @@
# directories
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-package_dir = (current_dir / ".." / "src" / "simcore_service_api_gateway").resolve()
+package_dir = (current_dir / ".." / "src" / "simcore_service_api_server").resolve()
# formatter
diff --git a/services/api-gateway/tools/templates/cruds.py.jinja2 b/services/api-server/tools/templates/cruds.py.jinja2
similarity index 100%
rename from services/api-gateway/tools/templates/cruds.py.jinja2
rename to services/api-server/tools/templates/cruds.py.jinja2
diff --git a/services/api-gateway/tools/templates/orm.py.jinja2 b/services/api-server/tools/templates/orm.py.jinja2
similarity index 100%
rename from services/api-gateway/tools/templates/orm.py.jinja2
rename to services/api-server/tools/templates/orm.py.jinja2
diff --git a/services/api-gateway/tools/templates/resource_custom_methods.py.jinja2 b/services/api-server/tools/templates/resource_custom_methods.py.jinja2
similarity index 94%
rename from services/api-gateway/tools/templates/resource_custom_methods.py.jinja2
rename to services/api-server/tools/templates/resource_custom_methods.py.jinja2
index 99c1dc7d348..4542ce6d6f9 100644
--- a/services/api-gateway/tools/templates/resource_custom_methods.py.jinja2
+++ b/services/api-server/tools/templates/resource_custom_methods.py.jinja2
@@ -1,11 +1,10 @@
{# STANDARD METHODS: https://cloud.google.com/apis/design/custom_methods #}
-import logging
+from loguru import logger
from fastapi import APIRouter, HTTPException
from starlette import status
-log = logging.getLogger(__name__)
router = APIRouter()
diff --git a/services/api-gateway/tools/templates/resource_standard_methods.py.jinja2 b/services/api-server/tools/templates/resource_standard_methods.py.jinja2
similarity index 97%
rename from services/api-gateway/tools/templates/resource_standard_methods.py.jinja2
rename to services/api-server/tools/templates/resource_standard_methods.py.jinja2
index fa87e95e520..0553fda89f4 100644
--- a/services/api-gateway/tools/templates/resource_standard_methods.py.jinja2
+++ b/services/api-server/tools/templates/resource_standard_methods.py.jinja2
@@ -1,4 +1,4 @@
-import logging
+from loguru import logger
from typing import List, Optional
from fastapi import APIRouter, Body, Depends, HTTPException, Query
@@ -9,8 +9,6 @@ from ..store import crud_{{ rnp }} as crud
from ..schemas import schemas_{{ rnp }} as schemas
router = APIRouter()
-log = logging.getLogger(__name__)
-
{# STANDARD METHODS: https://cloud.google.com/apis/design/standard_methods #}
@@ -37,7 +35,7 @@ async def list_{{ rnp }}(
# Applicable naming conventions
# TODO: filter: https://cloud.google.com/apis/design/naming_convention#list_filter_field
# SEE response: https://cloud.google.com/apis/design/naming_convention#list_response
- log.debug("%s %s %s", page_token, page_size, order_by)
+ logger.debug("%s %s %s", page_token, page_size, order_by)
{{ rnp }} = await crud.list_{{ rnp }}(conn)
return {{ rnp }}
diff --git a/services/api-server/tools/templates/schemas.py.jinja2 b/services/api-server/tools/templates/schemas.py.jinja2
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/api-gateway/tools/templates/test_endpoints.py.jinja2 b/services/api-server/tools/templates/test_endpoints.py.jinja2
similarity index 95%
rename from services/api-gateway/tools/templates/test_endpoints.py.jinja2
rename to services/api-server/tools/templates/test_endpoints.py.jinja2
index c1cc99fed48..9bee32645e4 100644
--- a/services/api-gateway/tools/templates/test_endpoints.py.jinja2
+++ b/services/api-server/tools/templates/test_endpoints.py.jinja2
@@ -10,7 +10,7 @@ from starlette import status
# TODO: app is init globally ... which is bad!
-from simcore_service_api_gateway.main import api_version, app, api_vtag
+from simcore_service_api_server.core.application import api_version, app, api_vtag
@pytest.fixture
diff --git a/services/catalog/Makefile b/services/catalog/Makefile
index 219dcd42f41..268b99fe573 100644
--- a/services/catalog/Makefile
+++ b/services/catalog/Makefile
@@ -17,7 +17,7 @@ requirements: ## compiles pip requirements (.in -> .txt)
.PHONY: install-dev install-prod install-ci
install-dev install-prod install-ci: requirements _check_venv_active ## install app in development/production or CI mode
# installing in $(subst install-,,$@) mode
- pip-sync requirements/$(subst install-,,$@).txt
+ pip-sync --quiet requirements/$(subst install-,,$@).txt
PHONY: tests-unit tests-integration tests
diff --git a/services/catalog/docker/healthcheck.py b/services/catalog/docker/healthcheck.py
index 93c59bb7826..23a3ba3ec11 100644
--- a/services/catalog/docker/healthcheck.py
+++ b/services/catalog/docker/healthcheck.py
@@ -18,7 +18,6 @@
import os
import sys
-
from urllib.request import urlopen
SUCCESS, UNHEALTHY = 0, 1
diff --git a/services/catalog/src/simcore_service_catalog/__main__.py b/services/catalog/src/simcore_service_catalog/__main__.py
index ef4df57b5ad..10fa4f0890e 100644
--- a/services/catalog/src/simcore_service_catalog/__main__.py
+++ b/services/catalog/src/simcore_service_catalog/__main__.py
@@ -5,8 +5,8 @@
"""
import uvicorn
-from simcore_service_catalog.main import app
from simcore_service_catalog.config import uvicorn_settings
+from simcore_service_catalog.main import app
def main():
diff --git a/services/catalog/src/simcore_service_catalog/config.py b/services/catalog/src/simcore_service_catalog/config.py
index 5544f30d0b9..a04b5d84611 100644
--- a/services/catalog/src/simcore_service_catalog/config.py
+++ b/services/catalog/src/simcore_service_catalog/config.py
@@ -11,7 +11,7 @@
# DOCKER
is_container_environ: bool = "SC_BOOT_MODE" in os.environ
is_devel = os.environ.get("SC_BUILD_TARGET") == "development"
-is_prod = os.environ.get("SC_BUILD_TARGET") == "production"
+is_prod = os.environ.get("SC_BUILD_TARGET") == "production"
# LOGGING
@@ -40,7 +40,9 @@
**postgres_cfg
)
postgres_cfg: dict = {**postgres_cfg, "uri": postgres_dsn}
-init_tables: bool = cast_to_bool(os.environ.get("POSTGRES_INIT_TABLES", "true" if is_devel else "false"))
+init_tables: bool = cast_to_bool(
+ os.environ.get("POSTGRES_INIT_TABLES", "true" if is_devel else "false")
+)
# SERVER
# NOTE: https://www.uvicorn.org/settings/
diff --git a/services/catalog/src/simcore_service_catalog/endpoints/dags.py b/services/catalog/src/simcore_service_catalog/endpoints/dags.py
index f360b0f7454..a75c2ed6eb1 100644
--- a/services/catalog/src/simcore_service_catalog/endpoints/dags.py
+++ b/services/catalog/src/simcore_service_catalog/endpoints/dags.py
@@ -10,8 +10,8 @@
)
from .. import db
-from ..store import crud_dags as crud
from ..schemas import schemas_dags as schemas
+from ..store import crud_dags as crud
router = APIRouter()
log = logging.getLogger(__name__)
diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py
index 6117bed081e..63a1ad27460 100644
--- a/services/catalog/src/simcore_service_catalog/main.py
+++ b/services/catalog/src/simcore_service_catalog/main.py
@@ -64,7 +64,8 @@ async def go():
async with engine.acquire() as conn:
await create_tables(conn)
- await go() # NOTE: non-blocking this way
+ await go() # NOTE: non-blocking this way
+
@app.on_event("shutdown")
def shutdown_event():
diff --git a/services/catalog/src/simcore_service_catalog/orm.py b/services/catalog/src/simcore_service_catalog/orm.py
index b84d5cbdafd..01557d31052 100644
--- a/services/catalog/src/simcore_service_catalog/orm.py
+++ b/services/catalog/src/simcore_service_catalog/orm.py
@@ -1,4 +1,3 @@
from simcore_postgres_database.models.direct_acyclic_graphs import DAG, dags
-
__all__ = ["dags", "DAG"]
diff --git a/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py b/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py
index 6c873a1931e..cfea6816e4b 100644
--- a/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py
+++ b/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py
@@ -1,7 +1,5 @@
from typing import Dict, Optional
-# TODO: why pylint error in pydantic???
-# pylint: disable=no-name-in-module
from pydantic import BaseModel, EmailStr, Field, Json
from . import project
@@ -32,7 +30,7 @@ class DAGInPath(DAGBase):
class DAGAtDB(DAGBase):
id: int
- workbench: Json[Dict[str, project.Node]]
+ workbench: Json[Dict[str, project.Node]] # pylint: disable=unsubscriptable-object
class Config:
orm_mode = True
diff --git a/services/catalog/src/simcore_service_catalog/utils/helpers.py b/services/catalog/src/simcore_service_catalog/utils/helpers.py
index 59058ae0f75..4d95766d28b 100644
--- a/services/catalog/src/simcore_service_catalog/utils/helpers.py
+++ b/services/catalog/src/simcore_service_catalog/utils/helpers.py
@@ -1,3 +1,2 @@
-
def cast_to_bool(value: str) -> bool:
return value.lower() in ["true", "1", "yes"]
diff --git a/services/catalog/src/simcore_service_catalog/utils/remote_debug.py b/services/catalog/src/simcore_service_catalog/utils/remote_debug.py
index 91b3ba5af67..b29069c9fd8 100644
--- a/services/catalog/src/simcore_service_catalog/utils/remote_debug.py
+++ b/services/catalog/src/simcore_service_catalog/utils/remote_debug.py
@@ -21,15 +21,18 @@ def setup_remote_debugging(force_enabled=False):
# SEE https://github.com/microsoft/ptvsd#enabling-debugging
#
import ptvsd
- ptvsd.enable_attach(address=('0.0.0.0', REMOTE_DEBUG_PORT), redirect_output=True) # nosec
+
+ ptvsd.enable_attach(
+ address=("0.0.0.0", REMOTE_DEBUG_PORT), redirect_output=True
+ ) # nosec
except ImportError:
- raise ValueError("Cannot enable remote debugging. Please install ptvsd first")
+ raise ValueError(
+ "Cannot enable remote debugging. Please install ptvsd first"
+ )
log.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT)
else:
log.debug("Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode)
-__all__ = [
- 'setup_remote_debugging'
-]
+__all__ = ["setup_remote_debugging"]
diff --git a/services/catalog/tests/unit/test_package.py b/services/catalog/tests/unit/test_package.py
index 9eb0a8c62d6..3c4e17d5671 100644
--- a/services/catalog/tests/unit/test_package.py
+++ b/services/catalog/tests/unit/test_package.py
@@ -4,11 +4,12 @@
import os
import re
-from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
from pathlib import Path
import pytest
+from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
+
# from simcore_service_catalog.__main__ import main
diff --git a/services/docker-compose-build.yml b/services/docker-compose-build.yml
index d0294305e8b..3f7fada5b88 100644
--- a/services/docker-compose-build.yml
+++ b/services/docker-compose-build.yml
@@ -7,22 +7,22 @@
#
version: "3.7"
services:
- api-gateway:
- image: local/api-gateway:${BUILD_TARGET:?build_target_required}
+ api-server:
+ image: local/api-server:${BUILD_TARGET:?build_target_required}
build:
context: ../
- dockerfile: services/api-gateway/Dockerfile
+ dockerfile: services/api-server/Dockerfile
cache_from:
- - local/api-gateway:${BUILD_TARGET:?build_target_required}
- - ${DOCKER_REGISTRY:-itisfoundation}/api-gateway:cache
- - ${DOCKER_REGISTRY:-itisfoundation}/api-gateway:${DOCKER_IMAGE_TAG:-latest}
+ - local/api-server:${BUILD_TARGET:?build_target_required}
+ - ${DOCKER_REGISTRY:-itisfoundation}/api-server:cache
+ - ${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest}
target: ${BUILD_TARGET:?build_target_required}
labels:
org.label-schema.schema-version: "1.0"
org.label-schema.build-date: "${BUILD_DATE}"
org.label-schema.vcs-url: "${VCS_URL}"
org.label-schema.vcs-ref: "${VCS_REF}"
- io.osparc.api-version: "${STORAGE_API_VERSION}"
+ io.osparc.api-version: "${API_SERVER_API_VERSION}"
catalog:
image: local/catalog:${BUILD_TARGET:?build_target_required}
diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml
index 2870b020ea2..47daede4cfe 100644
--- a/services/docker-compose.devel.yml
+++ b/services/docker-compose.devel.yml
@@ -6,6 +6,14 @@
#
version: "3.7"
services:
+ api-server:
+ environment:
+ - SC_BOOT_MODE=debug-ptvsd
+ - LOGLEVEL=debug
+ volumes:
+ - ./api-server:/devel/services/api-server
+ - ../packages:/devel/packages
+
catalog:
environment:
- SC_BOOT_MODE=debug-ptvsd
diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml
index 9b1db0ca2ef..b0b813ad346 100644
--- a/services/docker-compose.local.yml
+++ b/services/docker-compose.local.yml
@@ -12,6 +12,11 @@
#
version: "3.7"
services:
+ api-server:
+ ports:
+ - "8006:8000"
+ - "3006:3000"
+
catalog:
environment:
- SC_BOOT_MODE=${SC_BOOT_MODE:-default}
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index aea850e08b9..f2da6db23c5 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -1,11 +1,16 @@
version: "3.7"
services:
- api-gateway:
- # get info here: https://www.mock-server.com
- image: mockserver/mockserver
+ api-server:
+ image: ${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest}
init: true
environment:
- - MOCKSERVER_LIVENESS_HTTP_GET_PATH=/live
+ - WEBSERVER_HOST=${WEBSERVER_HOST:-webserver}
+ - LOGLEVEL=${LOG_LEVEL:-INFO}
+ env_file:
+ - ../.env
+ depends_on:
+ - postgres
+ - webserver
deploy:
labels:
- io.simcore.zone=${TRAEFIK_SIMCORE_ZONE}
@@ -14,11 +19,11 @@ services:
# ssl header necessary so that socket.io upgrades correctly from polling to websocket mode. the middleware must be attached to the right connection.
- traefik.http.middlewares.${SWARM_STACK_NAME}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
- traefik.enable=true
- - traefik.http.services.${SWARM_STACK_NAME}_api-gateway.loadbalancer.server.port=1080
- - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.rule=hostregexp(`{host:.+}`)
- - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.entrypoints=simcore_api
- - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.priority=1
- - traefik.http.routers.${SWARM_STACK_NAME}_api-gateway.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME}_sslheader
+ - traefik.http.services.${SWARM_STACK_NAME}_api-server.loadbalancer.server.port=8000
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-server.rule=hostregexp(`{host:.+}`)
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-server.entrypoints=simcore_api
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-server.priority=1
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME}_sslheader
networks:
- default
diff --git a/services/sidecar/Dockerfile b/services/sidecar/Dockerfile
index d8228ac4dbd..50aec0e82bc 100644
--- a/services/sidecar/Dockerfile
+++ b/services/sidecar/Dockerfile
@@ -110,7 +110,7 @@ COPY --chown=scu:scu services/sidecar/docker services/sidecar/docker
# --timeout=20s \
# --start-period=30s \
# --retries=3 \
-# CMD ["python3", "services/api-gateway/docker/healthcheck.py", "http://localhost:8000/"]
+# CMD ["python3", "services/api-server/docker/healthcheck.py", "http://localhost:8000/"]
ENTRYPOINT [ "/bin/sh", "services/sidecar/docker/entrypoint.sh" ]
CMD ["/bin/sh", "services/sidecar/docker/boot.sh"]
diff --git a/services/sidecar/src/simcore_service_sidecar/__version__.py b/services/sidecar/src/simcore_service_sidecar/__version__.py
index c6aac55f8e9..7771daecad3 100644
--- a/services/sidecar/src/simcore_service_sidecar/__version__.py
+++ b/services/sidecar/src/simcore_service_sidecar/__version__.py
@@ -1,4 +1,4 @@
-""" Current version of the simcore_service_api_gateway application
+""" Current version of the simcore_service_sidecar application
"""
import pkg_resources
diff --git a/services/web/server/src/simcore_service_webserver/session.py b/services/web/server/src/simcore_service_webserver/session.py
index f5d42ecf222..18809191d86 100644
--- a/services/web/server/src/simcore_service_webserver/session.py
+++ b/services/web/server/src/simcore_service_webserver/session.py
@@ -56,7 +56,7 @@ def setup_session(app: web.Application):
# EncryptedCookieStorage urlsafe_b64decode inside if passes bytes
storage = EncryptedCookieStorage(
- secret_key=secret_key_bytes[:32], cookie_name="API_SESSION"
+ secret_key=secret_key_bytes[:32], cookie_name="osparc.WEBAPI_SESSION"
)
aiohttp_session.setup(app, storage)
diff --git a/services/web/server/tests/unit/with_dbs/test_login.py b/services/web/server/tests/unit/with_dbs/test_login.py
index 64ca0412e4d..491a3937beb 100644
--- a/services/web/server/tests/unit/with_dbs/test_login.py
+++ b/services/web/server/tests/unit/with_dbs/test_login.py
@@ -1,12 +1,16 @@
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+# pylint: disable=protected-access
+import pytest
from aiohttp import web
+from pytest_simcore.helpers.utils_assert import assert_status
from pytest_simcore.helpers.utils_login import NewUser
+from servicelib.application_keys import APP_CONFIG_KEY
from servicelib.rest_responses import unwrap_envelope
-from simcore_service_webserver.db_models import ConfirmationAction, UserStatus
+from simcore_service_webserver.db_models import UserStatus
from simcore_service_webserver.login.cfg import cfg
EMAIL, PASSWORD = "tester@test.com", "password"
@@ -82,3 +86,64 @@ async def test_login_successfully(client):
assert not error
assert data
assert cfg.MSG_LOGGED_IN in data["message"]
+
+
+@pytest.mark.parametrize(
+ "cookie_enabled,expected", [(True, web.HTTPOk), (False, web.HTTPUnauthorized)]
+)
+async def test_proxy_login(client, cookie_enabled, expected):
+
+ restricted_url = client.app.router["get_my_profile"].url_for()
+ assert str(restricted_url) == "/v0/me"
+
+ def build_proxy_session_cookie(identity: str):
+ # NOTE: Creates proxy session for authenticated uses in the api-server.
+ # Will be used as temporary solution until common authentication
+ # service is in place
+ #
+ import json
+ import base64
+ import time
+ from cryptography import fernet
+
+ # Based on aiohttp_session and aiohttp_security
+
+ # HACK to get secret for testing purposes
+ cfg = client.app[APP_CONFIG_KEY]["session"]
+ secret_key_bytes = cfg["secret_key"].encode("utf-8")
+
+ while len(secret_key_bytes) < 32:
+ secret_key_bytes += secret_key_bytes
+ secret_key = secret_key_bytes[:32]
+
+ if isinstance(secret_key, str):
+ pass
+ elif isinstance(secret_key, (bytes, bytearray)):
+ secret_key = base64.urlsafe_b64encode(secret_key)
+ _fernet = fernet.Fernet(secret_key)
+
+ # builds session cookie
+ cookie_name = "osparc.WEBAPI_SESSION"
+ cookie_data = json.dumps(
+ {
+ "created": int(time.time()), # now
+ "session": {"AIOHTTP_SECURITY": identity},
+ "path": "/",
+ # extras? e.g. expiration
+ }
+ ).encode("utf-8")
+ encrypted_cookie_data = _fernet.encrypt(cookie_data).decode("utf-8")
+
+ return {cookie_name: encrypted_cookie_data}
+
+ # ---
+ async with NewUser() as user:
+ cookies = (
+ build_proxy_session_cookie(identity=user["email"]) if cookie_enabled else {}
+ )
+
+ resp = await client.get(restricted_url, cookies=cookies)
+ data, error = await assert_status(resp, expected)
+
+ if not error:
+ assert data["login"] == user["email"]
diff --git a/tests/swarm-deploy/test_swarm_runs.py b/tests/swarm-deploy/test_swarm_runs.py
index 9b5208117e7..0705d696f13 100644
--- a/tests/swarm-deploy/test_swarm_runs.py
+++ b/tests/swarm-deploy/test_swarm_runs.py
@@ -28,7 +28,7 @@
docker_compose_service_names = [
- "api-gateway",
+ "api-server",
"catalog",
"director",
"sidecar",
From 16c30b9fc72f7416ffe890cd4ccbe6b37efa13ef Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Fri, 19 Jun 2020 08:06:31 +0200
Subject: [PATCH 04/43] Is/add notebook migration script (#1565)
[enhancement] add way to import database from a folder
[maintenance] script to upgrade projects that uses a notebook 2.13.0 to use a 2.13.1 notebook
---
packages/postgres-database/Makefile | 36 ++++++++--
.../scripts/copy_database_volume.sh | 65 +++++++++++++++++--
.../migrate_notebook_2_13_0_to_2_13_1.sql | 15 +++++
3 files changed, 107 insertions(+), 9 deletions(-)
create mode 100644 packages/postgres-database/scripts/migrate_notebook_2_13_0_to_2_13_1.sql
diff --git a/packages/postgres-database/Makefile b/packages/postgres-database/Makefile
index 569261dddb4..8cafd06c062 100644
--- a/packages/postgres-database/Makefile
+++ b/packages/postgres-database/Makefile
@@ -22,21 +22,47 @@ tests: ## runs unit tests
.PHONY: import-db
-import-db: scripts/copy_database_volume.sh guard-SOURCE_HOST guard-SOURCE_DATA_VOLUME guard-TARGET_DATA_VOLUME ## copy volume $(SOURCE_DATA_VOLUME) from $(SOURCE_HOST) into local $(TARGET_DATA_VOLUME)
- ./scripts/copy_database_volume.sh
-
+#guard-SOURCE_HOST guard-SOURCE_DATA_VOLUME guard-TARGET_DATA_VOLUME
+import-db-from-docker-volume import-db-from-folder: scripts/copy_database_volume.sh ## copy postgresql data from remote $host from $host_volume or $host_folder to a local $local_volume docker volume
+ @:$(if $(findstring -from-folder,$@),\
+ $(call check_defined, host host_folder local_volume, please define this variable when calling $@), \
+ $(call check_defined, host host_volume local_volume, please define this variable when calling $@))
+ ./scripts/copy_database_volume.sh \
+ --host $(host) \
+ $(if $(findstring -from-folder, $@),--folder $(host_folder),--volume $(host_volume)) \
+ --target $(local_volume)
+
+# Check that given variables are set and all have non-empty values,
+# die with an error otherwise.
+#
+# Params:
+# 1. Variable name(s) to test.
+# 2. (optional) Error message to print.
guard-%:
@ if [ "${${*}}" = "" ]; then \
echo "Environment variable $* not set"; \
exit 1; \
fi
+# Check that given variables are set and all have non-empty values,
+# die with an error otherwise.
+#
+# Params:
+# 1. Variable name(s) to test.
+# 2. (optional) Error message to print.
+check_defined = \
+ $(strip $(foreach 1,$1, \
+ $(call __check_defined,$1,$(strip $(value 2)))))
+__check_defined = \
+ $(if $(value $1),, \
+ $(error Undefined $1$(if $2, ($2))))
+
.PHONY: setup-prod
-export POSTGRES_DATA_VOLUME = $(TARGET_DATA_VOLUME)
-setup-prod: install-dev up-prod ## sets up a database using an external postgres volume to test migration
+setup-prod: guard-POSTGRES_DATA_VOLUME install-dev up-prod ## sets up a database using an external postgres volume defined as $POSTGRES_DATA_VOLUME to test migration
# discovering
sc-pg --help
@echo "To test migration, sc-pg discover -u USER -p PASSWORD, then sc-pg upgrade"
+ # adminer: http://127.0.0.1:18080/?pgsql=postgres&ns=public
.PHONY: setup-commit
setup-commit: install-dev up-pg ## sets up a database to create a new commit into migration history
diff --git a/packages/postgres-database/scripts/copy_database_volume.sh b/packages/postgres-database/scripts/copy_database_volume.sh
index 9b3f58af810..227c1e46d4e 100755
--- a/packages/postgres-database/scripts/copy_database_volume.sh
+++ b/packages/postgres-database/scripts/copy_database_volume.sh
@@ -1,10 +1,67 @@
#!/bin/sh
+
+usage()
+{
+ echo "usage: copy_database_volume.sh [[[-h host ] [[-f folder] | [-v volume]] [-t target]] | [-h]]"
+}
+
+
+if [ $# -eq 0 ]; then
+ usage
+ exit 1
+fi
+
+
+while [ "$1" != "" ]; do
+ case $1 in
+ -h | --host ) shift
+ host=$1
+ ;;
+ -f | --folder ) shift
+ folder=$1
+ ;;
+ -v | --volume ) shift
+ volume=$1
+ ;;
+ -t | --target ) shift
+ target=$1
+ ;;
+ -h | --help ) usage
+ exit
+ ;;
+ * ) usage
+ exit 1
+ esac
+ shift
+done
+
+if [ -z $host ] || [ -z $target ] || ([ -z $folder ] && [ -z $volume ]); then
+ usage
+ exit 1
+fi
+
+if [ ! -z $folder ] && [ ! -z $volume ]; then
+ echo "cannot use both --folder and --volume arguments"
+ usage
+ exit 1
+fi
+
set -o errexit
-set -o nounset
+# set -o nounset
IFS=$(printf '\n\t')
-ssh "${SOURCE_HOST}" \
- "docker run --rm -v ${SOURCE_DATA_VOLUME}:/from alpine ash -c 'cd /from ; tar -cf - . '" \
+
+if [ ! -z $folder ]; then
+ #folder mode
+ ssh $host \
+ "tar -cf - $folder " \
+ | \
+ docker run --rm -i -v "$target":/to alpine ash -c "cd /to ; tar -xpvf - "
+else
+ #docker mode
+ ssh $host \
+ "docker run --rm -v $volume:/from alpine ash -c 'cd /from ; tar -cf - . '" \
| \
- docker run --rm -i -v "${TARGET_DATA_VOLUME}":/to alpine ash -c "cd /to ; tar -xpvf - "
\ No newline at end of file
+ docker run --rm -i -v "$target":/to alpine ash -c "cd /to ; tar -xpvf - "
+fi
diff --git a/packages/postgres-database/scripts/migrate_notebook_2_13_0_to_2_13_1.sql b/packages/postgres-database/scripts/migrate_notebook_2_13_0_to_2_13_1.sql
new file mode 100644
index 00000000000..9e72703cb4b
--- /dev/null
+++ b/packages/postgres-database/scripts/migrate_notebook_2_13_0_to_2_13_1.sql
@@ -0,0 +1,15 @@
+-- select the projects with the services to update (replace the regexp accordingly. NOTE: % is equivalent to .* in SQL)
+-- SELECT workbench
+-- FROM projects
+-- WHERE workbench::text SIMILAR TO '%-notebook", "version": "2.13.0"%'
+-- replace the regexp here in order to update the targeted notebook version
+UPDATE projects
+SET workbench = (
+ regexp_replace(
+ workbench::text,
+ '-notebook", "version": "2.13.0"',
+ '-notebook", "version": "2.13.1"',
+ 'g'
+ )::json
+ )
+WHERE workbench::text SIMILAR TO '%-notebook", "version": "2.13.0"%'
From b6a2568fd97c016fb97a209645c2924317e3494b Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Fri, 19 Jun 2020 13:12:14 +0200
Subject: [PATCH 05/43] Manage groups, Share studies (#1512)
Backend:
- adds groups endpoint to list, create, get, modify, delete groups through /groups endpoint
- a user in a group has read/write/delete access rights (e.g. member,manager,administrator)
- add/list/modify/delete users in groups through /groups/{gid}/users endpoint
- allow sharing of projects using access_rights defining for each group id the specific read/write/delete rights
- migrates postgres DB accordingly (migrates old access_rights syntax for projects, deprecates user_to_projects table, adds groups thumbnails, )
Frontend:
- New page in Preferences for managing/displaying Organizations and its members
- Testers can create Organizations
- Organization Managers can invite (actually add) existing osparc members by providing their emails
- Organization Managers can remove members
- Organization Managers can promote members to Manager
- Organization SuperManagers can edit organization details
- Study owners can share the same instance of a study with Organizations and/or Organization Members (Collaborators)
- Study owners can make other collaborators Owner
- Study owners can remove collaborators
Bonus
Backend:
- adds a decorator for checking role permissions
- docs of how to use sc-pg migration capabilities
Frontend:
- Can edit study details also from inside study
- Allow dropping files with no extension
- Show hand cursor over draggable NodeUI ports
- Show move cursor over NodeUI caption bar
- Fix multiFilePicker bug
- Switch button for changing theme
---
.../schemas/project-v0.0.1-converted.yaml | 20 +
api/specs/common/schemas/project-v0.0.1.json | 31 +-
.../webserver/components/schemas/group.yaml | 155 ++
.../webserver/components/schemas/me.yaml | 72 +-
api/specs/webserver/openapi-groups.yaml | 208 ++
api/specs/webserver/openapi-projects.yaml | 18 +-
api/specs/webserver/openapi.yaml | 34 +-
mypy.ini | 41 +
packages/postgres-database/Makefile | 3 +-
packages/postgres-database/README.md | 22 +-
.../1ca14c33e65c_add_groups_accessrights.py | 28 +
...94f201e484a_add_user_to_project_cascade.py | 97 +
...8_move_groups_accessrights_to_users_to_.py | 30 +
.../bb305829cf83_add_groups_thumbnail.py | 28 +
.../models/groups.py | 10 +
.../models/user_to_projects.py | 19 +-
.../storage_models.py | 6 +-
.../webserver_models.py | 2 -
.../tests/test_delete_projects_and_users.py | 18 -
.../pytest_simcore/helpers/utils_projects.py | 8 +-
.../src/servicelib/application.py | 2 +-
.../api/v0/schemas/project-v0.0.1.json | 31 +-
.../api/v0/openapi.yaml | 80 +
.../api/v0/schemas/project-v0.0.1.json | 31 +-
.../src/simcore_service_storage/dsm.py | 15 +-
.../src/simcore_service_storage/models.py | 2 -
services/storage/tests/data/projects.csv | 2 +-
.../storage/tests/data/user_to_projects.csv | 2 -
services/storage/tests/utils.py | 36 +-
.../web/client/source/class/osparc/About.js | 4 +-
.../client/source/class/osparc/auth/Data.js | 18 +
.../source/class/osparc/auth/Manager.js | 2 +
.../osparc/component/export/ExportDAG.js | 8 +-
.../osparc/component/export/Permissions.js | 329 +++
.../osparc/component/export/SaveAsTemplate.js | 72 +-
.../osparc/component/export/ShareWith.js | 150 +-
.../component/filter/OrganizationMembers.js | 70 +
.../osparc/component/filter/Organizations.js | 27 +-
.../filter/OrganizationsAndMembers.js | 62 +
.../component/form/ToggleButtonContainer.js | 4 +-
.../osparc/component/form/tag/TagItem.js | 4 +-
.../osparc/component/form/tag/TagManager.js | 2 +-
.../component/metadata/StudyDetailsEditor.js | 171 +-
.../component/metadata/StudyDetailsWindow.js | 58 -
.../osparc/component/metadata/StudyInfo.js | 31 +-
.../component/service/manager/ActivityTree.js | 2 +-
.../component/widget/InputsMapperTreeItem.js | 4 -
.../component/widget/OrganizationListItem.js | 96 -
.../widget/inputs/NodeOutputTreeItem.js | 4 -
.../osparc/component/workbench/NodeUI.js | 8 +-
.../osparc/component/workbench/WorkbenchUI.js | 16 +-
.../osparc/dashboard/CollaboratorListItem.js | 134 +
.../osparc/dashboard/OrgMemberListItem.js | 125 +
.../osparc/dashboard/OrganizationEditor.js | 173 ++
.../osparc/dashboard/OrganizationListItem.js | 100 +
.../class/osparc/dashboard/ServiceBrowser.js | 2 +-
.../dashboard/ServiceBrowserListItem.js | 52 +-
.../class/osparc/dashboard/StudyBrowser.js | 175 +-
.../dashboard/StudyBrowserButtonBase.js | 12 +-
.../dashboard/StudyBrowserButtonItem.js | 72 +-
.../osparc/dashboard/StudyBrowserButtonNew.js | 2 +-
.../source/class/osparc/data/Permissions.js | 15 +-
.../source/class/osparc/data/Resources.js | 95 +-
.../source/class/osparc/data/model/Study.js | 2 +-
.../class/osparc/desktop/NavigationBar.js | 7 +-
.../class/osparc/desktop/StudyEditor.js | 2 +-
.../desktop/preferences/PreferencesWindow.js | 13 +-
.../desktop/preferences/pages/BasePage.js | 12 +-
.../preferences/pages/OrganizationsPage.js | 410 +++
.../desktop/preferences/pages/ProfilePage.js | 39 +-
.../desktop/preferences/pages/SecurityPage.js | 8 +-
.../client/source/class/osparc/store/Store.js | 93 +-
.../source/class/osparc/ui/basic/Tag.js | 2 +-
.../source/class/osparc/ui/hint/Hint.js | 2 +-
.../class/osparc/ui/markdown/Markdown.js | 2 +-
.../source/class/osparc/ui/switch/Switch.js | 51 +
.../class/osparc/ui/switch/ThemeSwitcher.js | 41 +
.../client/source/class/osparc/utils/Utils.js | 10 +-
.../client/source/resource/common/common.css | 4 +
.../client/source/resource/form/service.json | 2 +-
.../api/v0/openapi.yaml | 2322 +++++++++++++++--
.../api/v0/schemas/project-v0.0.1.json | 31 +-
.../simcore_service_webserver/application.py | 5 +-
.../application_config.py | 1 +
.../computation_comp_tasks_listening_task.py | 72 +-
.../computation_handlers.py | 2 +-
.../computation_subscribe.py | 8 +-
.../data/fake-template-projects.isan.json | 6 +-
.../src/simcore_service_webserver/groups.py | 42 +
.../simcore_service_webserver/groups_api.py | 310 +++
.../groups_exceptions.py | 32 +
.../groups_handlers.py | 190 ++
.../simcore_service_webserver/groups_utils.py | 54 +
.../projects/projects_api.py | 14 +-
.../projects/projects_db.py | 302 ++-
.../projects/projects_handlers.py | 76 +-
.../projects/projects_models.py | 2 -
.../publication_handlers.py | 2 +-
.../security_decorators.py | 28 +
.../security_roles.py | 35 +-
.../simcore_service_webserver/users_api.py | 187 +-
.../users_exceptions.py | 29 +
.../users_handlers.py | 209 +-
.../simcore_service_webserver/users_utils.py | 22 +
.../web/server/tests/integration/conftest.py | 65 +-
.../integration/test_project_workflow.py | 18 +-
.../server/tests/unit/test_projects_models.py | 4 +-
.../tests/unit/with_dbs/config-devel.yml | 6 +-
.../server/tests/unit/with_dbs/conftest.py | 54 +-
.../unit/with_dbs/docker-compose-devel.yml | 6 +-
.../tests/unit/with_dbs/docker-compose.yml | 8 +-
.../unit/with_dbs/test_access_to_studies.py | 2 +-
.../server/tests/unit/with_dbs/test_groups.py | 569 ++++
.../tests/unit/with_dbs/test_projects.py | 564 ++--
.../server/tests/unit/with_dbs/test_users.py | 12 +-
tests/e2e/tutorials/sleepers.js | 10 +-
.../sleepers_project_template_sql.csv | 4 +-
117 files changed, 7623 insertions(+), 1530 deletions(-)
create mode 100644 api/specs/webserver/components/schemas/group.yaml
create mode 100644 api/specs/webserver/openapi-groups.yaml
create mode 100644 mypy.ini
create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/1ca14c33e65c_add_groups_accessrights.py
create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/694f201e484a_add_user_to_project_cascade.py
create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/72f8be1c4838_move_groups_accessrights_to_users_to_.py
create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/bb305829cf83_add_groups_thumbnail.py
delete mode 100644 services/storage/tests/data/user_to_projects.csv
create mode 100644 services/web/client/source/class/osparc/component/export/Permissions.js
create mode 100644 services/web/client/source/class/osparc/component/filter/OrganizationMembers.js
create mode 100644 services/web/client/source/class/osparc/component/filter/OrganizationsAndMembers.js
delete mode 100644 services/web/client/source/class/osparc/component/metadata/StudyDetailsWindow.js
delete mode 100644 services/web/client/source/class/osparc/component/widget/OrganizationListItem.js
create mode 100644 services/web/client/source/class/osparc/dashboard/CollaboratorListItem.js
create mode 100644 services/web/client/source/class/osparc/dashboard/OrgMemberListItem.js
create mode 100644 services/web/client/source/class/osparc/dashboard/OrganizationEditor.js
create mode 100644 services/web/client/source/class/osparc/dashboard/OrganizationListItem.js
create mode 100644 services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js
create mode 100644 services/web/client/source/class/osparc/ui/switch/Switch.js
create mode 100644 services/web/client/source/class/osparc/ui/switch/ThemeSwitcher.js
create mode 100644 services/web/server/src/simcore_service_webserver/groups.py
create mode 100644 services/web/server/src/simcore_service_webserver/groups_api.py
create mode 100644 services/web/server/src/simcore_service_webserver/groups_exceptions.py
create mode 100644 services/web/server/src/simcore_service_webserver/groups_handlers.py
create mode 100644 services/web/server/src/simcore_service_webserver/groups_utils.py
create mode 100644 services/web/server/src/simcore_service_webserver/security_decorators.py
create mode 100644 services/web/server/src/simcore_service_webserver/users_exceptions.py
create mode 100644 services/web/server/src/simcore_service_webserver/users_utils.py
create mode 100644 services/web/server/tests/unit/with_dbs/test_groups.py
diff --git a/api/specs/common/schemas/project-v0.0.1-converted.yaml b/api/specs/common/schemas/project-v0.0.1-converted.yaml
index 28604243918..47b8c6bf938 100644
--- a/api/specs/common/schemas/project-v0.0.1-converted.yaml
+++ b/api/specs/common/schemas/project-v0.0.1-converted.yaml
@@ -34,6 +34,26 @@ properties:
description: >-
object containing the GroupID as key and read/write/execution permissions
as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
diff --git a/api/specs/common/schemas/project-v0.0.1.json b/api/specs/common/schemas/project-v0.0.1.json
index d40e6ceb1a4..4ecd40e4248 100644
--- a/api/specs/common/schemas/project-v0.0.1.json
+++ b/api/specs/common/schemas/project-v0.0.1.json
@@ -46,7 +46,34 @@
},
"accessRights": {
"type": "object",
- "description": "object containing the GroupID as key and read/write/execution permissions as value"
+ "description": "object containing the GroupID as key and read/write/execution permissions as value",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^\\d+$": {
+ "type": "object",
+ "description": "the group id",
+ "additionalProperties": false,
+ "required": [
+ "read",
+ "write",
+ "delete"
+ ],
+ "properties": {
+ "read": {
+ "type": "boolean",
+ "description": "gives read access"
+ },
+ "write": {
+ "type": "boolean",
+ "description": "gives write access"
+ },
+ "delete": {
+ "type": "boolean",
+ "description": "gives deletion rights"
+ }
+ }
+ }
+ }
},
"creationDate": {
"type": "string",
@@ -307,4 +334,4 @@
}
}
}
-}
\ No newline at end of file
+}
diff --git a/api/specs/webserver/components/schemas/group.yaml b/api/specs/webserver/components/schemas/group.yaml
new file mode 100644
index 00000000000..ebb7bfde29e
--- /dev/null
+++ b/api/specs/webserver/components/schemas/group.yaml
@@ -0,0 +1,155 @@
+GroupAccessRights:
+  description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ # Member
+ - read: true
+ write: false
+ delete: false
+ # Manager
+ - read: true
+ write: true
+ delete: false
+ # Administrator
+ - read: true
+ write: true
+ delete: true
+
+UsersGroup:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ $ref: "#/GroupAccessRights"
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: "27"
+ label: "A user"
+ description: "A very special user"
+ thumbnail: https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png
+ - gid: "1"
+ label: "ITIS Foundation"
+ description: "The Foundation for Research on Information Technologies in Society"
+ thumbnail: https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png
+ - gid: "0"
+ label: "All"
+ description: "Open to all users"
+ thumbnail: https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png
+
+UsersGroupEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ $ref: "#/UsersGroup"
+ error:
+ nullable: true
+ default: null
+
+AllUsersGroups:
+ type: object
+ properties:
+ me:
+ $ref: "#/UsersGroup"
+ organizations:
+ type: array
+ items:
+ $ref: "#/UsersGroup"
+ all:
+ $ref: "#/UsersGroup"
+
+AllUsersGroupsEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ $ref: "#/AllUsersGroups"
+ error:
+ nullable: true
+ default: null
+
+GroupUser:
+ type: object
+ allOf:
+ - type: object
+ properties:
+ first_name:
+ type: string
+ description: the user first name
+ last_name:
+ type: string
+ description: the user last name
+ login:
+ type: string
+ format: email
+ description: the user login email
+ gravatar_id:
+ type: string
+ description: the user gravatar id hash
+ id:
+ type: string
+ description: the user id
+ gid:
+ type: string
+ description: the user primary gid
+ example:
+ first_name: Mr
+ last_name: Smith
+ login: mr.smith@matrix.com
+ gravatar_id: a1af5c6ecc38e81f29695f01d6ceb540
+ id: "1"
+ gid: "3"
+ - $ref: "#/GroupAccessRights"
+
+GroupUsersArrayEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: array
+ items:
+ $ref: "#/GroupUser"
+ error:
+ nullable: true
+ default: null
+
+GroupUserEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ $ref: "#/GroupUser"
+ error:
+ nullable: true
+ default: null
diff --git a/api/specs/webserver/components/schemas/me.yaml b/api/specs/webserver/components/schemas/me.yaml
index d25e2c22ccc..00a85ab916f 100644
--- a/api/specs/webserver/components/schemas/me.yaml
+++ b/api/specs/webserver/components/schemas/me.yaml
@@ -6,44 +6,32 @@ ProfileCommon:
last_name:
type: string
example:
- login: pcrespov@foo.com
first_name: Pedro
last_name: Crespo
ProfileInput:
allOf:
- - $ref: '#/ProfileCommon'
+ - $ref: "#/ProfileCommon"
example:
first_name: Pedro
last_name: Crespo
ProfileOutput:
allOf:
- - $ref: '#/ProfileCommon'
- - type: object
- properties:
- login:
- type: string
- format: email
- role:
- type: string
- groups:
- type: object
- properties:
- me:
- $ref: '#/UsersGroup'
- organizations:
- type: array
- items:
- $ref: '#/UsersGroup'
- all:
- $ref: '#/UsersGroup'
- gravatar_id:
- type: string
+ - $ref: "#/ProfileCommon"
+ - type: object
+ properties:
+ login:
+ type: string
+ format: email
+ role:
+ type: string
+ groups:
+ $ref: "./group.yaml#/AllUsersGroups"
+ gravatar_id:
+ type: string
example:
login: pcrespov@foo.com
- first_name: Pedro
- last_name: Crespo
role: Admin
gravatar_id: 205e460b479e2e5b48aec07710c08d50
@@ -53,32 +41,11 @@ ProfileEnveloped:
- data
properties:
data:
- $ref: '#/ProfileOutput'
+ $ref: "#/ProfileOutput"
error:
nullable: true
default: null
-
-UsersGroup:
- type: object
- properties:
- gid:
- type: string
- label:
- type: string
- description:
- type: string
- example:
- - gid: '27'
- label: 'A user'
- description: 'A very special user'
- - gid: '1'
- label: 'ITIS Foundation'
- description: 'The Foundation for Research on Information Technologies in Society'
- - gid: '0'
- label: 'All'
- description: 'Open to all users'
-
Token:
description: api keys for third party services
type: object
@@ -97,28 +64,25 @@ Token:
- service
- token_key
example:
- service: 'github-api-v1'
+ service: "github-api-v1"
token_key: N1BP5ZSpB
-
TokenId:
+  description: token identifier
type: string
# format: uuid
-
TokenEnveloped:
type: object
required:
- data
properties:
data:
- $ref: '#/Token'
+ $ref: "#/Token"
error:
nullable: true
default: null
-
TokensArrayEnveloped:
type: object
required:
@@ -127,7 +91,7 @@ TokensArrayEnveloped:
data:
type: array
items:
- $ref: '#/Token'
+ $ref: "#/Token"
error:
nullable: true
default: null
@@ -138,7 +102,7 @@ TokenIdEnveloped:
- data
properties:
data:
- $ref: '#/TokenId'
+ $ref: "#/TokenId"
error:
nullable: true
default: null
diff --git a/api/specs/webserver/openapi-groups.yaml b/api/specs/webserver/openapi-groups.yaml
new file mode 100644
index 00000000000..5de07b012bf
--- /dev/null
+++ b/api/specs/webserver/openapi-groups.yaml
@@ -0,0 +1,208 @@
+paths:
+ /groups:
+ get:
+ summary: List my groups
+ operationId: list_groups
+ tags:
+ - group
+ responses:
+ "200":
+ description: list of the groups I belonged to
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/AllUsersGroupsEnveloped"
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+ post:
+ summary: Create a new group
+ operationId: create_group
+ tags:
+ - group
+ requestBody:
+ required: true
+ description: the group to create
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/UsersGroup"
+ responses:
+ "201":
+ description: group created
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/UsersGroupEnveloped"
+
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+
+ /groups/{gid}:
+ parameters:
+ - name: gid
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - group
+ summary: Gets one group details
+ operationId: get_group
+ responses:
+ "200":
+ description: got group
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/UsersGroupEnveloped"
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+ patch:
+ summary: Update one group
+ operationId: update_group
+ tags:
+ - group
+ requestBody:
+ required: true
+ description: the group to update
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/UsersGroup"
+ responses:
+ "200":
+ description: the modified group
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/UsersGroupEnveloped"
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+ delete:
+ tags:
+ - group
+ summary: Deletes one group
+ operationId: delete_group
+ responses:
+ "204":
+ description: group has been successfully deleted
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+
+ /groups/{gid}/users:
+ parameters:
+ - name: gid
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - group
+ summary: Gets list of users in group
+ operationId: get_group_users
+ responses:
+ "200":
+ description: got list of users and their respective rights
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/GroupUsersArrayEnveloped"
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+ post:
+ tags:
+ - group
+ summary: Adds a user in the group
+ operationId: add_group_user
+ requestBody:
+ required: true
+ description: the user to add
+ content:
+ application/json:
+ schema:
+ anyOf:
+ - type: object
+ required:
+ - uid
+ properties:
+ uid:
+ type: string
+ description: the user id
+ - type: object
+ required:
+ - email
+ properties:
+ email:
+ type: string
+ format: email
+ description: the user email
+ responses:
+ "204":
+ description: user successfully added
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+
+ /groups/{gid}/users/{uid}:
+ parameters:
+ - name: gid
+ in: path
+ required: true
+ schema:
+ type: string
+ - name: uid
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - group
+ summary: Gets specific user in group
+ operationId: get_group_user
+ responses:
+ "200":
+ description: got user
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/GroupUserEnveloped"
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+ patch:
+ tags:
+ - group
+ summary: Modify specific user in group
+ operationId: update_group_user
+ requestBody:
+ required: true
+ description: the user rights to modify
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/GroupAccessRights"
+ responses:
+ "200":
+ description: modified user
+ content:
+ application/json:
+ schema:
+ $ref: "./components/schemas/group.yaml#/GroupUserEnveloped"
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+ delete:
+ tags:
+ - group
+ summary: Delete specific user in group
+ operationId: delete_group_user
+ responses:
+ "204":
+ description: successfully removed user
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+
+components:
+ responses:
+ DefaultErrorResponse:
+ $ref: "./openapi.yaml#/components/responses/DefaultErrorResponse"
diff --git a/api/specs/webserver/openapi-projects.yaml b/api/specs/webserver/openapi-projects.yaml
index ad85241d1c8..197fdbb1438 100644
--- a/api/specs/webserver/openapi-projects.yaml
+++ b/api/specs/webserver/openapi-projects.yaml
@@ -78,7 +78,7 @@ paths:
summary: Gets active project
operationId: get_active_project
responses:
- '200':
+ "200":
description: returns active project
content:
application/json:
@@ -236,14 +236,14 @@ paths:
service_version: "1.4.0"
responses:
- '201':
+ "201":
description: created
content:
application/json:
schema:
- $ref: './openapi-projects.yaml#/components/schemas/NodeEnveloped'
+ $ref: "./openapi-projects.yaml#/components/schemas/NodeEnveloped"
default:
- $ref: './openapi.yaml#/components/responses/DefaultErrorResponse'
+ $ref: "./openapi.yaml#/components/responses/DefaultErrorResponse"
/projects/{project_id}/nodes/{node_id}:
parameters:
@@ -283,7 +283,7 @@ paths:
"204":
description: node has been successfully deleted from project
default:
- $ref: './openapi.yaml#/components/responses/DefaultErrorResponse'
+ $ref: "./openapi.yaml#/components/responses/DefaultErrorResponse"
/projects/{study_uuid}/tags/{tag_id}:
parameters:
@@ -303,7 +303,7 @@ paths:
summary: Links an existing label with an existing study
operationId: add_tag
responses:
- '200':
+ "200":
description: The tag has been successfully linked to the study
content:
application/json:
@@ -317,7 +317,7 @@ paths:
summary: Removes an existing link between a label and a study
operationId: remove_tag
responses:
- '200':
+ "200":
description: The tag has been successfully removed from the study
content:
application/json:
@@ -348,7 +348,7 @@ components:
- data
properties:
data:
- $ref: './openapi-projects.yaml#/components/schemas/Node'
+ $ref: "./openapi-projects.yaml#/components/schemas/Node"
error:
nullable: true
default: null
@@ -363,7 +363,7 @@ components:
$ref: "../common/schemas/project.yaml#/components/schemas/ProjectArrayEnveloped"
RunningServiceEnveloped:
- $ref: '../common/schemas/running_service.yaml#/components/schemas/RunningServiceEnveloped'
+ $ref: "../common/schemas/running_service.yaml#/components/schemas/RunningServiceEnveloped"
responses:
DefaultErrorResponse:
diff --git a/api/specs/webserver/openapi.yaml b/api/specs/webserver/openapi.yaml
index ef5c6c20782..34d54bd2e85 100644
--- a/api/specs/webserver/openapi.yaml
+++ b/api/specs/webserver/openapi.yaml
@@ -48,7 +48,6 @@ tags:
oSPARC users can make publications. Starting from submission of new service candidates, but this could
end up accepting other types of publications.
-
paths:
# DIAGNOSTICS ---------------------------------------------------------
/:
@@ -102,6 +101,20 @@ paths:
/me/tokens/{service}:
$ref: "./openapi-user.yaml#/paths/~1me~1tokens~1{service}"
+ # GROUP SETTINGS ------------------------------------------------------------------
+
+ /groups:
+ $ref: "./openapi-groups.yaml#/paths/~1groups"
+
+ /groups/{gid}:
+ $ref: "./openapi-groups.yaml#/paths/~1groups~1{gid}"
+
+ /groups/{gid}/users:
+ $ref: "./openapi-groups.yaml#/paths/~1groups~1{gid}~1users"
+
+ /groups/{gid}/users/{uid}:
+ $ref: "./openapi-groups.yaml#/paths/~1groups~1{gid}~1users~1{uid}"
+
# DATA STORAGE SERVICES ----------------------------------------------------------
/storage/locations:
@@ -150,10 +163,10 @@ paths:
$ref: "./openapi-projects.yaml#/paths/~1projects~1{project_id}~1close"
/projects/{project_id}/nodes:
- $ref: './openapi-projects.yaml#/paths/~1projects~1{project_id}~1nodes'
+ $ref: "./openapi-projects.yaml#/paths/~1projects~1{project_id}~1nodes"
/projects/{project_id}/nodes/{node_id}:
- $ref: './openapi-projects.yaml#/paths/~1projects~1{project_id}~1nodes~1{node_id}'
+ $ref: "./openapi-projects.yaml#/paths/~1projects~1{project_id}~1nodes~1{node_id}"
/nodes/{nodeInstanceUUID}/outputUi/{outputKey}:
$ref: "./openapi-node-v0.0.1.yaml#/paths/~1nodes~1{nodeInstanceUUID}~1outputUi~1{outputKey}"
@@ -165,7 +178,7 @@ paths:
$ref: "./openapi-node-v0.0.1.yaml#/paths/~1nodes~1{nodeInstanceUUID}~1iframe"
/projects/{study_uuid}/tags/{tag_id}:
- $ref: './openapi-projects.yaml#/paths/~1projects~1{study_uuid}~1tags~1{tag_id}'
+ $ref: "./openapi-projects.yaml#/paths/~1projects~1{study_uuid}~1tags~1{tag_id}"
# ACTIVITY -------------------------------------------------------------------------
/activity/status:
@@ -173,21 +186,20 @@ paths:
# TAGS -------------------------------------------------------------------------
/tags:
- $ref: './openapi-tags.yaml#/paths/~1tags'
-
+ $ref: "./openapi-tags.yaml#/paths/~1tags"
+
/tags/{tag_id}:
- $ref: './openapi-tags.yaml#/paths/~1tags~1{tag_id}'
+ $ref: "./openapi-tags.yaml#/paths/~1tags~1{tag_id}"
# PUBLICATIONS -------------------------------------------------------------------------
/publications/service-submission:
- $ref: './openapi-publications.yaml#/paths/~1publications~1service-submission'
+ $ref: "./openapi-publications.yaml#/paths/~1publications~1service-submission"
# CATALOG -------------------------------------------------------------------------
/catalog/dags:
- $ref: './openapi-catalog.yaml#/paths/~1catalog~1dags'
+ $ref: "./openapi-catalog.yaml#/paths/~1catalog~1dags"
/catalog/dags/{dag_id}:
- $ref: './openapi-catalog.yaml#/paths/~1catalog~1dags~1{dag_id}'
-
+ $ref: "./openapi-catalog.yaml#/paths/~1catalog~1dags~1{dag_id}"
components:
responses:
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000000..bd8c2057cf8
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,41 @@
+# Global options:
+[mypy]
+python_version = 3.6
+warn_return_any = True
+warn_unused_configs = True
+
+# Per-module options:
+[mypy-aio-pika.*]
+ignore_missing_imports = True
+[mypy-aiohttp.*]
+ignore_missing_imports = True
+[mypy-aiohttp_jinja2.*]
+ignore_missing_imports = True
+[mypy-aiohttp_security.*]
+ignore_missing_imports = True
+[mypy-aiopg.*]
+ignore_missing_imports = True
+[mypy-aiosmtplib.*]
+ignore_missing_imports = True
+[mypy-asyncpg.*]
+ignore_missing_imports = True
+[mypy-celery.*]
+ignore_missing_imports = True
+[mypy-change_case.*]
+ignore_missing_imports = True
+[mypy-json2html.*]
+ignore_missing_imports = True
+[mypy-jsondiff.*]
+ignore_missing_imports = True
+[mypy-passlib.*]
+ignore_missing_imports = True
+[mypy-prometheus_client.*]
+ignore_missing_imports = True
+[mypy-psycopg2.*]
+ignore_missing_imports = True
+[mypy-sqlalchemy.*]
+ignore_missing_imports = True
+[mypy-tenacity.*]
+ignore_missing_imports = True
+[mypy-trafaret.*]
+ignore_missing_imports = True
diff --git a/packages/postgres-database/Makefile b/packages/postgres-database/Makefile
index 8cafd06c062..630ede5bf07 100644
--- a/packages/postgres-database/Makefile
+++ b/packages/postgres-database/Makefile
@@ -74,6 +74,7 @@ setup-commit: install-dev up-pg ## sets up a database to create a new commit int
# some info
sc-pg info
@echo "To add new commit, sc-pg review -m \"Some message\" "
+ # adminer: http://127.0.0.1:18080/?pgsql=postgres&username=test&db=test&ns=public
.PHONY: migrate
@@ -96,5 +97,3 @@ up-pg up-prod: $(docker-compose-configs) ## starts pg server
down-pg down-prod: $(docker-compose-configs) ## stops pg server
docker-compose -f tests/docker-compose.yml $(if $(findstring -prod,$@),-f tests/docker-compose.prod.yml,) down
-
-
diff --git a/packages/postgres-database/README.md b/packages/postgres-database/README.md
index 08425d2a363..78164b328b5 100644
--- a/packages/postgres-database/README.md
+++ b/packages/postgres-database/README.md
@@ -50,10 +50,30 @@ Once finalized, the migration script also needs to be added to version control.
### Upgrade
Upgrades to given revision (get ``info`` to check history)
+
```bash
simcore-postgres-database upgrade head
```
-
[alembic]:https://alembic.sqlalchemy.org/en/latest/
[flask-migrate]:https://flask-migrate.readthedocs.io/en/latest/
+
+
+### Development
+
+1. In order to create/modify/delete tables one can use sc-pg to start a clean database:
+
+ ```console
+ make setup-commit # this will start a clean database and it is visible under http://127.0.0.1:18080/?pgsql=postgres&username=test&db=test&ns=public
+ ```
+
+2. Modify the models in [src/simcore_postgres_database/models](src/simcore_postgres_database/models) according to the new needs
+3. Create a migration script:
+
+ ```console
+ sc-pg review -m "some meaningful message" # this will generate an alembic migration script in [scripts](./scripts)
+ sc-pg upgrade # this will apply the generated migration script on the database
+ sc-pg downgrade # this will downgrade the database again to the previous state
+ ```
+
+ NOTE: when changing the scripts, one needs to delete the current script or the database state will be undefined.
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/1ca14c33e65c_add_groups_accessrights.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/1ca14c33e65c_add_groups_accessrights.py
new file mode 100644
index 00000000000..1bc7827cf6e
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/1ca14c33e65c_add_groups_accessrights.py
@@ -0,0 +1,28 @@
+"""add groups accessRights
+
+Revision ID: 1ca14c33e65c
+Revises: 53e095260441
+Create Date: 2020-05-29 13:28:10.425714+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '1ca14c33e65c'
+down_revision = '53e095260441'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('groups', sa.Column('access_rights', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), nullable=False))
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('groups', 'access_rights')
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/694f201e484a_add_user_to_project_cascade.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/694f201e484a_add_user_to_project_cascade.py
new file mode 100644
index 00000000000..0c191918196
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/694f201e484a_add_user_to_project_cascade.py
@@ -0,0 +1,97 @@
+"""add user_to_project cascade
+
+Revision ID: 694f201e484a
+Revises: 72f8be1c4838
+Create Date: 2020-06-05 14:18:55.443267+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "694f201e484a"
+down_revision = "72f8be1c4838"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint(
+ "user_to_projects_user_id_fkey", "user_to_projects", type_="foreignkey"
+ )
+ op.drop_constraint(
+ "user_to_projects_project_id_fkey", "user_to_projects", type_="foreignkey"
+ )
+ op.create_foreign_key(
+ "fk_user_to_projects_id_projects",
+ "user_to_projects",
+ "projects",
+ ["project_id"],
+ ["id"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+ op.create_foreign_key(
+ "fk_user_to_projects_id_users",
+ "user_to_projects",
+ "users",
+ ["user_id"],
+ ["id"],
+ onupdate="CASCADE",
+ ondelete="CASCADE",
+ )
+
+ # change contents in projects access_rights
+ # NOTE: this does not need to be reversed as it was not used before
+ op.execute(
+ sa.DDL(
+ "UPDATE projects SET access_rights = (regexp_replace(access_rights::text, '\"rwx\"', '{\"read\":true, \"write\":false, \"delete\":false}')::jsonb) WHERE access_rights != '{}'"
+ )
+ )
+ # add prj_owner into access rights column
+    # NOTE: this does not need to be reversed
+ op.execute(
+ sa.DDL(
+ """
+WITH user_project as (
+ SELECT projects.id AS pid, projects.access_rights AS current_rights, '{"' || users.primary_gid || '"}' AS json_key
+ FROM projects
+ INNER JOIN users
+ ON (projects.prj_owner = users.id)
+)
+
+UPDATE projects
+ SET access_rights = jsonb_insert(current_rights::jsonb,json_key::text[], '{"read":true, "write":true, "delete":true}'::jsonb, true)
+ FROM user_project
+ WHERE projects.id = pid
+ """
+ )
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint(
+ "fk_user_to_projects_id_users", "user_to_projects", type_="foreignkey"
+ )
+ op.drop_constraint(
+ "fk_user_to_projects_id_projects", "user_to_projects", type_="foreignkey"
+ )
+ op.create_foreign_key(
+ "user_to_projects_project_id_fkey",
+ "user_to_projects",
+ "projects",
+ ["project_id"],
+ ["id"],
+ )
+ op.create_foreign_key(
+ "user_to_projects_user_id_fkey",
+ "user_to_projects",
+ "users",
+ ["user_id"],
+ ["id"],
+ )
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/72f8be1c4838_move_groups_accessrights_to_users_to_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/72f8be1c4838_move_groups_accessrights_to_users_to_.py
new file mode 100644
index 00000000000..7597be8538d
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/72f8be1c4838_move_groups_accessrights_to_users_to_.py
@@ -0,0 +1,30 @@
+"""move groups accessrights to users_to_group
+
+Revision ID: 72f8be1c4838
+Revises: bb305829cf83
+Create Date: 2020-06-02 13:01:35.073902+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '72f8be1c4838'
+down_revision = 'bb305829cf83'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('groups', 'access_rights')
+ op.add_column('user_to_groups', sa.Column('access_rights', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text('\'{"read": true, "write": false, "delete": false}\'::jsonb'), nullable=False))
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('user_to_groups', 'access_rights')
+ op.add_column('groups', sa.Column('access_rights', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), autoincrement=False, nullable=False))
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/bb305829cf83_add_groups_thumbnail.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/bb305829cf83_add_groups_thumbnail.py
new file mode 100644
index 00000000000..ebd81cf09d8
--- /dev/null
+++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/bb305829cf83_add_groups_thumbnail.py
@@ -0,0 +1,28 @@
+"""add groups thumbnail
+
+Revision ID: bb305829cf83
+Revises: 1ca14c33e65c
+Create Date: 2020-06-02 12:06:21.302890+00:00
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'bb305829cf83'
+down_revision = '1ca14c33e65c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('groups', sa.Column('thumbnail', sa.String(), nullable=True))
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('groups', 'thumbnail')
+ # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/groups.py b/packages/postgres-database/src/simcore_postgres_database/models/groups.py
index 4051a0a25bb..8ddb68a3a2e 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/groups.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/groups.py
@@ -7,6 +7,7 @@
import enum
import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.sql import func
from .base import metadata
@@ -32,6 +33,7 @@ class GroupType(enum.Enum):
sa.Column("name", sa.String, nullable=False),
sa.Column("description", sa.String, nullable=False),
sa.Column("type", sa.Enum(GroupType), nullable=False, server_default="STANDARD"),
+ sa.Column("thumbnail", sa.String, nullable=True),
sa.Column("created", sa.DateTime(), nullable=False, server_default=func.now()),
sa.Column(
"modified",
@@ -66,6 +68,14 @@ class GroupType(enum.Enum):
ondelete="CASCADE",
),
),
+ sa.Column(
+ "access_rights",
+ JSONB,
+ nullable=False,
+ server_default=sa.text(
+ "'{\"read\": true, \"write\": false, \"delete\": false}'::jsonb"
+ ),
+ ),
sa.Column("created", sa.DateTime(), nullable=False, server_default=func.now()),
sa.Column(
"modified",
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py
index 38f7dbf2b1c..5015d5daea6 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/user_to_projects.py
@@ -4,6 +4,8 @@
from .projects import projects
from .users import users
+
+# DEPRECATED!!!!!!!!!!!!!! DO NOT USE!!!!!!!
user_to_projects = sa.Table(
"user_to_projects",
metadata,
@@ -11,11 +13,24 @@
sa.Column(
"user_id",
sa.BigInteger,
- sa.ForeignKey(users.c.id), # TODO: , ondelete="CASCADE"),
+ sa.ForeignKey(
+ users.c.id,
+ name="fk_user_to_projects_id_users",
+ ondelete="CASCADE",
+ onupdate="CASCADE",
+ ),
nullable=False,
),
sa.Column(
- "project_id", sa.BigInteger, sa.ForeignKey(projects.c.id), nullable=False,
+ "project_id",
+ sa.BigInteger,
+ sa.ForeignKey(
+ projects.c.id,
+ name="fk_user_to_projects_id_projects",
+ ondelete="CASCADE",
+ onupdate="CASCADE",
+ ),
+ nullable=False,
),
# TODO: do not ondelete=cascase for project_id or it will delete SHARED PROJECT
# add instead sa.UniqueConstraint('user_id', 'project_id', name='user_project_uniqueness'),
diff --git a/packages/postgres-database/src/simcore_postgres_database/storage_models.py b/packages/postgres-database/src/simcore_postgres_database/storage_models.py
index 2e6a911ba05..81984f2d5c5 100644
--- a/packages/postgres-database/src/simcore_postgres_database/storage_models.py
+++ b/packages/postgres-database/src/simcore_postgres_database/storage_models.py
@@ -5,18 +5,16 @@
"""
from .models.base import metadata
from .models.file_meta_data import file_meta_data
+from .models.groups import groups, user_to_groups
from .models.projects import projects
from .models.tokens import tokens
-from .models.user_to_projects import user_to_projects
-from .models.user_to_projects import users
-from .models.groups import groups, user_to_groups
+from .models.users import users
__all__ = [
"tokens",
"file_meta_data",
"metadata",
"projects",
- "user_to_projects",
"users",
"groups",
"user_to_groups",
diff --git a/packages/postgres-database/src/simcore_postgres_database/webserver_models.py b/packages/postgres-database/src/simcore_postgres_database/webserver_models.py
index b46494da692..eaab49671d8 100644
--- a/packages/postgres-database/src/simcore_postgres_database/webserver_models.py
+++ b/packages/postgres-database/src/simcore_postgres_database/webserver_models.py
@@ -12,7 +12,6 @@
from .models.projects import ProjectType, projects
from .models.tags import study_tags, tags
from .models.tokens import tokens
-from .models.user_to_projects import user_to_projects
from .models.users import UserRole, UserStatus, users
__all__ = [
@@ -24,7 +23,6 @@
"UserStatus",
"projects",
"ProjectType",
- "user_to_projects",
"confirmations",
"ConfirmationAction",
"tokens",
diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py
index f0e4b9e18cd..d27564b20a0 100644
--- a/packages/postgres-database/tests/test_delete_projects_and_users.py
+++ b/packages/postgres-database/tests/test_delete_projects_and_users.py
@@ -16,7 +16,6 @@
from simcore_postgres_database.webserver_models import (
UserStatus,
projects,
- user_to_projects,
users,
)
@@ -70,16 +69,6 @@ async def start():
projects.insert().values(**random_project(prj_owner=4))
)
- await conn.execute(
- user_to_projects.insert().values(user_id=1, project_id=1)
- )
- await conn.execute(
- user_to_projects.insert().values(user_id=1, project_id=2)
- )
- await conn.execute(
- user_to_projects.insert().values(user_id=2, project_id=3)
- )
-
return engine
return loop.run_until_complete(start())
@@ -175,10 +164,3 @@ async def test_view(engine):
res = await conn.execute(projects.select())
rows = await res.fetchall()
assert len(rows) == 3
-
- # effect of cascade is that relation deletes as well
- res = await conn.execute(user_to_projects.select())
- rows = await res.fetchall()
-
- assert len(rows) == 1
- assert not any(row[user_to_projects.c.user_id] == 1 for row in rows)
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py
index 9f87139f649..870d61eaec8 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_projects.py
@@ -74,16 +74,10 @@ async def create_project(
async def delete_all_projects(app: web.Application):
- from simcore_service_webserver.projects.projects_models import (
- projects,
- user_to_projects,
- )
+ from simcore_service_webserver.projects.projects_models import projects
db = app[APP_PROJECT_DBAPI]
async with db.engine.acquire() as conn:
- query = user_to_projects.delete()
- await conn.execute(query)
-
query = projects.delete()
await conn.execute(query)
diff --git a/packages/service-library/src/servicelib/application.py b/packages/service-library/src/servicelib/application.py
index 57c639e0fee..1e2cd1228d7 100644
--- a/packages/service-library/src/servicelib/application.py
+++ b/packages/service-library/src/servicelib/application.py
@@ -11,7 +11,7 @@ async def startup_info(app: web.Application):
async def shutdown_info(app: web.Application):
- print(f"INFO: SHUTING DOWN {app} ...", flush=True)
+ print(f"INFO: SHUTTING DOWN {app} ...", flush=True)
def create_safe_application(config: Optional[Dict] = None) -> web.Application:
diff --git a/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json b/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json
index d40e6ceb1a4..4ecd40e4248 100644
--- a/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json
+++ b/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json
@@ -46,7 +46,34 @@
},
"accessRights": {
"type": "object",
- "description": "object containing the GroupID as key and read/write/execution permissions as value"
+ "description": "object containing the GroupID as key and read/write/execution permissions as value",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^\\d+$": {
+ "type": "object",
+ "description": "the group id",
+ "additionalProperties": false,
+ "required": [
+ "read",
+ "write",
+ "delete"
+ ],
+ "properties": {
+ "read": {
+ "type": "boolean",
+ "description": "gives read access"
+ },
+ "write": {
+ "type": "boolean",
+ "description": "gives write access"
+ },
+ "delete": {
+ "type": "boolean",
+ "description": "gives deletion rights"
+ }
+ }
+ }
+ }
},
"creationDate": {
"type": "string",
@@ -307,4 +334,4 @@
}
}
}
-}
\ No newline at end of file
+}
diff --git a/services/storage/src/simcore_service_storage/api/v0/openapi.yaml b/services/storage/src/simcore_service_storage/api/v0/openapi.yaml
index b0dfb02d379..e06b3b29233 100644
--- a/services/storage/src/simcore_service_storage/api/v0/openapi.yaml
+++ b/services/storage/src/simcore_service_storage/api/v0/openapi.yaml
@@ -1764,6 +1764,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -1976,6 +1996,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -2198,6 +2238,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -2520,6 +2580,26 @@ components:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
diff --git a/services/storage/src/simcore_service_storage/api/v0/schemas/project-v0.0.1.json b/services/storage/src/simcore_service_storage/api/v0/schemas/project-v0.0.1.json
index d40e6ceb1a4..4ecd40e4248 100644
--- a/services/storage/src/simcore_service_storage/api/v0/schemas/project-v0.0.1.json
+++ b/services/storage/src/simcore_service_storage/api/v0/schemas/project-v0.0.1.json
@@ -46,7 +46,34 @@
},
"accessRights": {
"type": "object",
- "description": "object containing the GroupID as key and read/write/execution permissions as value"
+ "description": "object containing the GroupID as key and read/write/execution permissions as value",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^\\d+$": {
+ "type": "object",
+ "description": "the group id",
+ "additionalProperties": false,
+ "required": [
+ "read",
+ "write",
+ "delete"
+ ],
+ "properties": {
+ "read": {
+ "type": "boolean",
+ "description": "gives read access"
+ },
+ "write": {
+ "type": "boolean",
+ "description": "gives write access"
+ },
+ "delete": {
+ "type": "boolean",
+ "description": "gives deletion rights"
+ }
+ }
+ }
+ }
},
"creationDate": {
"type": "string",
@@ -307,4 +334,4 @@
}
}
}
-}
\ No newline at end of file
+}
diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py
index a4ad5918ea8..668724589ca 100644
--- a/services/storage/src/simcore_service_storage/dsm.py
+++ b/services/storage/src/simcore_service_storage/dsm.py
@@ -31,7 +31,6 @@
_location_from_id,
file_meta_data,
projects,
- user_to_projects,
)
from .s3 import get_config_s3
from .settings import (
@@ -218,11 +217,8 @@ async def list_files(
# now parse the project to search for node/project names
try:
async with self.engine.acquire() as conn:
- joint_table = user_to_projects.join(projects)
- query = (
- sa.select([projects])
- .select_from(joint_table)
- .where(user_to_projects.c.user_id == user_id)
+ query = sa.select([projects]).where(
+ projects.c.prj_owner == user_id
)
async for row in conn.execute(query):
@@ -381,11 +377,8 @@ async def list_datasets(self, user_id: str, location: str) -> DatasetMetaDataVec
if self.has_project_db:
try:
async with self.engine.acquire() as conn:
- joint_table = user_to_projects.join(projects)
- query = (
- sa.select([projects])
- .select_from(joint_table)
- .where(user_to_projects.c.user_id == user_id)
+ query = sa.select([projects]).where(
+ projects.c.prj_owner == user_id
)
async for row in conn.execute(query):
proj_data = dict(row.items())
diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py
index 5676caef903..2954b9397a5 100644
--- a/services/storage/src/simcore_service_storage/models.py
+++ b/services/storage/src/simcore_service_storage/models.py
@@ -12,7 +12,6 @@
metadata,
projects,
tokens,
- user_to_projects,
users,
groups,
user_to_groups,
@@ -174,7 +173,6 @@ def __str__(self):
"FileMetaDataEx",
"projects",
"users",
- "user_to_projects",
"groups",
"user_to_groups",
]
diff --git a/services/storage/tests/data/projects.csv b/services/storage/tests/data/projects.csv
index ec7b4af07a9..1f9f3796ba9 100644
--- a/services/storage/tests/data/projects.csv
+++ b/services/storage/tests/data/projects.csv
@@ -1,2 +1,2 @@
id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights
-151,STANDARD,161b8782-b13e-5840-9ae2-e2250c231001,Kember use case,Kember Cordiac Model with PostPro Viewer,"","",2019-06-27 11:42:03.168,2019-06-27 11:43:49.128,"{""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"": {""key"": ""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"", ""output"": ""out_1""}}, ""inputNodes"": [""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac""], ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}",False,"{}"
+151,STANDARD,161b8782-b13e-5840-9ae2-e2250c231001,Kember use case,Kember Cordiac Model with PostPro Viewer,"",21,2019-06-27 11:42:03.168,2019-06-27 11:43:49.128,"{""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"": {""key"": ""simcore/services/comp/kember-cardiac-model"", ""version"": ""1.0.0"", ""label"": ""Kember cardiac model"", ""inputs"": {""dt"": 0.01, ""T"": 1000, ""forcing_factor"": 0}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 100}}, ""a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8"": {""key"": ""simcore/services/dynamic/kember-viewer"", ""version"": ""2.9.0"", ""label"": ""kember-viewer"", ""inputs"": {""outputController"": {""nodeUuid"": ""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac"", ""output"": ""out_1""}}, ""inputNodes"": [""ad9bda7f-1dc5-5480-ab22-5fef4fc53eac""], ""outputs"": {}, ""progress"": 100, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 100}}}",False,"{}"
diff --git a/services/storage/tests/data/user_to_projects.csv b/services/storage/tests/data/user_to_projects.csv
deleted file mode 100644
index a72f5822d66..00000000000
--- a/services/storage/tests/data/user_to_projects.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-id,user_id,project_id
-122,21,151
diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py
index 494a7036a68..45086dba81b 100644
--- a/services/storage/tests/utils.py
+++ b/services/storage/tests/utils.py
@@ -12,7 +12,6 @@
FileMetaData,
file_meta_data,
projects,
- user_to_projects,
users,
groups,
user_to_groups,
@@ -124,29 +123,14 @@ def create_full_tables(url):
meta.drop_all(
bind=engine,
- tables=[
- user_to_groups,
- file_meta_data,
- projects,
- user_to_projects,
- users,
- groups,
- ],
+ tables=[user_to_groups, file_meta_data, projects, users, groups,],
checkfirst=True,
)
meta.create_all(
- bind=engine,
- tables=[
- file_meta_data,
- projects,
- user_to_projects,
- users,
- groups,
- user_to_groups,
- ],
+ bind=engine, tables=[file_meta_data, projects, users, groups, user_to_groups,],
)
- for t in ["file_meta_data", "projects", "users", "user_to_projects"]:
+ for t in ["users", "file_meta_data", "projects"]:
filename = t + ".csv"
csv_file = str(data_dir() / Path(filename))
with open(csv_file, "r") as file:
@@ -161,12 +145,12 @@ def create_full_tables(url):
# cur = conn.cursor()
# columns = [["file_uuid","location_id","location","bucket_name","object_name","project_id","project_name","node_id","node_name","file_name","user_id","user_name"],[],[],[]]
# if False:
- # for t in ["file_meta_data", "projects", "users", "user_to_projects"]:
+ # for t in ["file_meta_data", "projects", "users"]:
# filename = t + ".sql"
# sqlfile = str(data_dir() / Path(filename))
# cur.execute(open(sqlfile, "r").read())
# else:
- # for t in ["file_meta_data", "projects", "users", "user_to_projects"]:
+ # for t in ["file_meta_data", "projects", "users"]:
# filename = t + ".csv"
# csv_file = str(data_dir() / Path(filename))
# if False:
@@ -189,14 +173,6 @@ def drop_all_tables(url):
engine = sa.create_engine(url)
meta.drop_all(
- bind=engine,
- tables=[
- file_meta_data,
- projects,
- user_to_projects,
- users,
- groups,
- user_to_groups,
- ],
+ bind=engine, tables=[file_meta_data, projects, users, groups, user_to_groups,],
)
engine.dispose()
diff --git a/services/web/client/source/class/osparc/About.js b/services/web/client/source/class/osparc/About.js
index 0974b5950f2..b489e0c853e 100644
--- a/services/web/client/source/class/osparc/About.js
+++ b/services/web/client/source/class/osparc/About.js
@@ -77,13 +77,13 @@ qx.Class.define("osparc.About", {
entryLabel = new qx.ui.basic.Label(item);
}
entryLayout.set({
- font: osparc.utils.Utils.getFont(14, true)
+ font: "title-14"
});
entryLayout.add(entryLabel);
let entryVersion = new qx.ui.basic.Label().set({
value: vers,
- font: osparc.utils.Utils.getFont(14)
+ font: "text-14"
});
entryLayout.add(entryVersion);
diff --git a/services/web/client/source/class/osparc/auth/Data.js b/services/web/client/source/class/osparc/auth/Data.js
index c88c3ab5884..3096e6b76de 100644
--- a/services/web/client/source/class/osparc/auth/Data.js
+++ b/services/web/client/source/class/osparc/auth/Data.js
@@ -25,6 +25,24 @@ qx.Class.define("osparc.auth.Data", {
type: "singleton",
properties: {
+ /**
+ * User ID
+ */
+ userId: {
+ init: null,
+ nullable: false,
+ check: "Number"
+ },
+
+ /**
+ * Group ID
+ */
+ groupId: {
+ init: null,
+ nullable: false,
+ check: "Number"
+ },
+
/**
* Basic authentification with a token
*/
diff --git a/services/web/client/source/class/osparc/auth/Manager.js b/services/web/client/source/class/osparc/auth/Manager.js
index 13ccc0d7a68..e04a951dbe3 100644
--- a/services/web/client/source/class/osparc/auth/Manager.js
+++ b/services/web/client/source/class/osparc/auth/Manager.js
@@ -148,6 +148,8 @@ qx.Class.define("osparc.auth.Manager", {
__loginUser: function(profile) {
osparc.auth.Data.getInstance().setEmail(profile.login);
osparc.auth.Data.getInstance().setToken(profile.login);
+ osparc.auth.Data.getInstance().setUserId(profile.id);
+ osparc.auth.Data.getInstance().setGroupId(profile["groups"]["me"]["gid"]);
osparc.data.Permissions.getInstance().setRole(profile.role);
},
diff --git a/services/web/client/source/class/osparc/component/export/ExportDAG.js b/services/web/client/source/class/osparc/component/export/ExportDAG.js
index bb6b176b923..a0bb4a45e55 100644
--- a/services/web/client/source/class/osparc/component/export/ExportDAG.js
+++ b/services/web/client/source/class/osparc/component/export/ExportDAG.js
@@ -105,18 +105,12 @@ qx.Class.define("osparc.component.export.ExportDAG", {
flex: 1
});
- // const shareWith = new osparc.component.export.ShareWith("exportDAG");
- // this._add(shareWith);
-
const exportBtn = this.__getExportBtn();
exportBtn.addListener("execute", () => {
if (manager.validate()) {
this.__exportAsMacroService(exportBtn);
}
}, this);
- // shareWith.addListener("changeReady", e => {
- // exportBtn.setEnabled(e.getData());
- // });
this._add(exportBtn);
},
@@ -149,7 +143,7 @@ qx.Class.define("osparc.component.export.ExportDAG", {
},
__getExportBtn: function() {
- const exportBtn = new osparc.ui.form.FetchButton(this.tr("Export")).set({
+ const exportBtn = new osparc.ui.form.FetchButton(this.tr("Publish Group")).set({
allowGrowX: false,
alignX: "right"
});
diff --git a/services/web/client/source/class/osparc/component/export/Permissions.js b/services/web/client/source/class/osparc/component/export/Permissions.js
new file mode 100644
index 00000000000..8c00053b4c7
--- /dev/null
+++ b/services/web/client/source/class/osparc/component/export/Permissions.js
@@ -0,0 +1,329 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+/**
+ * Widget for modifying Study permissions. This is the way studies are shared:
+ * - Creates a copy of study data
+ * - It allows changing study's access right, so that the study owners can:
+ * - Share it with Organizations and/or Organization Members (Collaborators)
+ * - Make other Collaborators Owner
+ * - Remove collaborators
+ */
+
+qx.Class.define("osparc.component.export.Permissions", {
+ extend: qx.ui.core.Widget,
+
+ /**
+ * @param studyData {Object} Object containing the serialized Study Data
+ */
+ construct: function(studyData) {
+ this.base(arguments);
+
+ this.__studyData = osparc.utils.Utils.deepCloneObject(studyData);
+
+ this._setLayout(new qx.ui.layout.VBox(15));
+
+ this.__buildLayout();
+
+ this.__getMyFriends();
+ },
+
+ events: {
+ "updateStudy": "qx.event.type.Data"
+ },
+
+ statics: {
+ getCollaboratorAccessRight: function() {
+ return {
+ "read": true,
+ "write": true,
+ "delete": false
+ };
+ },
+
+ getOwnerAccessRight: function() {
+ return {
+ "read": true,
+ "write": true,
+ "delete": true
+ };
+ },
+
+ removeCollaborator: function(studyData, gid) {
+ return delete studyData["accessRights"][gid];
+ },
+
+ createWindow: function(winText, shareResourceWidget) {
+ const window = new qx.ui.window.Window(winText).set({
+ appearance: "service-window",
+ layout: new qx.ui.layout.Grow(),
+ autoDestroy: true,
+ contentPadding: 10,
+ width: 400,
+ height: 300,
+ showMaximize: false,
+ showMinimize: false,
+ modal: true
+ });
+ window.add(shareResourceWidget);
+ window.center();
+ return window;
+ }
+ },
+
+ members: {
+ __studyData: null,
+ __organizationsAndMembers: null,
+ __collaboratorsModel: null,
+ __myFrieds: null,
+
+ createWindow: function() {
+ return this.self().createWindow(this.tr("Share with people and organizations"), this);
+ },
+
+ __buildLayout: function() {
+ const addCollaborator = this.__createAddCollaborator();
+ this._add(addCollaborator);
+
+ const collaboratorsList = this.__createCollaboratorsList();
+ this._add(collaboratorsList, {
+ flex: 1
+ });
+ },
+
+ __createAddCollaborator: function() {
+ const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(5));
+ vBox.setVisibility(this.__isUserOwner() ? "visible" : "excluded");
+
+ const label = new qx.ui.basic.Label().set({
+ value: this.tr("Add Collaborators and Organizations")
+ });
+ vBox.add(label);
+
+ const hBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({
+ alignY: "middle"
+ }));
+ vBox.add(hBox, {
+ flex: 1
+ });
+
+ const organizationsAndMembers = this.__organizationsAndMembers = new osparc.component.filter.OrganizationsAndMembers("asfd"); // NOTE(review): "asfd" looks like a placeholder filter-group id — confirm intended value
+ hBox.add(organizationsAndMembers, {
+ flex: 1
+ });
+
+ const addCollaboratorBtn = new qx.ui.form.Button(this.tr("Add")).set({
+ allowGrowY: false
+ });
+ addCollaboratorBtn.addListener("execute", () => {
+ this.__addCollaborator();
+ }, this);
+ hBox.add(addCollaboratorBtn);
+
+ return vBox;
+ },
+
+ __createCollaboratorsList: function() {
+ const collaboratorsUIList = new qx.ui.form.List().set({
+ decorator: "no-border",
+ spacing: 3,
+ width: 150
+ });
+
+ const collaboratorsModel = this.__collaboratorsModel = new qx.data.Array();
+ const collaboratorsCtrl = new qx.data.controller.List(collaboratorsModel, collaboratorsUIList, "name");
+ collaboratorsCtrl.setDelegate({
+ createItem: () => new osparc.dashboard.CollaboratorListItem(),
+ bindItem: (ctrl, item, id) => {
+ ctrl.bindProperty("gid", "model", null, item, id);
+ ctrl.bindProperty("gid", "key", null, item, id);
+ ctrl.bindProperty("thumbnail", "thumbnail", null, item, id);
+ ctrl.bindProperty("name", "title", null, item, id); // user
+ ctrl.bindProperty("label", "title", null, item, id); // organization
+ ctrl.bindProperty("login", "subtitle", null, item, id); // user
+ ctrl.bindProperty("description", "subtitle", null, item, id); // organization
+ ctrl.bindProperty("isOrg", "isOrganization", null, item, id);
+ ctrl.bindProperty("access_rights", "accessRights", null, item, id);
+ ctrl.bindProperty("showOptions", "showOptions", null, item, id);
+ },
+ configureItem: item => {
+ item.getChildControl("thumbnail").getContentElement()
+ .setStyles({
+ "border-radius": "16px"
+ });
+ item.addListener("promoteCollaborator", e => {
+ const orgMember = e.getData();
+ this.__promoteCollaborator(orgMember);
+ });
+ item.addListener("removeCollaborator", e => {
+ const orgMember = e.getData();
+ this.__deleteCollaborator(orgMember);
+ });
+ }
+ });
+
+ return collaboratorsUIList;
+ },
+
+ __getMyFriends: function() {
+ this.__myFrieds = {};
+
+ const store = osparc.store.Store.getInstance();
+ const promises = [];
+ promises.push(store.getGroupsOrganizations());
+ promises.push(store.getVisibleMembers());
+ Promise.all(promises)
+ .then(values => {
+ const orgs = values[0];
+ const orgMembers = values[1];
+ orgs.forEach(org => {
+ org["isOrg"] = true;
+ this.__myFrieds[org["gid"]] = org;
+ });
+ for (const gid of Object.keys(orgMembers)) {
+ const orgMember = orgMembers[gid];
+ orgMember["isOrg"] = false;
+ this.__myFrieds[gid] = orgMember;
+ }
+ this.__reloadOrganizationsAndMembers();
+ this.__reloadCollaboratorsList();
+ });
+ },
+
+ __reloadOrganizationsAndMembers: function() {
+ this.__organizationsAndMembers.reset();
+
+ const accessRights = this.__studyData["accessRights"];
+ const myFriends = this.__myFrieds;
+ for (const gid of Object.keys(myFriends)) {
+ const myFriend = myFriends[gid];
+ if (parseInt(gid) !== osparc.auth.Data.getInstance().getGroupId() && !(parseInt(gid) in accessRights)) {
+ const btn = this.__organizationsAndMembers.addOption(myFriend);
+ btn.setIcon(myFriend["isOrg"] ? "@FontAwesome5Solid/users/14" : "@FontAwesome5Solid/user/14");
+ }
+ }
+ },
+
+ __reloadCollaboratorsList: function() {
+ this.__collaboratorsModel.removeAll();
+
+ const accessRights = this.__studyData["accessRights"];
+ Object.keys(accessRights).forEach(gid => {
+ if (Object.prototype.hasOwnProperty.call(this.__myFrieds, gid)) {
+ const collaborator = this.__myFrieds[gid];
+ if ("first_name" in collaborator) {
+ collaborator["thumbnail"] = osparc.utils.Avatar.getUrl(collaborator["login"], 32);
+ collaborator["name"] = osparc.utils.Utils.firstsUp(collaborator["first_name"], collaborator["last_name"]);
+ }
+ collaborator["access_rights"] = accessRights[gid];
+ if (this.__isUserOwner()) {
+ collaborator["showOptions"] = true;
+ }
+ const collaboratorModel = qx.data.marshal.Json.createModel(collaborator);
+ if (parseInt(gid) === osparc.auth.Data.getInstance().getGroupId()) {
+ this.__collaboratorsModel.insertAt(0, collaboratorModel);
+ } else {
+ this.__collaboratorsModel.append(collaboratorModel);
+ }
+ }
+ });
+ },
+
+ __isUserOwner: function() {
+ const myGid = osparc.auth.Data.getInstance().getGroupId();
+ const accessRights = this.__studyData["accessRights"];
+ if (myGid in accessRights) {
+ return accessRights[myGid]["delete"];
+ }
+ return false;
+ },
+
+ __addCollaborator: function() {
+ const gids = this.__organizationsAndMembers.getSelectedGIDs();
+ if (gids.length === 0) {
+ return;
+ }
+
+ gids.forEach(gid => {
+ this.__studyData["accessRights"][gid] = this.self().getCollaboratorAccessRight();
+ });
+ const params = {
+ url: {
+ "projectId": this.__studyData["uuid"]
+ },
+ data: this.__studyData
+ };
+ osparc.data.Resources.fetch("studies", "put", params)
+ .then(() => {
+ this.fireDataEvent("updateStudy", this.__studyData["uuid"]);
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Collaborator(s) successfully added"));
+ this.__reloadOrganizationsAndMembers();
+ this.__reloadCollaboratorsList();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong adding collaborator(s)"), "ERROR");
+ console.error(err);
+ });
+ },
+
+ __promoteCollaborator: function(collaborator) {
+ this.__studyData["accessRights"][collaborator["gid"]] = this.self().getOwnerAccessRight();
+ const params = {
+ url: {
+ "projectId": this.__studyData["uuid"]
+ },
+ data: this.__studyData
+ };
+ osparc.data.Resources.fetch("studies", "put", params)
+ .then(() => {
+ this.fireDataEvent("updateStudy", this.__studyData["uuid"]);
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Collaborator successfully made Owner"));
+ this.__reloadOrganizationsAndMembers();
+ this.__reloadCollaboratorsList();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong making Collaborator Owner"), "ERROR");
+ console.error(err);
+ });
+ },
+
+ __deleteCollaborator: function(collaborator) {
+ const success = this.self().removeCollaborator(this.__studyData, collaborator["gid"]);
+ if (!success) {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing Collaborator"), "ERROR");
+ }
+
+ const params = {
+ url: {
+ "projectId": this.__studyData["uuid"]
+ },
+ data: this.__studyData
+ };
+ osparc.data.Resources.fetch("studies", "put", params)
+ .then(() => {
+ this.fireDataEvent("updateStudy", this.__studyData["uuid"]);
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Collaborator successfully removed"));
+ this.__reloadOrganizationsAndMembers();
+ this.__reloadCollaboratorsList();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing Collaborator"), "ERROR");
+ console.error(err);
+ });
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/component/export/SaveAsTemplate.js b/services/web/client/source/class/osparc/component/export/SaveAsTemplate.js
index a18815e74a5..eca7b4a5728 100644
--- a/services/web/client/source/class/osparc/component/export/SaveAsTemplate.js
+++ b/services/web/client/source/class/osparc/component/export/SaveAsTemplate.js
@@ -16,73 +16,107 @@
************************************************************************ */
/**
- *
+ * Widget for creating a template from a study
+ * - Creates a copy of study data
+ * - Using the ShareWith widget allows publishing the template
*/
qx.Class.define("osparc.component.export.SaveAsTemplate", {
extend: qx.ui.core.Widget,
- construct: function(studyId, formData) {
+ /**
+ * @param studyId {String} Study Id
+ * @param studyData {Object} Object containing part or the entire serialized Study Data
+ */
+ construct: function(studyId, studyData) {
this.base(arguments);
- this.__studyId = studyId;
- this.__formData = osparc.utils.Utils.deepCloneObject(formData);
-
this._setLayout(new qx.ui.layout.VBox(5));
+ this.__studyId = studyId;
+ this.__formData = osparc.utils.Utils.deepCloneObject(studyData);
+
this.__buildLayout();
+
+ this.setHeaderText(this.tr("Make Template accessible to"));
+ this.setButtonText(this.tr("Publish"));
},
statics: {
- createSaveAsTemplateWindow: function(saveAsTemplate) {
- const window = new qx.ui.window.Window("Save as Template").set({
+ createWindow: function(winText, shareResourceWidget) {
+ const window = new qx.ui.window.Window(winText).set({
appearance: "service-window",
layout: new qx.ui.layout.Grow(),
autoDestroy: true,
- contentPadding: 0,
+ contentPadding: 10,
width: 400,
height: 300,
+ showMaximize: false,
showMinimize: false,
modal: true
});
- window.add(saveAsTemplate);
+ window.add(shareResourceWidget);
window.center();
return window;
}
},
+ properties: {
+ headerText: {
+ check: "String",
+ init: "",
+ event: "changeHeaderText"
+ },
+
+ buttonText: {
+ check: "String",
+ init: "",
+ event: "changeButtonText"
+ }
+ },
+
events: {
"finished": "qx.event.type.Data"
},
members: {
__studyId: null,
- __formData: null,
__shareWith: null,
+ __formData: null,
+
+ createWindow: function() {
+ return this.self().createWindow(this.tr("Save as Template"), this);
+ },
__buildLayout: function() {
- const shareWith = this.__shareWith = new osparc.component.export.ShareWith("saveAsTemplate");
+ const shareWith = this.__shareWith = new osparc.component.export.ShareWith();
+ this.bind("headerText", shareWith, "legend");
this._add(shareWith, {
flex: 1
});
- const saveAsTemplateBtn = new osparc.ui.form.FetchButton(this.tr("Save as Template")).set({
+ const shareResourceBtn = new osparc.ui.form.FetchButton().set({
allowGrowX: false,
alignX: "right"
});
- saveAsTemplateBtn.addListener("execute", () => {
- this.__saveAsTemplate(saveAsTemplateBtn);
+ this.bind("buttonText", shareResourceBtn, "label");
+ shareResourceBtn.addListener("execute", () => {
+ this.__shareResource(shareResourceBtn);
}, this);
- shareWith.bind("ready", saveAsTemplateBtn, "enabled");
- this._add(saveAsTemplateBtn);
+ shareWith.bind("ready", shareResourceBtn, "enabled");
+ this._add(shareResourceBtn);
},
- __saveAsTemplate: function(btn) {
+ __shareResource: function(btn) {
btn.setFetching(true);
const selectedGroupIDs = this.__shareWith.getSelectedGroups();
- selectedGroupIDs.forEach(selectedGroupID => {
- this.__formData["accessRights"][selectedGroupID] = "rwx";
+ selectedGroupIDs.forEach(gid => {
+ this.__formData["accessRights"][gid] = {
+ "read": true,
+ "write": false,
+ "delete": false
+ };
});
const params = {
diff --git a/services/web/client/source/class/osparc/component/export/ShareWith.js b/services/web/client/source/class/osparc/component/export/ShareWith.js
index 99833550c5c..a2ebf560ed0 100644
--- a/services/web/client/source/class/osparc/component/export/ShareWith.js
+++ b/services/web/client/source/class/osparc/component/export/ShareWith.js
@@ -17,22 +17,25 @@
/**
* View that shows who you want to share the resource with:
- * - Everyone
- * - My organizations
* - Private
+ * - Organization members
+ * - My organizations
+ * - Everyone
*/
qx.Class.define("osparc.component.export.ShareWith", {
extend: qx.ui.groupbox.GroupBox,
- construct: function(filterGroupId) {
- this.base(arguments, this.tr("Share with"));
+ construct: function(header) {
+ this.base(arguments, header);
this.set({
appearance: "settings-groupbox",
layout: new qx.ui.layout.VBox(10)
});
+ this.__buildLayout();
+
const store = osparc.store.Store.getInstance();
Promise.all([
store.getGroupsMe(),
@@ -41,9 +44,14 @@ qx.Class.define("osparc.component.export.ShareWith", {
.then(values => {
const groupMe = values[0];
const groupAll = values[1];
- this.__sharingOptions["me"]["gid"] = groupMe["gid"];
- this.__sharingOptions["all"]["gid"] = groupAll["gid"];
- this.__buildLayout(filterGroupId);
+ this.__rbManager.getChildren().forEach(rb => {
+ if (rb.contextId === this.__sharingOpts["me"].contextId) {
+ rb.gid = groupMe["gid"];
+ }
+ if (rb.contextId === this.__sharingOpts["all"].contextId) {
+ rb.gid = groupAll["gid"];
+ }
+ });
});
},
@@ -57,43 +65,87 @@ qx.Class.define("osparc.component.export.ShareWith", {
},
members: { // eslint-disable-line qx-rules/no-refs-in-members
- __sharingOptions: {
+ __sharingOpts: {
"me": {
- shareContextId: 0,
- label: "Private",
- gid: null
+ contextId: 0,
+ label: "Private"
},
+ /*
+ "orgMembers": {
+ contextId: 1,
+ label: "Organization Members"
+ },
+ */
"orgs": {
- shareContextId: 1,
- label: "Organizations",
- gid: null
+ contextId: 2,
+ label: "Organizations"
},
"all": {
- shareContextId: 2,
- label: "Everyone",
- gid: null
+ contextId: 3,
+ label: "Everyone"
}
},
__rbManager: null,
- __myOrganizationsHB: null,
+ __privateLayout: null,
+ __publicLayout: null,
+ __myOrgMembersHB: null,
+ __myOrgMembers: null,
+ __myOrgs: null,
- __buildLayout: function(filterGroupId) {
+ __buildLayout: function() {
this.__rbManager = new qx.ui.form.RadioGroup().set({
allowEmptySelection: true
});
- for (let [sharingOptionKey, sharingOption] of Object.entries(this.__sharingOptions)) {
+ for (let [sharingOptionKey, sharingOption] of Object.entries(this.__sharingOpts)) {
const rb = new qx.ui.form.RadioButton(sharingOption.label);
- rb.shareContextId = sharingOption.shareContextId;
- if (sharingOptionKey === "orgs") {
- const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox());
- const myOrganizationsHB = this.__myOrganizationsHB = new osparc.component.filter.Organizations(filterGroupId);
- vBox.add(rb);
- vBox.add(myOrganizationsHB);
- this.add(vBox);
- } else {
- rb.gid = sharingOption["gid"];
- this.add(rb);
+ rb.contextId = sharingOption.contextId;
+ switch (sharingOptionKey) {
+ case "me":
+ this.__privateLayout = rb;
+ this.add(rb);
+ break;
+ case "orgMembers": {
+ const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox());
+ const myOrgMembersHB = this.__myOrgMembersHB = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({
+ alignY: "middle"
+ }));
+ const myOrgsSB = new qx.ui.form.SelectBox();
+ osparc.data.Resources.get("organizations")
+ .then(resp => {
+ const orgs = resp["organizations"];
+ orgs.sort(this.__sortByLabel);
+ orgs.forEach(org => {
+ const orgItem = new qx.ui.form.ListItem(org["label"]);
+ orgItem.gid = org["gid"];
+ myOrgsSB.add(orgItem);
+ });
+ });
+ myOrgMembersHB.add(myOrgsSB);
+ const myOrgMembers = this.__myOrgMembers = new osparc.component.filter.OrganizationMembers("asdfasdf");
+ myOrgMembersHB.add(myOrgMembers, {
+ flex: 1
+ });
+ myOrgsSB.addListener("changeSelection", e => {
+ myOrgMembers.setOrganizationId(e.getData()[0].gid);
+ });
+ vBox.add(rb);
+ vBox.add(myOrgMembersHB);
+ this.add(vBox);
+ break;
+ }
+ case "orgs": {
+ const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox());
+ const myOrgs = this.__myOrgs = new osparc.component.filter.Organizations();
+ vBox.add(rb);
+ vBox.add(myOrgs);
+ this.add(vBox);
+ break;
+ }
+ case "all":
+ this.__publicLayout = rb;
+ this.add(rb);
+ break;
}
this.__rbManager.add(rb);
}
@@ -106,36 +158,54 @@ qx.Class.define("osparc.component.export.ShareWith", {
const selection = this.__rbManager.getSelection();
this.setReady(Boolean(selection.length));
- const isOrganizationsSelected = this.__isGroupSelected("orgs");
- this.__myOrganizationsHB.setVisibility(isOrganizationsSelected ? "visible" : "excluded");
+ // this.__myOrgMembersHB.setVisibility(this.__isGroupSelected("orgMembers") ? "visible" : "excluded");
+ this.__myOrgs.setVisibility(this.__isGroupSelected("orgs") ? "visible" : "excluded");
},
__isGroupSelected: function(groupKey) {
const selection = this.__rbManager.getSelection();
- if (selection.length === 1 && selection[0].shareContextId === this.__sharingOptions[groupKey].shareContextId) {
+ if (selection.length === 1 && selection[0].contextId === this.__sharingOpts[groupKey].contextId) {
return true;
}
return false;
},
- __getSelectedOrganizationIDs: function() {
+ __getSelectedOrgMemberIDs: function() {
+ if (this.__isGroupSelected("orgMembers")) {
+ return this.__myOrgMembers.getSelectedOrgMemberIDs();
+ }
+ return [];
+ },
+
+ __getSelectedOrgIDs: function() {
if (this.__isGroupSelected("orgs")) {
- return this.__myOrganizationsHB.getSelectedOrganizationIDs();
+ return this.__myOrgs.getSelectedOrgIDs();
}
return [];
},
+ showPrivate: function(show) {
+ this.__privateLayout.setVisibility(show ? "visible" : "excluded");
+ },
+
+ showPublic: function(show) {
+ this.__publicLayout.setVisibility(show ? "visible" : "excluded");
+ },
+
getSelectedGroups: function() {
let groupIDs = [];
const selection = this.__rbManager.getSelection();
if (selection.length) {
- switch (selection[0].shareContextId) {
- case 0:
- case 2:
+ switch (selection[0].contextId) {
+ case this.__sharingOpts["me"].contextId:
+ case this.__sharingOpts["all"].contextId:
groupIDs = [selection[0].gid];
break;
- case 1:
- groupIDs = this.__getSelectedOrganizationIDs();
+ // case this.__sharingOpts["orgMembers"].contextId:
+ // groupIDs = this.__getSelectedOrgMemberIDs();
+ // break;
+ case this.__sharingOpts["orgs"].contextId:
+ groupIDs = this.__getSelectedOrgIDs();
break;
}
}
diff --git a/services/web/client/source/class/osparc/component/filter/OrganizationMembers.js b/services/web/client/source/class/osparc/component/filter/OrganizationMembers.js
new file mode 100644
index 00000000000..04ae22524fc
--- /dev/null
+++ b/services/web/client/source/class/osparc/component/filter/OrganizationMembers.js
@@ -0,0 +1,70 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+/**
+ * Filter for members for the given organization.
+ */
+qx.Class.define("osparc.component.filter.OrganizationMembers", {
+ extend: osparc.component.filter.TagsFilter,
+
+ /**
+ * Constructor for OrganizationMembers creates the tags filter; options are (re)built when organizationId is set.
+ *
+ * @extends osparc.component.filter.TagsFilter
+ */
+ construct: function(filterGroupId) {
+ this.base(arguments, this.tr("Members"), "organizationMembers", filterGroupId);
+ },
+
+ properties: {
+ organizationId: {
+ check: "Number",
+ nullable: true,
+ apply: "_applyOrganizationId",
+ event: "changeOrganizationId"
+ }
+ },
+
+ members: {
+ _applyOrganizationId: function(orgId) {
+ this._removeAllOptions();
+ const params = {
+ url: {
+ gid: orgId
+ }
+ };
+ osparc.data.Resources.get("organizationMembers", params)
+ .then(members => {
+ members.sort((a, b) => (a["first_name"] > b["first_name"]) ? 1 : -1);
+ members.forEach(member => {
+ const name = osparc.utils.Utils.firstsUp(member["first_name"], member["last_name"]);
+ const bnt = this._addOption(name);
+ bnt.uid = member["id"];
+ });
+ });
+ },
+
+ getSelectedOrgMemberIDs: function() {
+ const selectedOrganizationMemberIDs = [];
+ const activeMenuButtons = this._getActiveMenuButtons();
+ activeMenuButtons.forEach(activeMenuButton => {
+ selectedOrganizationMemberIDs.push(activeMenuButton.uid);
+ });
+ return selectedOrganizationMemberIDs;
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/component/filter/Organizations.js b/services/web/client/source/class/osparc/component/filter/Organizations.js
index f116eb15a6e..8396ce8b017 100644
--- a/services/web/client/source/class/osparc/component/filter/Organizations.js
+++ b/services/web/client/source/class/osparc/component/filter/Organizations.js
@@ -26,21 +26,18 @@ qx.Class.define("osparc.component.filter.Organizations", {
*
* @extends osparc.component.filter.TagsFilter
*/
- construct: function(filterGroupId) {
- this.base(arguments, this.tr("My Organizations"), "organizations", filterGroupId);
+ construct: function() {
+ this.base(arguments, this.tr("Select Organization"), "organizations", "organizations");
this.__buildMenu();
},
members: {
- /**
- * Function that uses the information in {osparc.store.Store.getGroupsOrganizations} to build the menu for the filter.
- */
__buildMenu: function() {
- const store = osparc.store.Store.getInstance();
- store.getGroupsOrganizations()
- .then(orgs => {
- orgs.sort(this.__sortByLabel);
+ osparc.data.Resources.get("organizations")
+ .then(resp => {
+ const orgs = resp["organizations"];
+ orgs.sort((a, b) => (a["label"] > b["label"]) ? 1 : -1);
orgs.forEach(org => {
const bnt = this._addOption(osparc.utils.Utils.capitalize(org["label"]));
bnt.gid = org["gid"];
@@ -48,17 +45,7 @@ qx.Class.define("osparc.component.filter.Organizations", {
});
},
- __sortByLabel: function(org1, org2) {
- if (org1.label > org2.label) {
- return 1;
- }
- if (org1.label < org2.label) {
- return -1;
- }
- return 0;
- },
-
- getSelectedOrganizationIDs: function() {
+ getSelectedOrgIDs: function() {
const selectedOrganizationIDs = [];
const activeMenuButtons = this._getActiveMenuButtons();
activeMenuButtons.forEach(activeMenuButton => {
diff --git a/services/web/client/source/class/osparc/component/filter/OrganizationsAndMembers.js b/services/web/client/source/class/osparc/component/filter/OrganizationsAndMembers.js
new file mode 100644
index 00000000000..43172fbad78
--- /dev/null
+++ b/services/web/client/source/class/osparc/component/filter/OrganizationsAndMembers.js
@@ -0,0 +1,62 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+/**
+ * Filter for organizations and organization members.
+ */
+qx.Class.define("osparc.component.filter.OrganizationsAndMembers", {
+ extend: osparc.component.filter.TagsFilter,
+
+ /**
+ * Constructor for OrganizationsAndMembers creates the tags filter; options are added via addOption/addOptions.
+ *
+ * @extends osparc.component.filter.TagsFilter
+ */
+ construct: function(filterGroupId) {
+ this.base(arguments, this.tr("Members"), "organizationsAndMembers", filterGroupId);
+ },
+
+ members: {
+ addOption: function(group) {
+ let name = "";
+ if ("first_name" in group) {
+ name = group["first_name"] + " " + group["last_name"];
+ } else {
+ name = group["label"];
+ }
+ const btn = this._addOption(name);
+ btn.gid = group["gid"];
+ return btn;
+ },
+
+ addOptions: function(groups) {
+ this._removeAllOptions();
+ groups.forEach(group => {
+ this.addOption(group);
+ });
+ },
+
+ getSelectedGIDs: function() {
+ const selectedGIDs = [];
+ const activeMenuButtons = this._getActiveMenuButtons();
+ activeMenuButtons.forEach(activeMenuButton => {
+ selectedGIDs.push(activeMenuButton.gid);
+ });
+ return selectedGIDs;
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js
index b534f6b5957..534946a3b8d 100644
--- a/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js
+++ b/services/web/client/source/class/osparc/component/form/ToggleButtonContainer.js
@@ -16,8 +16,8 @@ qx.Class.define("osparc.component.form.ToggleButtonContainer", {
},
events: {
- changeSelection: "qx.event.type.Data",
- changeVisibility: "qx.event.type.Data"
+ "changeSelection": "qx.event.type.Data",
+ "changeVisibility": "qx.event.type.Data"
},
members: {
diff --git a/services/web/client/source/class/osparc/component/form/tag/TagItem.js b/services/web/client/source/class/osparc/component/form/tag/TagItem.js
index 20173a7db56..935583a991b 100644
--- a/services/web/client/source/class/osparc/component/form/tag/TagItem.js
+++ b/services/web/client/source/class/osparc/component/form/tag/TagItem.js
@@ -53,8 +53,8 @@ qx.Class.define("osparc.component.form.tag.TagItem", {
}
},
events: {
- cancelNewTag: "qx.event.type.Event",
- deleteTag: "qx.event.type.Event"
+ "cancelNewTag": "qx.event.type.Event",
+ "deleteTag": "qx.event.type.Event"
},
members: {
__tag: null,
diff --git a/services/web/client/source/class/osparc/component/form/tag/TagManager.js b/services/web/client/source/class/osparc/component/form/tag/TagManager.js
index 7ab7cfb08fc..1885a8ed177 100644
--- a/services/web/client/source/class/osparc/component/form/tag/TagManager.js
+++ b/services/web/client/source/class/osparc/component/form/tag/TagManager.js
@@ -35,7 +35,7 @@ qx.Class.define("osparc.component.form.tag.TagManager", {
this.open();
},
events: {
- changeSelected: "qx.event.type.Data"
+ "changeSelected": "qx.event.type.Data"
},
properties: {
liveUpdate: {
diff --git a/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js b/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js
index 33e7de057dc..b32e43721dd 100644
--- a/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js
+++ b/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js
@@ -26,35 +26,32 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
extend: qx.ui.core.Widget,
/**
- * @param study {Object|osparc.data.model.Study} Study (metadata)
+ * @param studyData {Object} Object containing the serialized Study Data
* @param isTemplate {Boolean} Weather the study is template or not
* @param winWidth {Number} Width for the window, needed for stretching the thumbnail
*/
- construct: function(study, isTemplate, winWidth) {
+ construct: function(studyData, isTemplate, winWidth) {
this.base(arguments);
this._setLayout(new qx.ui.layout.Grow());
- this.__isTemplate = isTemplate;
- this.__selectedTags = study.tags;
- this.__model = qx.data.marshal.Json.createModel(study);
+ this.__studyModel = qx.data.marshal.Json.createModel(studyData);
+ this.__selectedTags = studyData.tags;
+ this.__workbench = studyData.workbench;
this.__stack = new qx.ui.container.Stack();
- this.__displayView = this.__createDisplayView(study, winWidth);
- this.__editView = this.__createEditView();
+ this.__displayView = this.__createDisplayView(studyData, isTemplate, winWidth);
+ this.__editView = this.__createEditView(isTemplate);
this.__stack.add(this.__displayView);
this.__stack.add(this.__editView);
this._add(this.__stack);
-
- // Workaround: qx serializer is not doing well with uuid as object keys.
- this.__workbench = study.workbench;
},
events: {
- updatedStudy: "qx.event.type.Data",
- updatedTemplate: "qx.event.type.Data",
- updateTags: "qx.event.type.Data",
- closed: "qx.event.type.Event",
- openedStudy: "qx.event.type.Event"
+ "updateStudy": "qx.event.type.Event",
+ "updateTemplate": "qx.event.type.Event",
+ "updateTags": "qx.event.type.Data",
+ "closed": "qx.event.type.Event",
+ "openStudy": "qx.event.type.Event"
},
properties: {
@@ -66,22 +63,48 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
}
},
+ statics: {
+ popUpInWindow: function(title, studyDetailsEditor, width = 400, height = 400) {
+ const win = new qx.ui.window.Window(title).set({
+ autoDestroy: true,
+ layout: new qx.ui.layout.VBox(),
+ appearance: "service-window",
+ showMinimize: false,
+ showMaximize: false,
+ resizable: true,
+ contentPadding: 10,
+ width: width,
+ height: height,
+ modal: true
+ });
+ win.add(studyDetailsEditor);
+ win.center();
+ win.open();
+ return win;
+ }
+ },
+
members: {
__stack: null,
- __workbench: null,
- __model: null,
- __isTemplate: null,
__fields: null,
+ __openButton: null,
+ __study: null,
+ __studyModel: null,
+ __workbench: null,
__selectedTags: null,
- __createDisplayView: function(study, winWidth) {
+ showOpenButton: function(show) {
+ this.__openButton.setVisibility(show ? "visible" : "excluded");
+ },
+
+ __createDisplayView: function(study, isTemplate, winWidth) {
const displayView = new qx.ui.container.Composite(new qx.ui.layout.VBox(10));
- displayView.add(this.__createButtons());
+ displayView.add(this.__createButtons(isTemplate));
displayView.add(new osparc.component.metadata.StudyDetails(study, winWidth));
return displayView;
},
- __createButtons: function() {
+ __createButtons: function(isTemplate) {
const isCurrentUserOwner = this.__isUserOwner();
const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create");
const canUpdateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.update");
@@ -92,16 +115,16 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
marginTop: 10
});
- const openButton = new qx.ui.form.Button("Open").set({
- appearance: "lg-button"
+ const openButton = this.__openButton = new qx.ui.form.Button("Open").set({
+ appearance: "md-button"
});
osparc.utils.Utils.setIdToWidget(openButton, "openStudyBtn");
- openButton.addListener("execute", () => this.fireEvent("openedStudy"), this);
+ openButton.addListener("execute", () => this.fireEvent("openStudy"), this);
buttonsLayout.add(openButton);
const modeButton = new qx.ui.form.Button("Edit", "@FontAwesome5Solid/edit/16").set({
- appearance: "lg-button",
- visibility: isCurrentUserOwner && (!this.__isTemplate || canUpdateTemplate) ? "visible" : "excluded"
+ appearance: "md-button",
+ visibility: isCurrentUserOwner && (!isTemplate || canUpdateTemplate) ? "visible" : "excluded"
});
osparc.utils.Utils.setIdToWidget(modeButton, "editStudyBtn");
modeButton.addListener("execute", () => this.setMode("edit"), this);
@@ -111,24 +134,35 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
flex: 1
});
- if (isCurrentUserOwner && (!this.__isTemplate && canCreateTemplate)) {
- const saveAsTemplateButton = new qx.ui.form.Button(this.tr("Save as template")).set({
- appearance: "lg-button"
+ if (!isTemplate) {
+ const permissionsButton = new qx.ui.form.Button(this.tr("Permissions")).set({
+ appearance: "md-button"
});
- osparc.utils.Utils.setIdToWidget(saveAsTemplateButton, "saveAsTemplateBtn");
- saveAsTemplateButton.addListener("execute", e => {
- this.__openSaveAsTemplate();
+ osparc.utils.Utils.setIdToWidget(permissionsButton, "permissionsBtn");
+ permissionsButton.addListener("execute", e => {
+ this.__openPermissions();
}, this);
- buttonsLayout.add(saveAsTemplateButton);
+ buttonsLayout.add(permissionsButton);
+
+ if (isCurrentUserOwner && canCreateTemplate) {
+ const saveAsTemplateButton = new qx.ui.form.Button(this.tr("Save as Template")).set({
+ appearance: "md-button"
+ });
+ osparc.utils.Utils.setIdToWidget(saveAsTemplateButton, "saveAsTemplateBtn");
+ saveAsTemplateButton.addListener("execute", e => {
+ this.__openSaveAsTemplate();
+ }, this);
+ buttonsLayout.add(saveAsTemplateButton);
+ }
}
return buttonsLayout;
},
- __createEditView: function() {
+ __createEditView: function(isTemplate) {
const isCurrentUserOwner = this.__isUserOwner();
const canUpdateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.update");
- const fieldIsEnabled = isCurrentUserOwner && (!this.__isTemplate || canUpdateTemplate);
+ const fieldIsEnabled = isCurrentUserOwner && (!isTemplate || canUpdateTemplate);
const editView = new qx.ui.container.Composite(new qx.ui.layout.VBox(8));
const buttons = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({
@@ -136,36 +170,36 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
}));
this.__fields = {
- name: new qx.ui.form.TextField(this.__model.getName()).set({
+ name: new qx.ui.form.TextField(this.__studyModel.getName()).set({
font: "title-18",
height: 35,
enabled: fieldIsEnabled
}),
- description: new qx.ui.form.TextArea(this.__model.getDescription()).set({
+ description: new qx.ui.form.TextArea(this.__studyModel.getDescription()).set({
autoSize: true,
minHeight: 100,
maxHeight: 500,
enabled: fieldIsEnabled
}),
- thumbnail: new qx.ui.form.TextField(this.__model.getThumbnail()).set({
+ thumbnail: new qx.ui.form.TextField(this.__studyModel.getThumbnail()).set({
enabled: fieldIsEnabled
})
};
- const modeButton = new qx.ui.form.Button("Save", "@FontAwesome5Solid/save/16").set({
+ const saveButton = new qx.ui.form.Button(this.tr("Save"), "@FontAwesome5Solid/save/16").set({
appearance: "lg-button"
});
- osparc.utils.Utils.setIdToWidget(modeButton, "studyDetailsEditorSaveBtn");
- modeButton.addListener("execute", e => {
+ osparc.utils.Utils.setIdToWidget(saveButton, "studyDetailsEditorSaveBtn");
+ saveButton.addListener("execute", e => {
const btn = e.getTarget();
btn.setIcon("@FontAwesome5Solid/circle-notch/16");
btn.getChildControl("icon").getContentElement()
.addClass("rotate");
- this.__saveStudy(btn);
+ this.__saveStudy(isTemplate, btn);
}, this);
const cancelButton = new qx.ui.form.Button(this.tr("Cancel")).set({
appearance: "lg-button",
- enabled: isCurrentUserOwner && (!this.__isTemplate || canUpdateTemplate)
+ enabled: isCurrentUserOwner && (!isTemplate || canUpdateTemplate)
});
osparc.utils.Utils.setIdToWidget(cancelButton, "studyDetailsEditorCancelBtn");
cancelButton.addListener("execute", () => this.setMode("display"), this);
@@ -196,7 +230,7 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
editView.add(this.__tagsSection());
}
- buttons.add(modeButton);
+ buttons.add(saveButton);
buttons.add(cancelButton);
editView.add(buttons);
@@ -215,13 +249,13 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
appearance: "link-button"
});
editButton.addListener("execute", () => {
- const tagManager = new osparc.component.form.tag.TagManager(this.__selectedTags, editButton, "study", this.__model.getUuid());
+ const tagManager = new osparc.component.form.tag.TagManager(this.__selectedTags, editButton, "study", this.__studyModel.getUuid());
tagManager.addListener("changeSelected", evt => {
this.__selectedTags = evt.getData().selected;
}, this);
tagManager.addListener("close", () => {
this.__renderTags();
- this.fireDataEvent("updateTags", this.__model.getUuid());
+ this.fireDataEvent("updateTags", this.__studyModel.getUuid());
}, this);
});
header.add(editButton);
@@ -244,21 +278,21 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
return this.__tagsContainer;
},
- __saveStudy: function(btn) {
+ __saveStudy: function(isTemplate, btn) {
const params = {
url: {
- projectId: this.__model.getUuid()
+ projectId: this.__studyModel.getUuid()
},
data: this.__serializeForm()
};
- osparc.data.Resources.fetch(this.__isTemplate ? "templates" : "studies", "put", params)
+ osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "put", params)
.then(data => {
btn.resetIcon();
btn.getChildControl("icon").getContentElement()
.removeClass("rotate");
- this.__model.set(data);
+ this.__studyModel.set(data);
this.setMode("display");
- this.fireDataEvent(this.__isTemplate ? "updatedTemplate" : "updatedStudy", data);
+ this.fireEvent(isTemplate ? "updateTemplate" : "updateStudy");
})
.catch(err => {
btn.resetIcon();
@@ -269,28 +303,43 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
});
},
+ __openPermissions: function() {
+ const studyData = qx.util.Serializer.toNativeObject(this.__studyModel);
+ const permissionsView = new osparc.component.export.Permissions(studyData);
+ const window = permissionsView.createWindow();
+ permissionsView.addListener("updateStudy", e => {
+ this.fireEvent("updateStudy");
+ });
+ permissionsView.addListener("finished", e => {
+ if (e.getData()) {
+ window.close();
+ }
+ }, this);
+ window.open();
+ },
+
__openSaveAsTemplate: function() {
- const saveAsTemplateView = new osparc.component.export.SaveAsTemplate(this.__model.getUuid(), this.__serializeForm());
- const window = osparc.component.export.SaveAsTemplate.createSaveAsTemplateWindow(saveAsTemplateView);
+ const saveAsTemplateView = new osparc.component.export.SaveAsTemplate(this.__studyModel.getUuid(), this.__serializeForm());
+ const window = saveAsTemplateView.createWindow();
saveAsTemplateView.addListener("finished", e => {
const template = e.getData();
if (template) {
- this.fireDataEvent("updatedTemplate", template);
- this.__model.set(template);
+ this.__studyModel.set(template);
this.setMode("display");
-
+ this.fireEvent("updateTemplate");
window.close();
}
}, this);
-
window.open();
},
__serializeForm: function() {
- const data = {
- ...qx.util.Serializer.toNativeObject(this.__model),
+ let data = {};
+ data = {
+ ...qx.util.Serializer.toNativeObject(this.__studyModel),
workbench: this.__workbench
};
+
for (let key in this.__fields) {
data[key] = this.__fields[key].getValue();
}
@@ -323,8 +372,8 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
},
__isUserOwner: function() {
- if (this.__model) {
- return this.__model.getPrjOwner() === osparc.auth.Data.getInstance().getEmail();
+ if (this.__studyModel) {
+ return this.__studyModel.getPrjOwner() === osparc.auth.Data.getInstance().getEmail();
}
return false;
}
diff --git a/services/web/client/source/class/osparc/component/metadata/StudyDetailsWindow.js b/services/web/client/source/class/osparc/component/metadata/StudyDetailsWindow.js
deleted file mode 100644
index b2e6607a347..00000000000
--- a/services/web/client/source/class/osparc/component/metadata/StudyDetailsWindow.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * oSPARC - The SIMCORE frontend - https://osparc.io
- * Copyright: 2019 IT'IS Foundation - https://itis.swiss
- * License: MIT - https://opensource.org/licenses/MIT
- * Authors: Odei Maiz (odeimaiz)
- */
-
-/**
- * Window that contains the StudyDetails of the given study metadata.
- *
- * *Example*
- *
- * Here is a little example of how to use the widget.
- *
- *
- * const win = new osparc.component.metadata.StudyDetailsWindow(study);
- * win.center();
- * win.open();
- *
- */
-
-qx.Class.define("osparc.component.metadata.StudyDetailsWindow", {
- extend: qx.ui.window.Window,
-
- /**
- * @param study {Object|osparc.data.model.Study} Study (metadata)
- */
- construct: function(study) {
- this.base(arguments, this.tr("Study information") + " · " + study.getName());
-
- const windowWidth = 700;
- const windowHeight = 800;
- this.set({
- layout: new qx.ui.layout.Grow(),
- autoDestroy: true,
- contentPadding: 10,
- showMinimize: false,
- resizable: true,
- modal: true,
- width: windowWidth,
- height: windowHeight
- });
-
- const studyDetails = new osparc.component.metadata.StudyDetails(study, windowWidth);
- const scroll = new qx.ui.container.Scroll().set({
- height: windowHeight
- });
- scroll.add(studyDetails);
- this.add(scroll);
- },
-
- properties: {
- appearance: {
- refine: true,
- init: "info-service-window"
- }
- }
-});
diff --git a/services/web/client/source/class/osparc/component/metadata/StudyInfo.js b/services/web/client/source/class/osparc/component/metadata/StudyInfo.js
index 33bb1badcc5..2eadc7b766a 100644
--- a/services/web/client/source/class/osparc/component/metadata/StudyInfo.js
+++ b/services/web/client/source/class/osparc/component/metadata/StudyInfo.js
@@ -38,7 +38,7 @@ qx.Class.define("osparc.component.metadata.StudyInfo", {
this.__study = study;
- this._add(this.__createExpandButton());
+ this._add(this.__getMoreInfoMenuButton());
const windowWidth = 400;
this._add(new osparc.component.metadata.StudyDetails(study, windowWidth));
},
@@ -46,18 +46,31 @@ qx.Class.define("osparc.component.metadata.StudyInfo", {
members: {
__study: null,
- __createExpandButton: function() {
- const expandButton = new qx.ui.form.Button().set({
- label: this.tr("Show all"),
+ __getMoreInfoMenuButton: function() {
+ const moreInfoButton = new qx.ui.form.Button(this.tr("More Info")).set({
icon: "@FontAwesome5Solid/external-link-alt/16",
allowGrowX: false
});
- expandButton.addListener("execute", function() {
- const win = new osparc.component.metadata.StudyDetailsWindow(this.__study);
- win.center();
- win.open();
+
+ moreInfoButton.addListener("execute", function() {
+ this.__createStudyDetailsEditor();
}, this);
- return expandButton;
+ return moreInfoButton;
+ },
+
+ __createStudyDetailsEditor: function() {
+ const width = 500;
+ const height = 500;
+ const title = this.tr("Study Details Editor");
+ const studyDetails = new osparc.component.metadata.StudyDetailsEditor(this.__study.serializeStudy(), false, width);
+ studyDetails.showOpenButton(false);
+ const win = osparc.component.metadata.StudyDetailsEditor.popUpInWindow(title, studyDetails, width, height);
+ [
+ "updateStudy"
+ ].forEach(event => studyDetails.addListener(event, () => {
+ qx.event.message.Bus.getInstance().dispatchByName("updateStudy", this.__study.serializeStudy());
+ win.close();
+ }));
}
}
});
diff --git a/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js b/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js
index 69a0b2f1518..607052e6769 100644
--- a/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js
+++ b/services/web/client/source/class/osparc/component/service/manager/ActivityTree.js
@@ -75,7 +75,7 @@ qx.Class.define("osparc.component.service.manager.ActivityTree", {
},
events: {
- treeUpdated: "qx.event.type.Event"
+ "treeUpdated": "qx.event.type.Event"
},
members: {
diff --git a/services/web/client/source/class/osparc/component/widget/InputsMapperTreeItem.js b/services/web/client/source/class/osparc/component/widget/InputsMapperTreeItem.js
index 4ec36fcb3bb..9827786b413 100644
--- a/services/web/client/source/class/osparc/component/widget/InputsMapperTreeItem.js
+++ b/services/web/client/source/class/osparc/component/widget/InputsMapperTreeItem.js
@@ -39,10 +39,6 @@
qx.Class.define("osparc.component.widget.InputsMapperTreeItem", {
extend: qx.ui.tree.VirtualTreeItem,
- construct: function() {
- this.base(arguments);
- },
-
properties: {
isDir: {
check: "Boolean",
diff --git a/services/web/client/source/class/osparc/component/widget/OrganizationListItem.js b/services/web/client/source/class/osparc/component/widget/OrganizationListItem.js
deleted file mode 100644
index 8896dafcb8b..00000000000
--- a/services/web/client/source/class/osparc/component/widget/OrganizationListItem.js
+++ /dev/null
@@ -1,96 +0,0 @@
-/* ************************************************************************
-
- osparc - the simcore frontend
-
- https://osparc.io
-
- Copyright:
- 2020 IT'IS Foundation, https://itis.swiss
-
- License:
- MIT: https://opensource.org/licenses/MIT
-
- Authors:
- * Odei Maiz (odeimaiz)
-
-************************************************************************ */
-
-/**
- *
- */
-
-qx.Class.define("osparc.component.widget.OrganizationListItem", {
- extend: qx.ui.core.Widget,
- implement: qx.ui.form.IModel,
- include: qx.ui.form.MModelProperty,
-
- construct: function() {
- this.base(arguments);
-
- const layout = new qx.ui.layout.HBox();
- this._setLayout(layout);
- },
-
- properties: {
- gid: {
- check: "String",
- nullable: false
- },
-
- label: {
- check: "String",
- apply: "_applyLabel",
- nullable: false
- },
-
- description: {
- check: "String",
- init: "",
- apply: "_applyDescription",
- nullable: true
- }
- },
-
- members: {
-
- _createChildControlImpl: function(id) {
- let control;
- switch (id) {
- case "label":
- control = new qx.ui.basic.Label().set({
- font: osparc.utils.Utils.getFont(14, true),
- alignY: "bottom"
- });
- this._add(control);
- break;
- case "description":
- control = new qx.ui.basic.Label().set({
- font: osparc.utils.Utils.getFont(12),
- alignY: "bottom"
- });
- this._add(control, {
- flex: 1
- });
- break;
- }
-
- return control || this.base(arguments, id);
- },
-
- _applyLabel: function(value) {
- if (value === null) {
- return;
- }
- const label = this.getChildControl("label");
- label.setValue(value);
- },
-
- _applyDescription: function(value) {
- if (value === null) {
- return;
- }
- const label = this.getChildControl("description");
- label.setValue(": " + value);
- }
- }
-});
diff --git a/services/web/client/source/class/osparc/component/widget/inputs/NodeOutputTreeItem.js b/services/web/client/source/class/osparc/component/widget/inputs/NodeOutputTreeItem.js
index 1e4f62fa096..4cb0e7819c2 100644
--- a/services/web/client/source/class/osparc/component/widget/inputs/NodeOutputTreeItem.js
+++ b/services/web/client/source/class/osparc/component/widget/inputs/NodeOutputTreeItem.js
@@ -44,10 +44,6 @@
qx.Class.define("osparc.component.widget.inputs.NodeOutputTreeItem", {
extend: qx.ui.tree.VirtualTreeItem,
- construct: function() {
- this.base(arguments);
- },
-
properties: {
isDir: {
check: "Boolean",
diff --git a/services/web/client/source/class/osparc/component/workbench/NodeUI.js b/services/web/client/source/class/osparc/component/workbench/NodeUI.js
index 4a047c0b726..40b5b10c18a 100644
--- a/services/web/client/source/class/osparc/component/workbench/NodeUI.js
+++ b/services/web/client/source/class/osparc/component/workbench/NodeUI.js
@@ -64,6 +64,9 @@ qx.Class.define("osparc.component.workbench.NodeUI", {
this.__createNodeLayout();
this.subscribeToFilterGroup("workbench");
+
+ this.getChildControl("captionbar").setCursor("move");
+ this.getChildControl("title").setCursor("move");
},
properties: {
@@ -111,10 +114,6 @@ qx.Class.define("osparc.component.workbench.NodeUI", {
return this.getNode().getNodeId();
},
- getCaptionBar: function() {
- return this.getChildControl("captionbar");
- },
-
_createChildControlImpl: function(id) {
let control;
switch (id) {
@@ -241,6 +240,7 @@ qx.Class.define("osparc.component.workbench.NodeUI", {
paddingLeft: 5,
paddingRight: 5
});
+ uiPort.setCursor("pointer");
return uiPort;
},
diff --git a/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js b/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js
index d7b1816e3b2..4a54b281d4f 100644
--- a/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js
+++ b/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js
@@ -870,14 +870,17 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", {
qx.event.message.Bus.getInstance().dispatchByName("maximizeIframe", false);
+ this.addListener("resize", () => this.__updateAllEdges(), this);
+ });
+
+ this.addListenerOnce("appear", () => {
const domEl = this.getContentElement().getDomElement();
domEl.addEventListener("dragenter", this.__dragEnter.bind(this), false);
domEl.addEventListener("dragover", this.__dragOver.bind(this), false);
domEl.addEventListener("dragleave", this.__dragLeave.bind(this), false);
domEl.addEventListener("drop", this.__drop.bind(this), false);
-
- this.addListener("resize", () => this.__updateAllEdges(), this);
});
+
this.addListener("disappear", () => {
// Reset filters
osparc.component.filter.UIFilterController.getInstance().resetGroup("workbench");
@@ -905,12 +908,9 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", {
return (pointerEvent.target instanceof SVGElement);
},
- __allowDrop: function(pointerEvent) {
+ __allowDropFile: function(pointerEvent) {
const files = pointerEvent.dataTransfer.files;
- if (files.length === 1) {
- return files[0].type !== "";
- }
- return false;
+ return files.length === 1;
},
__dragEnter: function(e) {
@@ -928,7 +928,7 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", {
__drop: function(e) {
this.__dragging(e, false);
- if (this.__allowDrop(e)) {
+ if (this.__allowDropFile(e)) {
const pos = {
x: e.offsetX,
y: e.offsetY
diff --git a/services/web/client/source/class/osparc/dashboard/CollaboratorListItem.js b/services/web/client/source/class/osparc/dashboard/CollaboratorListItem.js
new file mode 100644
index 00000000000..9e6d7f15c58
--- /dev/null
+++ b/services/web/client/source/class/osparc/dashboard/CollaboratorListItem.js
@@ -0,0 +1,134 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("osparc.dashboard.CollaboratorListItem", {
+ extend: osparc.dashboard.ServiceBrowserListItem,
+
+ construct: function() {
+ this.base(arguments);
+ },
+
+ properties: {
+ isOrganization: {
+ check: "Boolean",
+ event: "changeIsOrganization",
+ nullable: true
+ },
+
+ accessRights: {
+ check: "Object",
+ apply: "_applyAccessRights",
+ event: "changeAccessRights",
+ nullable: true
+ },
+
+ showOptions: {
+ check: "Boolean",
+ apply: "_applyShowOptions",
+ event: "changeShowOptions",
+ nullable: true
+ }
+ },
+
+ events: {
+ "promoteCollaborator": "qx.event.type.Data",
+ "removeCollaborator": "qx.event.type.Data"
+ },
+
+ members: {
+ _createChildControlImpl: function(id) {
+ let control;
+ switch (id) {
+ case "options": {
+ const iconSize = 25;
+ control = new qx.ui.form.MenuButton().set({
+ maxWidth: iconSize,
+ maxHeight: iconSize,
+ alignX: "center",
+ alignY: "middle",
+ icon: "@FontAwesome5Solid/ellipsis-v/"+(iconSize-11),
+ focusable: false
+ });
+ osparc.utils.Utils.setIdToWidget(control, "studyItemMenuButton");
+ this._add(control, {
+ row: 0,
+ column: 3,
+ rowSpan: 2
+ });
+ break;
+ }
+ }
+
+ return control || this.base(arguments, id);
+ },
+
+ _applyAccessRights: function(value) {
+ if (value === null) {
+ return;
+ }
+ const subtitle = this.getChildControl("contact");
+ if (value.getDelete()) {
+ subtitle.setValue(this.tr("Owner"));
+ } else if (value.getWrite()) {
+ subtitle.setValue(this.tr("Collaborator"));
+ } else {
+ subtitle.setValue(this.tr("Viewer"));
+ }
+ },
+
+ _applyShowOptions: function(value) {
+ const optionsMenu = this.getChildControl("options");
+ optionsMenu.setVisibility(value ? "visible" : "excluded");
+ if (value) {
+ const menu = this.__getOptionsMenu();
+ optionsMenu.setMenu(menu);
+ optionsMenu.setVisibility(menu.getChildren().length ? "visible" : "excluded");
+ }
+ },
+
+ __getOptionsMenu: function() {
+ const menu = new qx.ui.menu.Menu().set({
+ position: "bottom-right"
+ });
+
+ const accessRights = this.getAccessRights();
+ if (!accessRights.getDelete() && !this.getIsOrganization()) {
+ const makeOwnerButton = new qx.ui.menu.Button(this.tr("Make Owner"));
+ makeOwnerButton.addListener("execute", () => {
+ this.fireDataEvent("promoteCollaborator", {
+ gid: this.getKey(),
+ name: this.getTitle()
+ });
+ });
+ menu.add(makeOwnerButton);
+ }
+
+ if (!accessRights.getDelete()) {
+ const removeButton = new qx.ui.menu.Button(this.tr("Remove Collaborator"));
+ removeButton.addListener("execute", () => {
+ this.fireDataEvent("removeCollaborator", {
+ gid: this.getKey(),
+ name: this.getTitle()
+ });
+ });
+ menu.add(removeButton);
+ }
+
+ return menu;
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/dashboard/OrgMemberListItem.js b/services/web/client/source/class/osparc/dashboard/OrgMemberListItem.js
new file mode 100644
index 00000000000..24f7ca06b66
--- /dev/null
+++ b/services/web/client/source/class/osparc/dashboard/OrgMemberListItem.js
@@ -0,0 +1,125 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("osparc.dashboard.OrgMemberListItem", {
+ extend: osparc.dashboard.ServiceBrowserListItem,
+
+ construct: function() {
+ this.base(arguments);
+ },
+
+ properties: {
+ accessRights: {
+ check: "Object",
+ apply: "_applyAccessRights",
+ event: "changeAccessRights",
+ nullable: true
+ },
+
+ showOptions: {
+ check: "Boolean",
+ apply: "_applyShowOptions",
+ event: "changeShowOptions",
+ nullable: true
+ }
+ },
+
+ events: {
+ "promoteOrgMember": "qx.event.type.Data",
+ "removeOrgMember": "qx.event.type.Data"
+ },
+
+ members: {
+ _createChildControlImpl: function(id) {
+ let control;
+ switch (id) {
+ case "options": {
+ const iconSize = 25;
+ control = new qx.ui.form.MenuButton().set({
+ maxWidth: iconSize,
+ maxHeight: iconSize,
+ alignX: "center",
+ alignY: "middle",
+ icon: "@FontAwesome5Solid/ellipsis-v/"+(iconSize-11),
+ focusable: false
+ });
+ osparc.utils.Utils.setIdToWidget(control, "studyItemMenuButton");
+ this._add(control, {
+ row: 0,
+ column: 3,
+ rowSpan: 2
+ });
+ break;
+ }
+ }
+
+ return control || this.base(arguments, id);
+ },
+
+ _applyAccessRights: function(value) {
+ if (value === null) {
+ return;
+ }
+ const subtitle = this.getChildControl("contact");
+ if (value.getDelete()) {
+ subtitle.setValue(this.tr("Administrator"));
+ } else if (value.getWrite()) {
+ subtitle.setValue(this.tr("Manager"));
+ } else {
+ subtitle.setValue(this.tr("Member"));
+ }
+ },
+
+ _applyShowOptions: function(value) {
+ const optionsMenu = this.getChildControl("options");
+ optionsMenu.setVisibility(value ? "visible" : "excluded");
+ if (value) {
+ const menu = this.__getOptionsMenu();
+ optionsMenu.setMenu(menu);
+ }
+ },
+
+ __getOptionsMenu: function() {
+ const menu = new qx.ui.menu.Menu().set({
+ position: "bottom-right"
+ });
+
+ const accessRights = this.getAccessRights();
+ if (accessRights && !accessRights.getDelete() && !accessRights.getWrite()) {
+ const promoteButton = new qx.ui.menu.Button(this.tr("Promote to Manager"));
+ promoteButton.addListener("execute", () => {
+ this.fireDataEvent("promoteOrgMember", {
+ key: this.getKey(),
+ name: this.getTitle()
+ });
+ });
+ menu.add(promoteButton);
+ }
+
+ const removeButton = new qx.ui.menu.Button(this.tr("Remove Member"));
+ removeButton.addListener("execute", () => {
+ this.fireDataEvent("removeOrgMember", {
+ key: this.getKey(),
+ name: this.getTitle()
+ });
+ });
+ menu.add(removeButton);
+
+ return menu;
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/dashboard/OrganizationEditor.js b/services/web/client/source/class/osparc/dashboard/OrganizationEditor.js
new file mode 100644
index 00000000000..0a2b67be035
--- /dev/null
+++ b/services/web/client/source/class/osparc/dashboard/OrganizationEditor.js
@@ -0,0 +1,173 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("osparc.dashboard.OrganizationEditor", {
+ extend: qx.ui.core.Widget,
+
+ construct: function(newOrg = true) {
+ this.base(arguments);
+
+ this._setLayout(new qx.ui.layout.VBox(8));
+
+ const manager = this.__validator = new qx.ui.form.validation.Manager();
+ const title = this.getChildControl("title");
+ title.setRequired(true);
+ manager.add(title);
+ this.getChildControl("description");
+ this.getChildControl("thumbnail");
+ newOrg ? this.getChildControl("create") : this.getChildControl("save");
+ },
+
+ statics: {
+ popUpInWindow: function(title, organizationEditor, width = 300, height = 200) {
+ const win = new qx.ui.window.Window(title).set({
+ autoDestroy: true,
+ layout: new qx.ui.layout.VBox(),
+ appearance: "service-window",
+ showMinimize: false,
+ showMaximize: false,
+ resizable: true,
+ contentPadding: 10,
+ width: width,
+ height: height,
+ modal: true
+ });
+ win.add(organizationEditor);
+ win.center();
+ win.open();
+ organizationEditor.addListener("cancel", () => {
+ win.close();
+ });
+ return win;
+ }
+ },
+
+ properties: {
+ gid: {
+ check: "Number",
+ init: 0,
+ nullable: false,
+ event: "changeGid"
+ },
+
+ label: {
+ check: "String",
+ init: "",
+ nullable: false,
+ event: "changeLabel"
+ },
+
+ description: {
+ check: "String",
+ init: "",
+ nullable: false,
+ event: "changeDescription"
+ },
+
+ thumbnail: {
+ check: "String",
+ init: "",
+ nullable: false,
+ event: "changeThumbnail"
+ }
+ },
+
+ events: {
+ "createOrg": "qx.event.type.Event",
+ "updateOrg": "qx.event.type.Event",
+ "cancel": "qx.event.type.Event"
+ },
+
+ members: {
+ _createChildControlImpl: function(id) {
+ let control;
+ switch (id) {
+ case "title": {
+ control = new qx.ui.form.TextField().set({
+ font: "title-14",
+ placeholder: this.tr("Title"),
+ height: 35
+ });
+ this.bind("label", control, "value");
+ control.bind("value", this, "label");
+ this._add(control);
+ break;
+ }
+ case "description": {
+ control = new qx.ui.form.TextArea().set({
+ font: "text-14",
+ placeholder: this.tr("Description"),
+ autoSize: true,
+ minHeight: 70,
+ maxHeight: 140
+ });
+ this.bind("description", control, "value");
+ control.bind("value", this, "description");
+ this._add(control);
+ break;
+ }
+ case "thumbnail": {
+ control = new qx.ui.form.TextField().set({
+ font: "text-14",
+ placeholder: this.tr("Thumbnail"),
+ height: 35
+ });
+ this.bind("thumbnail", control, "value");
+ control.bind("value", this, "thumbnail");
+ this._add(control);
+ break;
+ }
+ case "create": {
+ const buttons = this.getChildControl("buttonsLayout");
+ control = new osparc.ui.form.FetchButton(this.tr("Create"));
+ control.addListener("execute", () => {
+ if (this.__validator.validate()) {
+ control.setFetching(true);
+ this.fireEvent("createOrg");
+ }
+ }, this);
+ buttons.addAt(control, 0);
+ break;
+ }
+ case "save": {
+ const buttons = this.getChildControl("buttonsLayout");
+ control = new osparc.ui.form.FetchButton(this.tr("Save"));
+ control.addListener("execute", () => {
+ if (this.__validator.validate()) {
+ control.setFetching(true);
+ this.fireEvent("updateOrg");
+ }
+ }, this);
+ buttons.addAt(control, 0);
+ break;
+ }
+ case "buttonsLayout": {
+ control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({
+ alignX: "right"
+ }));
+ const cancelButton = new qx.ui.form.Button(this.tr("Cancel"));
+ cancelButton.addListener("execute", () => this.fireEvent("cancel"), this);
+ control.add(cancelButton);
+ this._add(control);
+ break;
+ }
+ }
+
+ return control || this.base(arguments, id);
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/dashboard/OrganizationListItem.js b/services/web/client/source/class/osparc/dashboard/OrganizationListItem.js
new file mode 100644
index 00000000000..c7a83ae6032
--- /dev/null
+++ b/services/web/client/source/class/osparc/dashboard/OrganizationListItem.js
@@ -0,0 +1,100 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("osparc.dashboard.OrganizationListItem", {
+ extend: osparc.dashboard.ServiceBrowserListItem,
+
+ construct: function() {
+ this.base(arguments);
+ },
+
+ properties: {
+ accessRights: {
+ check: "Object",
+ nullable: false,
+ apply: "_applyAccessRights",
+ event: "changeAcessRights"
+ }
+ },
+
+ events: {
+ "openEditOrganization": "qx.event.type.Data"
+ },
+
+ members: {
+ _createChildControlImpl: function(id) {
+ let control;
+ switch (id) {
+ case "options": {
+ const iconSize = 25;
+ control = new qx.ui.form.MenuButton().set({
+ maxWidth: iconSize,
+ maxHeight: iconSize,
+ alignX: "center",
+ alignY: "middle",
+ icon: "@FontAwesome5Solid/ellipsis-v/"+(iconSize-11),
+ focusable: false
+ });
+ osparc.utils.Utils.setIdToWidget(control, "studyItemMenuButton");
+ this._add(control, {
+ row: 0,
+ column: 3,
+ rowSpan: 2
+ });
+ break;
+ }
+ }
+
+ return control || this.base(arguments, id);
+ },
+
+ _applyAccessRights: function(value) {
+ if (value === null) {
+ return;
+ }
+ if (value.getDelete()) {
+ const optionsMenu = this.getChildControl("options");
+ const menu = this.__getOptionsMenu();
+ optionsMenu.setMenu(menu);
+ }
+ },
+
+ __getOptionsMenu: function() {
+ const menu = new qx.ui.menu.Menu().set({
+ position: "bottom-right"
+ });
+
+ const editOrgButton = new qx.ui.menu.Button(this.tr("Edit details"));
+ editOrgButton.addListener("execute", () => {
+ this.fireDataEvent("openEditOrganization", this.getKey());
+ });
+ menu.add(editOrgButton);
+
+ return menu;
+ },
+
+ // overriden
+ _applyThumbnail: function(value) {
+ const thumbnail = this.getChildControl("thumbnail");
+ if (value) {
+ thumbnail.setSource(value);
+ } else {
+ thumbnail.setSource("@FontAwesome5Solid/users/24");
+ }
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js b/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js
index 49b6891bca1..0bbf49cdce2 100644
--- a/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js
+++ b/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js
@@ -160,7 +160,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", {
ctrl.bindProperty("key", "key", null, item, id);
ctrl.bindProperty("version", "version", null, item, id);
ctrl.bindProperty("name", "title", null, item, id);
- ctrl.bindProperty("description", "description", null, item, id);
+ ctrl.bindProperty("description", "subtitle", null, item, id);
ctrl.bindProperty("type", "type", null, item, id);
ctrl.bindProperty("category", "category", null, item, id);
ctrl.bindProperty("contact", "contact", null, item, id);
diff --git a/services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js b/services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js
index 447d7cf5dbe..53045e26795 100644
--- a/services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js
+++ b/services/web/client/source/class/osparc/dashboard/ServiceBrowserListItem.js
@@ -32,7 +32,7 @@
* bindItem: (c, item, id) => {
* c.bindProperty("key", "model", null, item, id);
* c.bindProperty("name", "title", null, item, id);
- * c.bindProperty("description", "description", null, item, id);
+ * c.bindProperty("description", "subtitle", null, item, id);
* c.bindProperty("type", "type", null, item, id);
* c.bindProperty("category", "category", null, item, id);
* c.bindProperty("contact", "contact", null, item, id);
@@ -49,8 +49,8 @@ qx.Class.define("osparc.dashboard.ServiceBrowserListItem", {
construct: function() {
this.base(arguments);
- const layout = new qx.ui.layout.Grid(0, 5);
- layout.setColumnFlex(0, 1);
+ const layout = new qx.ui.layout.Grid(8, 5);
+ layout.setColumnFlex(1, 1);
this._setLayout(layout);
this.setPadding(5);
@@ -83,15 +83,21 @@ qx.Class.define("osparc.dashboard.ServiceBrowserListItem", {
nullable : true
},
+ thumbnail: {
+ check : "String",
+ apply : "_applyThumbnail",
+ nullable : true
+ },
+
title: {
check : "String",
apply : "_applyTitle",
nullable : true
},
- description: {
+ subtitle: {
check : "String",
- apply : "_applyDescription",
+ apply : "_applySubtitle",
nullable : true
},
@@ -139,23 +145,39 @@ qx.Class.define("osparc.dashboard.ServiceBrowserListItem", {
_createChildControlImpl: function(id) {
let control;
switch (id) {
+ case "thumbnail":
+ control = new qx.ui.basic.Image().set({
+ scale: true,
+ allowGrowX: true,
+ allowGrowY: true,
+ allowShrinkX: true,
+ allowShrinkY: true,
+ maxWidth: 32,
+ maxHeight: 32
+ });
+ this._add(control, {
+ row: 0,
+ column: 0,
+ rowSpan: 2
+ });
+ break;
case "title":
control = new qx.ui.basic.Label().set({
font: "title-14"
});
this._add(control, {
row: 0,
- column: 0
+ column: 1
});
break;
- case "description":
+ case "subtitle":
control = new osparc.ui.markdown.Markdown().set({
font: "text-13",
maxHeight: 16
});
this._add(control, {
row: 1,
- column: 0
+ column: 1
});
break;
case "contact":
@@ -164,7 +186,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowserListItem", {
});
this._add(control, {
row: 1,
- column: 1
+ column: 2
});
break;
}
@@ -181,6 +203,14 @@ qx.Class.define("osparc.dashboard.ServiceBrowserListItem", {
osparc.utils.Utils.setIdToWidget(this, "serviceBrowserListItem_"+id);
},
+ _applyThumbnail: function(value) {
+ if (value === null) {
+ return;
+ }
+ const thumbnail = this.getChildControl("thumbnail");
+ thumbnail.setSource(value);
+ },
+
_applyTitle: function(value) {
if (value === null) {
return;
@@ -189,11 +219,11 @@ qx.Class.define("osparc.dashboard.ServiceBrowserListItem", {
label.setValue(value);
},
- _applyDescription: function(value) {
+ _applySubtitle: function(value) {
if (value === null) {
return;
}
- const label = this.getChildControl("description");
+ const label = this.getChildControl("subtitle");
label.setValue(value);
},
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
index a004254ffc9..f07bc02e376 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -91,6 +91,21 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
this.__itemSelected(null);
},
+ __reloadUserStudy: function(studyId) {
+ const params = {
+ url: {
+ projectId: studyId
+ }
+ };
+ osparc.data.Resources.getOne("studies", params)
+ .then(studyData => {
+ this.__resetStudyItem(studyData);
+ })
+ .catch(err => {
+ console.error(err);
+ });
+ },
+
/**
* Function that asks the backend for the list of studies belonging to the user
* and sets it
@@ -99,14 +114,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
if (osparc.data.Permissions.getInstance().canDo("studies.user.read")) {
osparc.data.Resources.get("studies")
.then(studies => {
- this.__setStudyList(studies);
+ this.__resetStudyList(studies);
this.__itemSelected(null);
})
.catch(err => {
console.error(err);
});
} else {
- this.__setStudyList([]);
+ this.__resetStudyList([]);
}
},
@@ -117,14 +132,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
if (osparc.data.Permissions.getInstance().canDo("studies.templates.read")) {
osparc.data.Resources.get("templates")
.then(templates => {
- this.__setTemplateList(templates);
+ this.__resetTemplateList(templates);
this.__itemSelected(null);
})
.catch(err => {
console.error(err);
});
} else {
- this.__setTemplateList([]);
+ this.__resetTemplateList([]);
}
},
@@ -308,7 +323,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
commandEsc.addListener("execute", e => {
this.__itemSelected(null);
});
- osparc.store.Store.getInstance().addListener("changeTags", () => this.__setStudyList(osparc.store.Store.getInstance().getStudies()), this);
+ osparc.store.Store.getInstance().addListener("changeTags", () => this.__resetStudyList(osparc.store.Store.getInstance().getStudies()), this);
},
__createStudyBtnClkd: function(templateData) {
@@ -328,7 +343,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
__createStudy: function(minStudyData, templateId) {
- this.__showLoadingPage(this.tr("Creating Study"));
+ this.__showLoadingPage(this.tr("Creating ") + (minStudyData.name || this.tr("Study")));
if (templateId) {
const params = {
url: {
@@ -358,7 +373,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
},
__startStudy: function(studyData) {
- this.__showLoadingPage(this.tr("Starting Study"));
+ this.__showLoadingPage(this.tr("Starting ") + (studyData.name || this.tr("Study")));
osparc.store.Store.getInstance().getServicesDAGs(false)
.then(() => {
this.__hideLoadingPage();
@@ -368,7 +383,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__loadStudy: function(studyData) {
const study = new osparc.data.model.Study(studyData);
- study.setAccessRights({});
this.__studyEditor = this.__studyEditor || new osparc.desktop.StudyEditor();
this.__studyEditor.setStudy(study);
this.fireDataEvent("startStudy", this.__studyEditor);
@@ -392,7 +406,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return tempList;
},
- __setStudyList: function(userStudyList) {
+ __resetStudyItem: function(studyData) {
+ const userStudyList = this.__userStudies;
+ const index = userStudyList.findIndex(userStudy => userStudy["uuid"] === studyData["uuid"]);
+ if (index !== -1) {
+ this.__userStudies[index] = studyData;
+ this.__resetStudyList(userStudyList);
+ }
+ },
+
+ __resetStudyList: function(userStudyList) {
this.__userStudies = userStudyList;
this.__userStudyContainer.removeAll();
this.self().sortStudyList(userStudyList);
@@ -402,7 +425,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
osparc.component.filter.UIFilterController.dispatch("studyBrowser");
},
- __setTemplateList: function(tempStudyList) {
+ __resetTemplateList: function(tempStudyList) {
this.__templateStudies = tempStudyList;
this.__templateStudyContainer.removeAll();
this.__templateStudyContainer.add(this.__createNewStudyButton());
@@ -467,11 +490,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const moreInfoButton = this.__getMoreInfoMenuButton(studyData, isTemplate);
menu.add(moreInfoButton);
- const isCurrentUserOwner = this.__isUserOwner(studyData);
- const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create");
- if (isCurrentUserOwner && !isTemplate && canCreateTemplate) {
- const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(studyData);
- menu.add(saveAsTemplateButton);
+ if (!isTemplate) {
+ const shareStudyButton = this.__getPermissionsMenuButton(studyData);
+ menu.add(shareStudyButton);
+
+ const isCurrentUserOwner = this.__isUserOwner(studyData);
+ const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create");
+ if (isCurrentUserOwner && canCreateTemplate) {
+ const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(studyData);
+ menu.add(saveAsTemplateButton);
+ }
}
const deleteButton = this.__getDeleteStudyMenuButton(studyData, isTemplate);
@@ -495,41 +523,35 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__getMoreInfoMenuButton: function(studyData, isTemplate) {
const moreInfoButton = new qx.ui.menu.Button(this.tr("More Info"));
moreInfoButton.addListener("execute", () => {
- const winWidth = 400;
- const studyDetailsEditor = this.__createStudyDetailsEditor(studyData, isTemplate, winWidth);
- const win = new qx.ui.window.Window(this.tr("Study Details Editor")).set({
- autoDestroy: true,
- layout: new qx.ui.layout.VBox(),
- appearance: "service-window",
- showMinimize: false,
- showMaximize: false,
- resizable: true,
- contentPadding: 10,
- width: winWidth,
- height: 400,
- modal: true
- });
- [
- "updatedStudy",
- "updatedTemplate",
- "openedStudy"
- ].forEach(event => {
- studyDetailsEditor.addListener(event, () => {
- win.close();
- });
- });
- win.add(studyDetailsEditor);
- win.open();
- win.center();
+ this.__createStudyDetailsEditor(studyData, isTemplate);
}, this);
return moreInfoButton;
},
+ __getPermissionsMenuButton: function(studyData) {
+ const permissionsButton = new qx.ui.menu.Button(this.tr("Permissions"));
+ permissionsButton.addListener("execute", () => {
+ const permissionsView = new osparc.component.export.Permissions(studyData);
+ permissionsView.addListener("updateStudy", e => {
+ const studyId = e.getData();
+ this.__reloadUserStudy(studyId);
+ }, this);
+ const window = permissionsView.createWindow();
+ permissionsView.addListener("finished", e => {
+ if (e.getData()) {
+ window.close();
+ }
+ }, this);
+ window.open();
+ }, this);
+ return permissionsButton;
+ },
+
__getSaveAsTemplateMenuButton: function(studyData) {
- const saveAsTemplateButton = new qx.ui.menu.Button(this.tr("Save as template"));
+ const saveAsTemplateButton = new qx.ui.menu.Button(this.tr("Save as Template"));
saveAsTemplateButton.addListener("execute", () => {
const saveAsTemplateView = new osparc.component.export.SaveAsTemplate(studyData.uuid, studyData);
- const window = osparc.component.export.SaveAsTemplate.createSaveAsTemplateWindow(saveAsTemplateView);
+ const window = saveAsTemplateView.createWindow();
saveAsTemplateView.addListener("finished", e => {
const template = e.getData();
if (template) {
@@ -544,7 +566,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__getDeleteStudyMenuButton: function(studyData, isTemplate) {
const isCurrentUserOwner = this.__isUserOwner(studyData);
- if (!isCurrentUserOwner) {
+ if (isTemplate && !isCurrentUserOwner) {
return null;
}
@@ -556,7 +578,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
win.open();
win.addListener("close", () => {
if (win.getConfirmed()) {
- this.__deleteStudies([studyData], isTemplate);
+ this.__deleteStudy(studyData, isTemplate);
}
}, this);
}, this);
@@ -615,10 +637,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__createStudyDetailsEditor: function(studyData, isTemplate, winWidth) {
const studyDetails = new osparc.component.metadata.StudyDetailsEditor(studyData, isTemplate, winWidth);
- studyDetails.addListener("closed", () => this.__itemSelected(null), this);
- studyDetails.addListener("updatedStudy", () => this.reloadUserStudies(), this);
- studyDetails.addListener("updatedTemplate", () => this.reloadTemplateStudies(), this);
- studyDetails.addListener("openedStudy", () => {
+ studyDetails.addListener("updateStudy", () => this.reloadUserStudies(), this);
+ studyDetails.addListener("updateTemplate", () => this.reloadTemplateStudies(), this);
+ studyDetails.addListener("openStudy", () => {
if (isTemplate) {
this.__createStudyBtnClkd(studyData);
} else {
@@ -627,13 +648,20 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
}, this);
studyDetails.addListener("updateTags", () => {
if (isTemplate) {
- this.__setTemplateList(osparc.store.Store.getInstance().getTemplates());
+ this.__resetTemplateList(osparc.store.Store.getInstance().getTemplates());
} else {
- this.__setStudyList(osparc.store.Store.getInstance().getStudies());
+ this.__resetStudyList(osparc.store.Store.getInstance().getStudies());
}
});
- return studyDetails;
+ const height = 400;
+ const title = this.tr("Study Details Editor");
+ const win = osparc.component.metadata.StudyDetailsEditor.popUpInWindow(title, studyDetails, winWidth, height);
+ [
+ "updateStudy",
+ "updateTemplate",
+ "openStudy"
+ ].forEach(event => studyDetails.addListener(event, () => win.close()));
},
__updateDeleteStudiesButton: function(studiesDeleteButton) {
@@ -667,20 +695,39 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
}
},
+ __deleteStudy: function(studyData, isTemplate = false) {
+ const myGid = osparc.auth.Data.getInstance().getGroupId();
+ const collabGids = Object.keys(studyData["accessRights"]);
+ const amICollaborator = collabGids.indexOf(myGid) > -1;
+
+ const params = {
+ url: {
+ projectId: studyData.uuid
+ }
+ };
+ let operationPromise = null;
+ if (collabGids.length > 1 && amICollaborator) {
+ // remove collaborator
+ const permissions = osparc.component.export.Permissions;
+ permissions.removeCollaborator(studyData, myGid);
+ params["data"] = studyData;
+ operationPromise = osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "put", params);
+ } else {
+ // delete study
+ operationPromise = osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "delete", params, studyData.uuid);
+ }
+ operationPromise
+ .then(() => this.__removeFromStudyList(studyData.uuid, isTemplate))
+ .catch(err => {
+ console.error(err);
+ osparc.component.message.FlashMessenger.getInstance().logAs(err, "ERROR");
+ })
+ .finally(this.__itemSelected(null));
+ },
+
__deleteStudies: function(studiesData, areTemplates = false) {
studiesData.forEach(studyData => {
- const params = {
- url: {
- projectId: studyData.uuid
- }
- };
- osparc.data.Resources.fetch(areTemplates ? "templates" : "studies", "delete", params, studyData.uuid)
- .then(() => this.__removeFromStudyList(studyData.uuid, areTemplates))
- .catch(err => {
- console.error(err);
- osparc.component.message.FlashMessenger.getInstance().logAs(err, "ERROR");
- })
- .finally(this.__itemSelected(null));
+ this.__deleteStudy(studyData, areTemplates);
});
},
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonBase.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonBase.js
index 967878ce008..d2caa7afe33 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonBase.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonBase.js
@@ -100,7 +100,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonBase", {
});
this._mainLayout.addAt(control, 1);
break;
- case "shared-creator":
+ case "shared-description2":
control = new qx.ui.container.Composite(new qx.ui.layout.HBox(6)).set({
anonymous: true
});
@@ -108,18 +108,18 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonBase", {
break;
case "shared": {
control = new qx.ui.basic.Image();
- const sharedCreatorLayout = this.getChildControl("shared-creator");
- sharedCreatorLayout.addAt(control, 0);
+ const sharedDescription2Layout = this.getChildControl("shared-description2");
+ sharedDescription2Layout.addAt(control, 0);
break;
}
- case "creator": {
+ case "description2": {
control = new qx.ui.basic.Label().set({
anonymous: true,
font: "text-13",
allowGrowY: false
});
- const sharedCreatorLayout = this.getChildControl("shared-creator");
- sharedCreatorLayout.addAt(control, 1, {
+ const sharedDescription2Layout = this.getChildControl("shared-description2");
+ sharedDescription2Layout.addAt(control, 1, {
flex: 1
});
break;
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js
index 2262d12984f..55e4e796eb1 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js
@@ -108,7 +108,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
statics: {
MENU_BTN_Z: 20,
MENU_BTN_WIDTH: 25,
- SHARED_ME: "@FontAwesome5Solid/user/14",
+ SHARED_USER: "@FontAwesome5Solid/user/14",
SHARED_ORGS: "@FontAwesome5Solid/users/14",
SHARED_ALL: "@FontAwesome5Solid/globe/14"
},
@@ -199,7 +199,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
_applyLastChangeDate: function(value, old) {
if (value && !this.getIsTemplate()) {
- const label = this.getChildControl("description");
+ const label = this.getChildControl("description2");
let dateStr = null;
if (value.getDate() === (new Date()).getDate()) {
dateStr = this.tr("Today");
@@ -215,7 +215,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
_applyCreator: function(value, old) {
if (this.getIsTemplate()) {
- const label = this.getChildControl("creator");
+ const label = this.getChildControl("description2");
label.setValue(value);
}
},
@@ -227,48 +227,76 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
const store = osparc.store.Store.getInstance();
Promise.all([
store.getGroupsAll(),
- store.getGroupsOrganizations(),
- store.getGroupsMe()
+ store.getGroupsMe(),
+ store.getVisibleMembers(),
+ store.getGroupsOrganizations()
])
.then(values => {
- const groups = [[values[0]], values[1], [values[2]]];
+ const all = values[0];
+ const me = values[1];
+ const orgMembs = [];
+ const orgMembers = values[2];
+ for (const gid of Object.keys(orgMembers)) {
+ orgMembs.push(orgMembers[gid]);
+ }
+ const orgs = values.length === 4 ? values[3] : [];
+ const groups = [[me], orgMembs, orgs, [all]];
this.__setSharedIcon(image, value, groups);
});
}
},
__setSharedIcon: function(image, value, groups) {
+ let sharedGrps = [];
+ const myGroupId = osparc.auth.Data.getInstance().getGroupId();
for (let i=0; i<groups.length; i++) {
- let hintText = "";
- Object.keys(value).forEach(key => {
- const grp = groups[i].find(group => group["gid"] === parseInt(key));
+ const sharedGrp = [];
+ const gids = Object.keys(value);
+ for (let j=0; j<gids.length; j++) {
+ const gid = parseInt(gids[j]);
+ if (gid === myGroupId) {
+ continue;
+ }
+ const grp = groups[i].find(group => group["gid"] === gid);
if (grp) {
- hintText += (grp["label"] + "<br>");
+ sharedGrp.push(grp);
}
- });
- if (hintText === "") {
+ }
+ if (sharedGrp.length === 0) {
continue;
+ } else {
+ sharedGrps = sharedGrps.concat(sharedGrp);
}
switch (i) {
case 0:
- image.setSource(this.self().SHARED_ALL);
- break;
case 1:
- image.setSource(this.self().SHARED_ORGS);
+ image.setSource(this.self().SHARED_USER);
break;
case 2:
- image.setSource(this.self().SHARED_ME);
+ image.setSource(this.self().SHARED_ORGS);
+ break;
+ case 3:
+ image.setSource(this.self().SHARED_ALL);
break;
}
+ }
- const hint = new osparc.ui.hint.Hint(image, hintText).set({
- active: false
- });
- image.addListener("mouseover", () => hint.show(), this);
- image.addListener("mouseout", () => hint.exclude(), this);
+ if (sharedGrps.length === 0) {
+ image.setVisibility("excluded");
+ return;
+ }
- break;
+ let hintText = "";
+ for (let i=0; i<sharedGrps.length; i++) {
+ if (i > 6) {
+ hintText += "...";
+ break;
+ }
+ hintText += (sharedGrps[i]["label"] + "<br>");
}
+ const hint = new osparc.ui.hint.Hint(image, hintText);
+ image.addListener("mouseover", () => hint.show(), this);
+ image.addListener("mouseout", () => hint.exclude(), this);
},
_applyTags: function(tags) {
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonNew.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonNew.js
index 52ba5f837be..0b893d9161d 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonNew.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonNew.js
@@ -41,7 +41,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonNew", {
const desc1 = this.getChildControl("description");
desc1.setValue(this.tr("Start with a empty study").toString());
- this.setIcon("@FontAwesome5Solid/file/50");
+ this.setIcon("@FontAwesome5Solid/plus/60");
},
_onToggleChange: function(e) {
diff --git a/services/web/client/source/class/osparc/data/Permissions.js b/services/web/client/source/class/osparc/data/Permissions.js
index 6f00b10e795..8161daebbd7 100644
--- a/services/web/client/source/class/osparc/data/Permissions.js
+++ b/services/web/client/source/class/osparc/data/Permissions.js
@@ -135,12 +135,12 @@ qx.Class.define("osparc.data.Permissions", {
"studies.user.read",
"studies.user.create",
"storage.datcore.read",
- "preferences.user.update",
- "preferences.apikey.create",
- "preferences.apikey.delete",
- "preferences.token.create",
- "preferences.token.delete",
- "preferences.tag",
+ "user.user.update",
+ "user.apikey.create",
+ "user.apikey.delete",
+ "user.token.create",
+ "user.token.delete",
+ "user.tag",
"study.node.create",
"study.node.delete",
"study.node.update",
@@ -159,7 +159,8 @@ qx.Class.define("osparc.data.Permissions", {
"studies.template.update",
"studies.template.delete",
"services.all.read",
- "preferences.role.update",
+ "user.role.update",
+ "user.organizations.create",
"study.nodestree.uuid.read",
"study.filestree.uuid.read",
"study.logger.debug.read"
diff --git a/services/web/client/source/class/osparc/data/Resources.js b/services/web/client/source/class/osparc/data/Resources.js
index 0417e9d5ebc..d1d4006f00a 100644
--- a/services/web/client/source/class/osparc/data/Resources.js
+++ b/services/web/client/source/class/osparc/data/Resources.js
@@ -3,6 +3,7 @@
* Copyright: 2019 IT'IS Foundation - https://itis.swiss
* License: MIT - https://opensource.org/licenses/MIT
* Authors: Ignacio Pascual (ignapas)
+ * Odei Maiz (odeimaiz)
*/
/**
@@ -66,7 +67,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* STUDIES
*/
- studies: {
+ "studies": {
useCache: true,
endpoints: {
get: {
@@ -136,7 +137,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* TEMPLATES (actually studies flagged as templates)
*/
- templates: {
+ "templates": {
useCache: true,
endpoints: {
get: {
@@ -160,7 +161,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* SERVICES
*/
- services: {
+ "services": {
useCache: true,
endpoints: {
get: {
@@ -172,7 +173,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* GROUPS/DAGS
*/
- dags: {
+ "dags": {
usesCache: true,
endpoints: {
post: {
@@ -192,7 +193,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* CONFIG
*/
- config: {
+ "config": {
useCache: true,
endpoints: {
getOne: {
@@ -204,7 +205,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* PROFILE
*/
- profile: {
+ "profile": {
useCache: true,
endpoints: {
getOne: {
@@ -216,7 +217,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* API-KEYS
*/
- apiKeys: {
+ "apiKeys": {
endpoints: {
get: {
method: "GET",
@@ -235,7 +236,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* TOKENS
*/
- tokens: {
+ "tokens": {
idField: "service",
useCache: true,
endpoints: {
@@ -261,10 +262,66 @@ qx.Class.define("osparc.data.Resources", {
}
}
},
+ /*
+ * ORGANIZATIONS
+ */
+ "organizations": {
+ useCache: true,
+ endpoints: {
+ get: {
+ method: "GET",
+ url: statics.API + "/groups"
+ },
+ post: {
+ method: "POST",
+ url: statics.API + "/groups"
+ },
+ getOne: {
+ method: "GET",
+ url: statics.API + "/groups/{gid}"
+ },
+ delete: {
+ method: "DELETE",
+ url: statics.API + "/groups/{gid}"
+ },
+ patch: {
+ method: "PATCH",
+ url: statics.API + "/groups/{gid}"
+ }
+ }
+ },
+ /*
+ * ORGANIZATION MEMBERS
+ */
+ "organizationMembers": {
+ useCache: false,
+ endpoints: {
+ get: {
+ method: "GET",
+ url: statics.API + "/groups/{gid}/users"
+ },
+ post: {
+ method: "POST",
+ url: statics.API + "/groups/{gid}/users"
+ },
+ getOne: {
+ method: "GET",
+ url: statics.API + "/groups/{gid}/users/{uid}"
+ },
+ delete: {
+ method: "DELETE",
+ url: statics.API + "/groups/{gid}/users/{uid}"
+ },
+ patch: {
+ method: "PATCH",
+ url: statics.API + "/groups/{gid}/users/{uid}"
+ }
+ }
+ },
/*
* PASSWORD
*/
- password: {
+ "password": {
useCache: false,
endpoints: {
post: {
@@ -276,7 +333,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* HEALTHCHECK
*/
- healthCheck: {
+ "healthCheck": {
useCache: false,
endpoints: {
get: {
@@ -288,7 +345,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* AUTH
*/
- auth: {
+ "auth": {
useCache: false,
endpoints: {
postLogin: {
@@ -316,7 +373,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* STORAGE LOCATIONS
*/
- storageLocations: {
+ "storageLocations": {
useCache: true,
endpoints: {
get: {
@@ -328,7 +385,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* STORAGE DATASETS
*/
- storageDatasets: {
+ "storageDatasets": {
useCache: false,
endpoints: {
getByLocation: {
@@ -340,7 +397,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* STORAGE FILES
*/
- storageFiles: {
+ "storageFiles": {
useCache: false,
endpoints: {
getByLocationAndDataset: {
@@ -364,7 +421,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* STORAGE LINK
*/
- storageLink: {
+ "storageLink": {
useCache: false,
endpoints: {
getOne: {
@@ -380,7 +437,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* ACTIVITY
*/
- activity: {
+ "activity": {
useCache: false,
endpoints: {
getOne: {
@@ -393,7 +450,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* Test/Diagnonstic entrypoint
*/
- checkEP: {
+ "checkEP": {
useCache: false,
endpoints: {
postFail: {
@@ -410,7 +467,7 @@ qx.Class.define("osparc.data.Resources", {
/*
* TAGS
*/
- tags: {
+ "tags": {
idField: "id",
useCache: true,
endpoints: {
@@ -437,7 +494,7 @@ qx.Class.define("osparc.data.Resources", {
* STATICS
* Gets the json file containing some runtime server variables.
*/
- statics: {
+ "statics": {
useCache: true,
endpoints: {
get: {
diff --git a/services/web/client/source/class/osparc/data/model/Study.js b/services/web/client/source/class/osparc/data/model/Study.js
index 74f3e9b688a..378667cc960 100644
--- a/services/web/client/source/class/osparc/data/model/Study.js
+++ b/services/web/client/source/class/osparc/data/model/Study.js
@@ -37,7 +37,7 @@ qx.Class.define("osparc.data.model.Study", {
extend: qx.core.Object,
/**
- * @param studyData {Object} Object containing the serialized Project Data
+ * @param studyData {Object} Object containing the serialized Study Data
*/
construct: function(studyData) {
this.base(arguments);
diff --git a/services/web/client/source/class/osparc/desktop/NavigationBar.js b/services/web/client/source/class/osparc/desktop/NavigationBar.js
index 9388f7b3438..38f960a2753 100644
--- a/services/web/client/source/class/osparc/desktop/NavigationBar.js
+++ b/services/web/client/source/class/osparc/desktop/NavigationBar.js
@@ -18,7 +18,7 @@
/**
* Widget containing:
* - LogoOnOff
- * - Dashboard button
+ * - Dashboard (button)
* - List of buttons for node navigation (only study editing)
* - User menu
* - Preferences
@@ -74,6 +74,7 @@ qx.Class.define("osparc.desktop.NavigationBar", {
this.getChildControl("user-manual");
this.getChildControl("feedback");
+ this.getChildControl("theme-switch");
this.getChildControl("user-menu");
},
@@ -173,6 +174,10 @@ qx.Class.define("osparc.desktop.NavigationBar", {
});
this._add(control);
break;
+ case "theme-switch":
+ control = new osparc.ui.switch.ThemeSwitcher();
+ this._add(control);
+ break;
case "user-menu":
control = this.__createUserMenuBtn();
control.set({
diff --git a/services/web/client/source/class/osparc/desktop/StudyEditor.js b/services/web/client/source/class/osparc/desktop/StudyEditor.js
index db460734066..dfd6a02a123 100644
--- a/services/web/client/source/class/osparc/desktop/StudyEditor.js
+++ b/services/web/client/source/class/osparc/desktop/StudyEditor.js
@@ -28,7 +28,7 @@ qx.Class.define("osparc.desktop.StudyEditor", {
minWidth: 0,
width: 400
});
- osparc.utils.Utils.addBorder(sidePanel, "right");
+ osparc.utils.Utils.addBorder(sidePanel, 2, "right");
const scroll = this.__scrollContainer = new qx.ui.container.Scroll().set({
minWidth: 0
});
diff --git a/services/web/client/source/class/osparc/desktop/preferences/PreferencesWindow.js b/services/web/client/source/class/osparc/desktop/preferences/PreferencesWindow.js
index eb7d88d2190..a14194c7a58 100644
--- a/services/web/client/source/class/osparc/desktop/preferences/PreferencesWindow.js
+++ b/services/web/client/source/class/osparc/desktop/preferences/PreferencesWindow.js
@@ -59,11 +59,22 @@ qx.Class.define("osparc.desktop.preferences.PreferencesWindow", {
osparc.utils.Utils.setIdToWidget(expBtn, "preferencesExperimentalTabBtn");
tabView.add(expPage);
- if (osparc.data.Permissions.getInstance().canDo("preferences.tag")) {
+ if (osparc.data.Permissions.getInstance().canDo("user.tag")) {
const tagsPage = new osparc.desktop.preferences.pages.TagsPage();
tabView.add(tagsPage);
}
+ osparc.data.Resources.get("organizations")
+ .then(resp => {
+ const orgs = resp["organizations"];
+ if (orgs.length || osparc.data.Permissions.getInstance().canDo("user.organizations.create")) {
+ const orgsPage = new osparc.desktop.preferences.pages.OrganizationsPage();
+ const orgsBtn = orgsPage.getChildControl("button");
+ osparc.utils.Utils.setIdToWidget(orgsBtn, "preferencesOrganizationsTabBtn");
+ tabView.add(orgsPage);
+ }
+ });
+
this.add(tabView);
}
});
diff --git a/services/web/client/source/class/osparc/desktop/preferences/pages/BasePage.js b/services/web/client/source/class/osparc/desktop/preferences/pages/BasePage.js
index 2bbc259a5ed..cc5f79a088c 100644
--- a/services/web/client/source/class/osparc/desktop/preferences/pages/BasePage.js
+++ b/services/web/client/source/class/osparc/desktop/preferences/pages/BasePage.js
@@ -28,7 +28,7 @@ qx.Class.define("osparc.desktop.preferences.pages.BasePage", {
// Page title
this.add(new qx.ui.basic.Label(title + " Settings").set({
- font: qx.bom.Font.fromConfig(osparc.theme.Font.fonts["title-16"])
+ font: "title-16"
}));
// spacer
@@ -36,8 +36,8 @@ qx.Class.define("osparc.desktop.preferences.pages.BasePage", {
},
members: {
- /** Common layout of secion's box
- *
+ /**
+ * Common layout of section's box
* @param {page section's name} sectionName
*/
_createSectionBox: function(sectionName) {
@@ -47,14 +47,14 @@ qx.Class.define("osparc.desktop.preferences.pages.BasePage", {
return box;
},
- /** Common layout for and font for tooltip label
- *
+ /**
+ * Common layout for tooltip label
*/
_createHelpLabel: function(message=null) {
let label = new qx.ui.basic.Label().set({
value: message,
rich: true,
- font: osparc.utils.Utils.getFont(12)
+ font: "text-12"
});
return label;
}
diff --git a/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js b/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js
new file mode 100644
index 00000000000..5613368f77e
--- /dev/null
+++ b/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js
@@ -0,0 +1,410 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2018 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Pedro Crespo (pcrespov)
+
+************************************************************************ */
+
+/**
+ * Organization and members in preferences dialog
+ *
+ */
+
+qx.Class.define("osparc.desktop.preferences.pages.OrganizationsPage", {
+ extend: osparc.desktop.preferences.pages.BasePage,
+
+ construct: function() {
+ const iconSrc = "@FontAwesome5Solid/sitemap/24";
+ const title = this.tr("Organizations");
+ this.base(arguments, title, iconSrc);
+
+ if (osparc.data.Permissions.getInstance().canDo("user.organizations.create")) {
+ this.add(this.__getCreateOrganizationSection());
+ }
+ this.add(this.__getOrganizationsSection());
+ this.add(this.__getMembersSection(), {
+ flex: 1
+ });
+
+ this.__reloadOrganizations();
+ },
+
+ members: {
+ __currentOrg: null,
+ __orgsModel: null,
+ __memberInvitation: null,
+ __membersModel: null,
+
+ __getCreateOrganizationSection: function() {
+ const createOrgBtn = new qx.ui.form.Button(this.tr("Create New Organization")).set({
+ allowGrowX: false
+ });
+ createOrgBtn.addListener("execute", function() {
+ const newOrg = true;
+ const orgEditor = new osparc.dashboard.OrganizationEditor(newOrg);
+ const win = osparc.dashboard.OrganizationEditor.popUpInWindow(this.tr("Organization Details Editor"), orgEditor);
+ orgEditor.addListener("createOrg", () => {
+ this.__createOrganization(win, orgEditor.getChildControl("create"), orgEditor);
+ });
+ }, this);
+ return createOrgBtn;
+ },
+
+ __getOrganizationsSection: function() {
+ const box = this._createSectionBox(this.tr("Organizations"));
+ box.add(this.__getOrganizationsList());
+ return box;
+ },
+
+ __getOrganizationsList: function() {
+ const orgsUIList = new qx.ui.form.List().set({
+ decorator: "no-border",
+ spacing: 3,
+ height: 150,
+ width: 150
+ });
+ orgsUIList.addListener("changeSelection", e => {
+ this.__organizationSelected(e.getData());
+ }, this);
+
+ const orgsModel = this.__orgsModel = new qx.data.Array();
+ const orgsCtrl = new qx.data.controller.List(orgsModel, orgsUIList, "label");
+ orgsCtrl.setDelegate({
+ createItem: () => new osparc.dashboard.OrganizationListItem(),
+ bindItem: (ctrl, item, id) => {
+ ctrl.bindProperty("gid", "model", null, item, id);
+ ctrl.bindProperty("gid", "key", null, item, id);
+ ctrl.bindProperty("thumbnail", "thumbnail", null, item, id);
+ ctrl.bindProperty("label", "title", null, item, id);
+ ctrl.bindProperty("description", "subtitle", null, item, id);
+ ctrl.bindProperty("nMembers", "contact", null, item, id);
+ ctrl.bindProperty("access_rights", "accessRights", null, item, id);
+ },
+ configureItem: item => {
+ const thumbanil = item.getChildControl("thumbnail");
+ thumbanil.getContentElement()
+ .setStyles({
+ "border-radius": "16px"
+ });
+
+ item.addListener("openEditOrganization", e => {
+ this.__openEditOrganization(e.getData());
+ });
+ }
+ });
+
+ return orgsUIList;
+ },
+
+ __getMembersSection: function() {
+ const box = this._createSectionBox(this.tr("Members"));
+ box.add(this.__getMemberInvitation());
+ box.add(this.__getMembersList(), {
+ flex: 1
+ });
+ return box;
+ },
+
+ __getMemberInvitation: function() {
+ const hBox = this.__memberInvitation = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({
+ alignY: "middle"
+ }));
+ hBox.exclude();
+
+ const userEmail = new qx.ui.form.TextField().set({
+ required: true,
+ placeholder: this.tr("New Member's email")
+ });
+ hBox.add(userEmail, {
+ flex: 1
+ });
+
+ const validator = new qx.ui.form.validation.Manager();
+ validator.add(userEmail, qx.util.Validate.email());
+
+ const inviteBtn = new qx.ui.form.Button(this.tr("Invite"));
+ inviteBtn.addListener("execute", function() {
+ if (validator.validate()) {
+ this.__addMember(userEmail.getValue());
+ }
+ }, this);
+ hBox.add(inviteBtn);
+
+ return hBox;
+ },
+
+ __getMembersList: function() {
+ const memebersUIList = new qx.ui.form.List().set({
+ decorator: "no-border",
+ spacing: 3,
+ width: 150
+ });
+
+ const membersModel = this.__membersModel = new qx.data.Array();
+ const membersCtrl = new qx.data.controller.List(membersModel, memebersUIList, "name");
+ membersCtrl.setDelegate({
+ createItem: () => new osparc.dashboard.OrgMemberListItem(),
+ bindItem: (ctrl, item, id) => {
+ ctrl.bindProperty("id", "model", null, item, id);
+ ctrl.bindProperty("id", "key", null, item, id);
+ ctrl.bindProperty("thumbnail", "thumbnail", null, item, id);
+ ctrl.bindProperty("name", "title", null, item, id);
+ ctrl.bindProperty("access_rights", "accessRights", null, item, id);
+ ctrl.bindProperty("login", "subtitle", null, item, id);
+ ctrl.bindProperty("showOptions", "showOptions", null, item, id);
+ },
+ configureItem: item => {
+ item.getChildControl("thumbnail").getContentElement()
+ .setStyles({
+ "border-radius": "16px"
+ });
+ item.addListener("promoteOrgMember", e => {
+ const orgMember = e.getData();
+ this.__promoteMember(orgMember);
+ });
+ item.addListener("removeOrgMember", e => {
+ const orgMember = e.getData();
+ this.__deleteMember(orgMember);
+ });
+ }
+ });
+
+ return memebersUIList;
+ },
+
+ __organizationSelected: function(data) {
+ this.__memberInvitation.exclude();
+ if (data && data.length>0) {
+ this.__currentOrg = data[0];
+ } else {
+ this.__currentOrg = null;
+ }
+ this.__reloadOrgMembers();
+ },
+
+ __reloadOrganizations: function() {
+ const orgsModel = this.__orgsModel;
+ orgsModel.removeAll();
+
+ osparc.data.Resources.get("organizations")
+ .then(respOrgs => {
+ const orgs = respOrgs["organizations"];
+ orgs.forEach(org => {
+ const params = {
+ url: {
+ gid: org["gid"]
+ }
+ };
+ osparc.data.Resources.get("organizationMembers", params)
+ .then(respOrgMembers => {
+ org["nMembers"] = Object.keys(respOrgMembers).length + this.tr(" members");
+ orgsModel.append(qx.data.marshal.Json.createModel(org));
+ });
+ });
+ });
+ },
+
+ __reloadOrgMembers: function() {
+ const membersModel = this.__membersModel;
+ membersModel.removeAll();
+
+ const orgModel = this.__currentOrg;
+ if (orgModel === null) {
+ return;
+ }
+
+ const canWrite = orgModel.getAccessRights().getWrite();
+ if (canWrite) {
+ this.__memberInvitation.show();
+ }
+
+ const params = {
+ url: {
+ "gid": orgModel.getKey()
+ }
+ };
+ osparc.data.Resources.get("organizationMembers", params)
+ .then(members => {
+ members.forEach(member => {
+ member["thumbnail"] = osparc.utils.Avatar.getUrl(member["login"], 32);
+ member["name"] = osparc.utils.Utils.firstsUp(member["first_name"], member["last_name"]);
+ member["showOptions"] = canWrite;
+ membersModel.append(qx.data.marshal.Json.createModel(member));
+ });
+ });
+ },
+
+ __openEditOrganization: function(orgKey) {
+ let org = null;
+ this.__orgsModel.forEach(orgModel => {
+ if (orgModel.getGid() === parseInt(orgKey)) {
+ org = orgModel;
+ }
+ });
+ if (org === null) {
+ return;
+ }
+
+ const newOrg = false;
+ const orgEditor = new osparc.dashboard.OrganizationEditor(newOrg);
+ org.bind("gid", orgEditor, "gid");
+ org.bind("label", orgEditor, "label");
+ org.bind("description", orgEditor, "description");
+ org.bind("thumbnail", orgEditor, "thumbnail", {
+ converter: val => val ? val : ""
+ });
+ const win = osparc.dashboard.OrganizationEditor.popUpInWindow(this.tr("Organization Details Editor"), orgEditor);
+ orgEditor.addListener("updateOrg", () => {
+ this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor);
+ });
+ },
+
+ __createOrganization: function(win, button, orgEditor) {
+ const orgKey = orgEditor.getGid();
+ const name = orgEditor.getLabel();
+ const description = orgEditor.getDescription();
+ const thumbnail = orgEditor.getThumbnail();
+ const params = {
+ url: {
+ "gid": orgKey
+ },
+ data: {
+ "label": name,
+ "description": description,
+ "thumbnail": thumbnail || null
+ }
+ };
+ osparc.data.Resources.fetch("organizations", "post", params)
+ .then(() => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(name + this.tr(" successfully created"));
+ button.setFetching(false);
+ win.close();
+ osparc.store.Store.getInstance().reset("organizations");
+ this.__reloadOrganizations();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong creating ") + name, "ERROR");
+ button.setFetching(false);
+ console.error(err);
+ });
+ },
+
+ __updateOrganization: function(win, button, orgEditor) {
+ const orgKey = orgEditor.getGid();
+ const name = orgEditor.getLabel();
+ const description = orgEditor.getDescription();
+ const thumbnail = orgEditor.getThumbnail();
+ const params = {
+ url: {
+ "gid": orgKey
+ },
+ data: {
+ "label": name,
+ "description": description,
+ "thumbnail": thumbnail || null
+ }
+ };
+ osparc.data.Resources.fetch("organizations", "patch", params)
+ .then(() => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(name + this.tr(" successfully edited"));
+ button.setFetching(false);
+ win.close();
+ osparc.store.Store.getInstance().reset("organizations");
+ this.__reloadOrganizations();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong editing ") + name, "ERROR");
+ button.setFetching(false);
+ console.error(err);
+ });
+ },
+
+ __addMember: function(orgMemberEmail) {
+ if (this.__currentOrg === null) {
+ return;
+ }
+
+ const params = {
+ url: {
+ "gid": this.__currentOrg.getKey()
+ },
+ data: {
+ "email": orgMemberEmail
+ }
+ };
+ osparc.data.Resources.fetch("organizationMembers", "post", params)
+ .then(() => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Invitation sent to ") + orgMemberEmail);
+ osparc.store.Store.getInstance().reset("organizationMembers");
+ this.__reloadOrgMembers();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong with the invitation"), "ERROR");
+ console.error(err);
+ });
+ },
+
+ __promoteMember: function(orgMember) {
+ if (this.__currentOrg === null) {
+ return;
+ }
+
+ const params = {
+ url: {
+ "gid": this.__currentOrg.getKey(),
+ "uid": orgMember["key"]
+ },
+ data: {
+ "access_rights": {
+ "read": true,
+ "write": true,
+ "delete": false
+ }
+ }
+ };
+ osparc.data.Resources.fetch("organizationMembers", "patch", params)
+ .then(() => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(orgMember["name"] + this.tr(" successfully promoted"));
+ osparc.store.Store.getInstance().reset("organizationMembers");
+ this.__reloadOrgMembers();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong promoting ") + orgMember["name"], "ERROR");
+ console.error(err);
+ });
+ },
+
+ __deleteMember: function(orgMember) {
+ if (this.__currentOrg === null) {
+ return;
+ }
+
+ const params = {
+ url: {
+ "gid": this.__currentOrg.getKey(),
+ "uid": orgMember["key"]
+ }
+ };
+ osparc.data.Resources.fetch("organizationMembers", "delete", params)
+ .then(() => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(orgMember["name"] + this.tr(" successfully removed"));
+ osparc.store.Store.getInstance().reset("organizationMembers");
+ this.__reloadOrgMembers();
+ })
+ .catch(err => {
+ osparc.component.message.FlashMessenger.getInstance().logAs(this.tr("Something went wrong removing ") + orgMember["name"], "ERROR");
+ console.error(err);
+ });
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/desktop/preferences/pages/ProfilePage.js b/services/web/client/source/class/osparc/desktop/preferences/pages/ProfilePage.js
index 905ba654077..e9544002885 100644
--- a/services/web/client/source/class/osparc/desktop/preferences/pages/ProfilePage.js
+++ b/services/web/client/source/class/osparc/desktop/preferences/pages/ProfilePage.js
@@ -36,7 +36,6 @@ qx.Class.define("osparc.desktop.preferences.pages.ProfilePage", {
this.__getValuesFromServer();
this.add(this.__createProfileUser());
- this.add(this.__createOrganizations());
},
members: {
@@ -61,7 +60,7 @@ qx.Class.define("osparc.desktop.preferences.pages.ProfilePage", {
let role = null;
const permissions = osparc.data.Permissions.getInstance();
- if (permissions.canDo("preferences.role.update")) {
+ if (permissions.canDo("user.role.update")) {
role = new qx.ui.form.SelectBox();
const roles = permissions.getChildrenRoles(permissions.getRole());
for (let i=0; i {
- if (!osparc.data.Permissions.getInstance().canDo("preferences.user.update", true)) {
+ if (!osparc.data.Permissions.getInstance().canDo("user.user.update", true)) {
this.__resetDataToModel();
return;
}
@@ -184,40 +183,6 @@ qx.Class.define("osparc.desktop.preferences.pages.ProfilePage", {
return box;
},
- __createOrganizations: function() {
- // layout
- const box = this._createSectionBox(this.tr("Organizations"));
-
- const orgsUIList = new qx.ui.form.List().set({
- spacing: 3,
- height: 150,
- width: 150
- });
-
- const orgsModel = new qx.data.Array();
- const orgsCtrl = new qx.data.controller.List(orgsModel, orgsUIList, "label");
- orgsCtrl.setDelegate({
- createItem: () => new osparc.component.widget.OrganizationListItem(),
- bindItem: (ctrl, item, id) => {
- ctrl.bindProperty("gid", "model", null, item, id);
- ctrl.bindProperty("gid", "gid", null, item, id);
- ctrl.bindProperty("label", "label", null, item, id);
- ctrl.bindProperty("description", "description", null, item, id);
- }
- });
-
- box.add(orgsUIList);
-
- const store = osparc.store.Store.getInstance();
- store.getGroupsOrganizations()
- .then(orgs => {
- orgsModel.removeAll();
- orgs.forEach(org => orgsModel.append(qx.data.marshal.Json.createModel(org)));
- });
-
- return box;
- },
-
__getValuesFromServer: function() {
// get values from server
const request = new osparc.io.request.ApiRequest("/me", "GET");
diff --git a/services/web/client/source/class/osparc/desktop/preferences/pages/SecurityPage.js b/services/web/client/source/class/osparc/desktop/preferences/pages/SecurityPage.js
index 4843cffd3c0..0ff307619d7 100644
--- a/services/web/client/source/class/osparc/desktop/preferences/pages/SecurityPage.js
+++ b/services/web/client/source/class/osparc/desktop/preferences/pages/SecurityPage.js
@@ -130,7 +130,7 @@ qx.Class.define("osparc.desktop.preferences.pages.SecurityPage", {
},
__requestAPIKey: function() {
- if (!osparc.data.Permissions.getInstance().canDo("preferences.apikey.create", true)) {
+ if (!osparc.data.Permissions.getInstance().canDo("user.apikey.create", true)) {
return;
}
@@ -197,7 +197,7 @@ qx.Class.define("osparc.desktop.preferences.pages.SecurityPage", {
},
__deleteAPIKey: function(apiKeyLabel) {
- if (!osparc.data.Permissions.getInstance().canDo("preferences.apikey.delete", true)) {
+ if (!osparc.data.Permissions.getInstance().canDo("user.apikey.delete", true)) {
return;
}
const params = {
@@ -272,7 +272,7 @@ qx.Class.define("osparc.desktop.preferences.pages.SecurityPage", {
const addTokenBtn = new qx.ui.form.Button(this.tr("Add"));
addTokenBtn.setWidth(100);
addTokenBtn.addListener("execute", e => {
- if (!osparc.data.Permissions.getInstance().canDo("preferences.token.create", true)) {
+ if (!osparc.data.Permissions.getInstance().canDo("user.token.create", true)) {
return;
}
const params = {
@@ -321,7 +321,7 @@ qx.Class.define("osparc.desktop.preferences.pages.SecurityPage", {
},
__deleteToken: function(service) {
- if (!osparc.data.Permissions.getInstance().canDo("preferences.token.delete", true)) {
+ if (!osparc.data.Permissions.getInstance().canDo("user.token.delete", true)) {
return;
}
const params = {
diff --git a/services/web/client/source/class/osparc/store/Store.js b/services/web/client/source/class/osparc/store/Store.js
index b0cc765f5d0..941aadd94f7 100644
--- a/services/web/client/source/class/osparc/store/Store.js
+++ b/services/web/client/source/class/osparc/store/Store.js
@@ -81,6 +81,18 @@ qx.Class.define("osparc.store.Store", {
check: "Array",
init: []
},
+ organizations: {
+ check: "Object",
+ init: {}
+ },
+ organizationMembers: {
+ check: "Object",
+ init: {}
+ },
+ reachableMembers: {
+ check: "Object",
+ init: {}
+ },
services: {
check: "Array",
init: []
@@ -158,6 +170,33 @@ qx.Class.define("osparc.store.Store", {
}
},
+ /**
+ * Invalidates the cache for the given resources.
+ * If resource is a string, it will invalidate that resource.
+ * If it is an array, it will try to invalidate every resource in the array.
+ * If it is not provided, it will invalidate all resources.
+ *
+ * @param {(string|string[])} [resources] Property or array of property names that must be reset
+ */
+ invalidate: function(resources) {
+ if (typeof resources === "string" || resources instanceof String) {
+ this.reset(resources);
+ } else {
+ let propertyArray;
+ if (resources == null) {
+ propertyArray = Object.keys(qx.util.PropertyUtil.getProperties(osparc.store.Store));
+ } else if (Array.isArray(resources)) {
+ propertyArray = resources;
+ }
+ propertyArray.forEach(propName => {
+ this.reset(propName);
+ // Not sure reset actually works
+ const initVal = qx.util.PropertyUtil.getInitValue(this, propName);
+ qx.util.PropertyUtil.getUserValue(this, propName, initVal);
+ });
+ }
+ },
+
/**
* This functions does the needed processing in order to have a working list of services and DAGs.
* @param {Boolean} reload ?
@@ -230,26 +269,40 @@ qx.Class.define("osparc.store.Store", {
});
},
- /**
- * Invalidates the cache for the given resources.
- * If resource is a string, it will invalidate that resource.
- * If it is an array, it will try to invalidate every resource in the array.
- * If it is not provided, it will invalidate all resources.
- *
- * @param {(string|string[])} [resources] Property or array of property names that must be reset
- */
- invalidate: function(resources) {
- if (typeof resources === "string" || resources instanceof String) {
- this.reset(resources);
- } else {
- let propertyArray;
- if (resources == null) {
- propertyArray = Object.keys(qx.util.PropertyUtil.getProperties(osparc.store.Store));
- } else if (Array.isArray(resources)) {
- propertyArray = resources;
- }
- propertyArray.forEach(propName => this.reset(propName));
- }
+ getVisibleMembers: function() {
+ const reachableMembers = this.getReachableMembers();
+ return new Promise((resolve, reject) => {
+ osparc.data.Resources.get("organizations")
+ .then(resp => {
+ const orgMembersPromises = [];
+ const orgs = resp["organizations"];
+ orgs.forEach(org => {
+ orgMembersPromises.push(
+ new Promise((resolve2, reject2) => {
+ const params = {
+ url: {
+ "gid": org["gid"]
+ }
+ };
+ osparc.data.Resources.get("organizationMembers", params)
+ .then(orgMembers => {
+ resolve2(orgMembers);
+ });
+ })
+ );
+ });
+ Promise.all(orgMembersPromises)
+ .then(orgMemberss => {
+ orgMemberss.forEach(orgMembers => {
+ orgMembers.forEach(orgMember => {
+ orgMember["label"] = osparc.utils.Utils.firstsUp(orgMember["first_name"], orgMember["last_name"]);
+ reachableMembers[orgMember["gid"]] = orgMember;
+ });
+ });
+ resolve(reachableMembers);
+ });
+ });
+ });
},
_applyStudy: function(newStudy) {
diff --git a/services/web/client/source/class/osparc/ui/basic/Tag.js b/services/web/client/source/class/osparc/ui/basic/Tag.js
index a596fd31cca..c0dd4535673 100644
--- a/services/web/client/source/class/osparc/ui/basic/Tag.js
+++ b/services/web/client/source/class/osparc/ui/basic/Tag.js
@@ -20,10 +20,10 @@ qx.Class.define("osparc.ui.basic.Tag", {
*/
construct: function(value, color, filterGroupId) {
this.base(arguments, value);
+ this.setFont("text-11");
if (color) {
this.setColor(color);
}
- this.setFont(osparc.utils.Utils.getFont(11));
if (filterGroupId) {
this.setCursor("pointer");
this.addListener("tap", e => {
diff --git a/services/web/client/source/class/osparc/ui/hint/Hint.js b/services/web/client/source/class/osparc/ui/hint/Hint.js
index b447ef0f37c..4c2b9ce5a18 100644
--- a/services/web/client/source/class/osparc/ui/hint/Hint.js
+++ b/services/web/client/source/class/osparc/ui/hint/Hint.js
@@ -65,7 +65,7 @@ qx.Class.define("osparc.ui.hint.Hint", {
active: {
check: "Boolean",
nullable: false,
- init: true
+ init: false
},
orientation: {
check: "Integer",
diff --git a/services/web/client/source/class/osparc/ui/markdown/Markdown.js b/services/web/client/source/class/osparc/ui/markdown/Markdown.js
index 8e8effce1ed..ec5ae9dc011 100644
--- a/services/web/client/source/class/osparc/ui/markdown/Markdown.js
+++ b/services/web/client/source/class/osparc/ui/markdown/Markdown.js
@@ -147,7 +147,7 @@ qx.Class.define("osparc.ui.markdown.Markdown", {
},
__getDomElement: function() {
- if (!this.getContentElement) {
+ if (!this.getContentElement || this.getContentElement() === null) {
return null;
}
const domElement = this.getContentElement().getDomElement();
diff --git a/services/web/client/source/class/osparc/ui/switch/Switch.js b/services/web/client/source/class/osparc/ui/switch/Switch.js
new file mode 100644
index 00000000000..be309414cf1
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/switch/Switch.js
@@ -0,0 +1,51 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2020 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Odei Maiz (odeimaiz)
+ */
+
+/**
+ * Switch button
+ */
+
+qx.Class.define("osparc.ui.switch.Switch", {
+ extend: qx.ui.basic.Image,
+
+ construct: function() {
+ this.base(arguments);
+
+ this.set({
+ cursor: "pointer",
+ backgroundColor: "transparent",
+ source: "@FontAwesome5Solid/toggle-on/22"
+ });
+
+ this.addListener("tap", () => {
+ this.toggleChecked();
+ });
+
+ this.initChecked();
+ },
+
+ properties: {
+ checked: {
+ check: "Boolean",
+ init: false,
+ event: "changeChecked",
+ apply: "_applyChecked"
+ }
+ },
+
+ members: {
+ __slider: null,
+
+ _applyChecked: function(newVal) {
+ if (newVal) {
+ this.getContentElement().addClass("rotated");
+ } else {
+ this.getContentElement().removeClass("rotated");
+ }
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/ui/switch/ThemeSwitcher.js b/services/web/client/source/class/osparc/ui/switch/ThemeSwitcher.js
new file mode 100644
index 00000000000..2a4ee5d912f
--- /dev/null
+++ b/services/web/client/source/class/osparc/ui/switch/ThemeSwitcher.js
@@ -0,0 +1,41 @@
+/*
+ * oSPARC - The SIMCORE frontend - https://osparc.io
+ * Copyright: 2020 IT'IS Foundation - https://itis.swiss
+ * License: MIT - https://opensource.org/licenses/MIT
+ * Authors: Odei Maiz (odeimaiz)
+ */
+
+/**
+ * Switch button for controlling the theme
+ */
+
+qx.Class.define("osparc.ui.switch.ThemeSwitcher", {
+ extend: osparc.ui.switch.Switch,
+
+ construct: function() {
+ this.base(arguments);
+
+ const validThemes = [];
+ const themes = qx.Theme.getAll();
+ for (const key in themes) {
+ const theme = themes[key];
+ if (theme.type === "meta") {
+ validThemes.push(theme);
+ }
+ }
+ if (validThemes.length !== 2) {
+ this.setVisibility("excluded");
+ return;
+ }
+
+ this.addListener("changeChecked", () => {
+ const themeMgr = qx.theme.manager.Meta.getInstance();
+ const currentTheme = themeMgr.getTheme();
+ if (currentTheme === validThemes[0]) {
+ themeMgr.setTheme(validThemes[1]);
+ } else {
+ themeMgr.setTheme(validThemes[0]);
+ }
+ });
+ }
+});
diff --git a/services/web/client/source/class/osparc/utils/Utils.js b/services/web/client/source/class/osparc/utils/Utils.js
index 3c1f2d359bd..64955e5e64a 100644
--- a/services/web/client/source/class/osparc/utils/Utils.js
+++ b/services/web/client/source/class/osparc/utils/Utils.js
@@ -50,8 +50,8 @@ qx.Class.define("osparc.utils.Utils", {
return loadingUri;
},
- addBorder: function(sidePanel, where = "right") {
- sidePanel.getContentElement().setStyle("border-"+where, "1px solid " + qx.theme.manager.Color.getInstance().resolve("material-button-background"));
+ addBorder: function(sidePanel, width = 1, where = "right") {
+ sidePanel.getContentElement().setStyle("border-"+where, width+"px solid " + qx.theme.manager.Color.getInstance().resolve("material-button-background"));
},
__setStyleToIFrame: function(domEl) {
@@ -362,6 +362,12 @@ qx.Class.define("osparc.utils.Utils", {
fetchJSON: function() {
return fetch.apply(null, arguments).then(response => response.json());
+ },
+
+ firstsUp: function(...args) {
+ const labels = [];
+ args.forEach(arg => labels.push(qx.lang.String.firstUp(arg)));
+ return labels.join(" ");
}
}
});
diff --git a/services/web/client/source/resource/common/common.css b/services/web/client/source/resource/common/common.css
index 49a4bcc00a0..45d71fa3c25 100644
--- a/services/web/client/source/resource/common/common.css
+++ b/services/web/client/source/resource/common/common.css
@@ -18,3 +18,7 @@
-moz-transform: rotate(-90deg);
-o-transform: rotate(-90deg);
}
+
+.rotated {
+ transform: rotateZ(180deg);
+}
diff --git a/services/web/client/source/resource/form/service.json b/services/web/client/source/resource/form/service.json
index 2ed9429830e..11da25ff6aa 100644
--- a/services/web/client/source/resource/form/service.json
+++ b/services/web/client/source/resource/form/service.json
@@ -129,7 +129,7 @@
"name": {
"type": "string",
"title": "Name",
- "description": "Input's ID (variable name)."
+ "description": "Output's ID (variable name)."
},
"type": {
"type": "string",
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
index d7380c324d3..7e9f28a8dc5 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
+++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
@@ -2108,7 +2108,6 @@ paths:
last_name:
type: string
example:
- login: pcrespov@foo.com
first_name: Pedro
last_name: Crespo
- type: object
@@ -2125,67 +2124,182 @@ paths:
type: object
properties:
gid:
+ description: the group ID
type: string
label:
+ description: the group name
type: string
description:
+ description: the group description
type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
example:
- gid: '27'
label: A user
description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
- gid: '1'
label: ITIS Foundation
description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
- gid: '0'
label: All
description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
organizations:
type: array
items:
type: object
properties:
gid:
+ description: the group ID
type: string
label:
+ description: the group name
type: string
description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
example:
- gid: '27'
label: A user
description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
- gid: '1'
label: ITIS Foundation
description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
- gid: '0'
label: All
description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
all:
type: object
properties:
gid:
+ description: the group ID
type: string
label:
+ description: the group name
type: string
description:
+ description: the group description
type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
example:
- gid: '27'
label: A user
description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
- gid: '1'
label: ITIS Foundation
description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
- gid: '0'
label: All
description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
gravatar_id:
type: string
example:
login: pcrespov@foo.com
- first_name: Pedro
- last_name: Crespo
role: Admin
gravatar_id: 205e460b479e2e5b48aec07710c08d50
error:
@@ -2290,7 +2404,6 @@ paths:
last_name:
type: string
example:
- login: pcrespov@foo.com
first_name: Pedro
last_name: Crespo
example:
@@ -2390,7 +2503,1798 @@ paths:
- user
responses:
'200':
- description: list of tokens
+ description: list of tokens
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: array
+ items:
+ description: api keys for third party services
+ type: object
+ properties:
+ service:
+ description: uniquely identifies the service where this token is used
+ type: string
+ token_key:
+ description: basic token key
+ type: string
+ format: uuid
+ token_secret:
+ type: string
+ format: uuid
+ required:
+ - service
+ - token_key
+ example:
+ service: github-api-v1
+ token_key: N1BP5ZSpB
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsafePassword
+ message: Password is not secure
+ field: password
+ status: 400
+ post:
+ summary: Create tokens
+ operationId: create_tokens
+ tags:
+ - user
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ description: api keys for third party services
+ type: object
+ properties:
+ service:
+ description: uniquely identifies the service where this token is used
+ type: string
+ token_key:
+ description: basic token key
+ type: string
+ format: uuid
+ token_secret:
+ type: string
+ format: uuid
+ required:
+ - service
+ - token_key
+ example:
+ service: github-api-v1
+ token_key: N1BP5ZSpB
+ error:
+ nullable: true
+ default: null
+ responses:
+ '201':
+ description: token created
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ description: api keys for third party services
+ type: object
+ properties:
+ service:
+ description: uniquely identifies the service where this token is used
+ type: string
+ token_key:
+ description: basic token key
+ type: string
+ format: uuid
+ token_secret:
+ type: string
+ format: uuid
+ required:
+ - service
+ - token_key
+ example:
+ service: github-api-v1
+ token_key: N1BP5ZSpB
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsafePassword
+ message: Password is not secure
+ field: password
+ status: 400
+ '/me/tokens/{service}':
+ parameters:
+ - name: service
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ summary: Gets specific token
+ operationId: get_token
+ tags:
+ - user
+ responses:
+ '200':
+ description: got detailed token
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ description: api keys for third party services
+ type: object
+ properties:
+ service:
+ description: uniquely identifies the service where this token is used
+ type: string
+ token_key:
+ description: basic token key
+ type: string
+ format: uuid
+ token_secret:
+ type: string
+ format: uuid
+ required:
+ - service
+ - token_key
+ example:
+ service: github-api-v1
+ token_key: N1BP5ZSpB
+ error:
+ nullable: true
+ default: null
+ put:
+ summary: Updates token
+ operationId: update_token
+ tags:
+ - user
+ responses:
+ '204':
+ description: token has been successfully updated
+ delete:
+ summary: Delete token
+ operationId: delete_token
+ tags:
+ - user
+ responses:
+ '204':
+ description: token has been successfully deleted
+ /groups:
+ get:
+ summary: List my groups
+ operationId: list_groups
+ tags:
+ - group
+ responses:
+ '200':
+ description: list of the groups I belonged to
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ properties:
+ me:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ organizations:
+ type: array
+ items:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ all:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsafePassword
+ message: Password is not secure
+ field: password
+ status: 400
+ post:
+ summary: Create a new group
+ operationId: create_group
+ tags:
+ - group
+ requestBody:
+ required: true
+ description: the group to create
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ responses:
+ '201':
+ description: group created
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsafePassword
+ message: Password is not secure
+ field: password
+ status: 400
+ '/groups/{gid}':
+ parameters:
+ - name: gid
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - group
+ summary: Gets one group details
+ operationId: get_group
+ responses:
+ '200':
+ description: got group
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+ description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
+ patch:
+ summary: Update one group
+ operationId: update_group
+ tags:
+ - group
+ requestBody:
+ required: true
+ description: the group to update
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+                  description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ responses:
+ '200':
+ description: the modified group
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ properties:
+ gid:
+ description: the group ID
+ type: string
+ label:
+ description: the group name
+ type: string
+ description:
+ description: the group description
+ type: string
+ thumbnail:
+ description: url to the group thumbnail
+ type: string
+ format: uri
+ access_rights:
+                          description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ required:
+ - gid
+ - label
+ - description
+ - access_rights
+ example:
+ - gid: '27'
+ label: A user
+ description: A very special user
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '1'
+ label: ITIS Foundation
+ description: The Foundation for Research on Information Technologies in Society
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ - gid: '0'
+ label: All
+ description: Open to all users
+ thumbnail: 'https://user-images.githubusercontent.com/32800795/61083844-ff48fb00-a42c-11e9-8e63-fa2d709c8baf.png'
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
+ delete:
+ tags:
+ - group
+ summary: Deletes one group
+ operationId: delete_group
+ responses:
+ '204':
+ description: group has been successfully deleted
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
+ '/groups/{gid}/users':
+ parameters:
+ - name: gid
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - group
+ summary: Gets list of users in group
+ operationId: get_group_users
+ responses:
+ '200':
+ description: got list of users and their respective rights
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: array
+ items:
+ type: object
+ allOf:
+ - type: object
+ properties:
+ first_name:
+ type: string
+ description: the user first name
+ last_name:
+ type: string
+ description: the user last name
+ login:
+ type: string
+ format: email
+ description: the user login email
+ gravatar_id:
+ type: string
+ description: the user gravatar id hash
+ id:
+ type: string
+ description: the user id
+ gid:
+ type: string
+ description: the user primary gid
+ example:
+ first_name: Mr
+ last_name: Smith
+ login: mr.smith@matrix.com
+ gravatar_id: a1af5c6ecc38e81f29695f01d6ceb540
+ id: '1'
+ gid: '3'
+                          - description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
+ post:
+ tags:
+ - group
+ summary: Adds a user in the group
+ operationId: add_group_user
+ requestBody:
+ required: true
+ description: the user to add
+ content:
+ application/json:
+ schema:
+ anyOf:
+ - type: object
+ required:
+ - uid
+ properties:
+ uid:
+ type: string
+ description: the user id
+ - type: object
+ required:
+ - email
+ properties:
+ email:
+ type: string
+ format: email
+ description: the user email
+ responses:
+ '204':
+ description: user successfully added
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
+ '/groups/{gid}/users/{uid}':
+ parameters:
+ - name: gid
+ in: path
+ required: true
+ schema:
+ type: string
+ - name: uid
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - group
+ summary: Gets specific user in group
+ operationId: get_group_user
+ responses:
+ '200':
+ description: got user
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ allOf:
+ - type: object
+ properties:
+ first_name:
+ type: string
+ description: the user first name
+ last_name:
+ type: string
+ description: the user last name
+ login:
+ type: string
+ format: email
+ description: the user login email
+ gravatar_id:
+ type: string
+ description: the user gravatar id hash
+ id:
+ type: string
+ description: the user id
+ gid:
+ type: string
+ description: the user primary gid
+ example:
+ first_name: Mr
+ last_name: Smith
+ login: mr.smith@matrix.com
+ gravatar_id: a1af5c6ecc38e81f29695f01d6ceb540
+ id: '1'
+ gid: '3'
+                        - description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
+ patch:
+ tags:
+ - group
+ summary: Modify specific user in group
+ operationId: update_group_user
+ requestBody:
+ required: true
+ description: the user rights to modify
+ content:
+ application/json:
+ schema:
+                description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
+ responses:
+ '200':
+ description: modified user
content:
application/json:
schema:
@@ -2399,27 +4303,59 @@ paths:
- data
properties:
data:
- type: array
- items:
- description: api keys for third party services
- type: object
- properties:
- service:
- description: uniquely identifies the service where this token is used
- type: string
- token_key:
- description: basic token key
- type: string
- format: uuid
- token_secret:
- type: string
- format: uuid
- required:
- - service
- - token_key
- example:
- service: github-api-v1
- token_key: N1BP5ZSpB
+ type: object
+ allOf:
+ - type: object
+ properties:
+ first_name:
+ type: string
+ description: the user first name
+ last_name:
+ type: string
+ description: the user last name
+ login:
+ type: string
+ format: email
+ description: the user login email
+ gravatar_id:
+ type: string
+ description: the user gravatar id hash
+ id:
+ type: string
+ description: the user id
+ gid:
+ type: string
+ description: the user primary gid
+ example:
+ first_name: Mr
+ last_name: Smith
+ login: mr.smith@matrix.com
+ gravatar_id: a1af5c6ecc38e81f29695f01d6ceb540
+ id: '1'
+ gid: '3'
+ - description: defines acesss rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
error:
nullable: true
default: null
@@ -2506,75 +4442,14 @@ paths:
message: Password is not secure
field: pasword
status: 400
- post:
- summary: Create tokens
- operationId: create_tokens
+ delete:
tags:
- - user
- requestBody:
- content:
- application/json:
- schema:
- type: object
- required:
- - data
- properties:
- data:
- description: api keys for third party services
- type: object
- properties:
- service:
- description: uniquely identifies the service where this token is used
- type: string
- token_key:
- description: basic token key
- type: string
- format: uuid
- token_secret:
- type: string
- format: uuid
- required:
- - service
- - token_key
- example:
- service: github-api-v1
- token_key: N1BP5ZSpB
- error:
- nullable: true
- default: null
+ - group
+ summary: Delete specific user in group
+ operationId: delete_group_user
responses:
- '201':
- description: token created
- content:
- application/json:
- schema:
- type: object
- required:
- - data
- properties:
- data:
- description: api keys for third party services
- type: object
- properties:
- service:
- description: uniquely identifies the service where this token is used
- type: string
- token_key:
- description: basic token key
- type: string
- format: uuid
- token_secret:
- type: string
- format: uuid
- required:
- - service
- - token_key
- example:
- service: github-api-v1
- token_key: N1BP5ZSpB
- error:
- nullable: true
- default: null
+ '204':
+ description: successfully removed user
default:
description: Default http error response body
content:
@@ -2658,67 +4533,6 @@ paths:
message: Password is not secure
field: pasword
status: 400
- '/me/tokens/{service}':
- parameters:
- - name: service
- in: path
- required: true
- schema:
- type: string
- get:
- summary: Gets specific token
- operationId: get_token
- tags:
- - user
- responses:
- '200':
- description: got detailed token
- content:
- application/json:
- schema:
- type: object
- required:
- - data
- properties:
- data:
- description: api keys for third party services
- type: object
- properties:
- service:
- description: uniquely identifies the service where this token is used
- type: string
- token_key:
- description: basic token key
- type: string
- format: uuid
- token_secret:
- type: string
- format: uuid
- required:
- - service
- - token_key
- example:
- service: github-api-v1
- token_key: N1BP5ZSpB
- error:
- nullable: true
- default: null
- put:
- summary: Updates token
- operationId: update_token
- tags:
- - user
- responses:
- '204':
- description: token has been successfully updated
- delete:
- summary: Delete token
- operationId: delete_token
- tags:
- - user
- responses:
- '204':
- description: token has been successfully deleted
/storage/locations:
get:
summary: Get available storage locations
@@ -4361,6 +6175,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -4678,6 +6512,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -4900,6 +6754,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -5214,6 +7088,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -5534,6 +7428,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -5846,6 +7760,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -6068,6 +8002,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -6404,6 +8358,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -7420,6 +9394,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
@@ -7733,6 +9727,26 @@ paths:
accessRights:
type: object
description: object containing the GroupID as key and read/write/execution permissions as value
+ additionalProperties: false
+ x-patternProperties:
+ ^\d+$:
+ type: object
+ description: the group id
+ additionalProperties: false
+ required:
+ - read
+ - write
+ - delete
+ properties:
+ read:
+ type: boolean
+ description: gives read access
+ write:
+ type: boolean
+ description: gives write access
+ delete:
+ type: boolean
+ description: gives deletion rights
creationDate:
type: string
description: project creation date
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/schemas/project-v0.0.1.json b/services/web/server/src/simcore_service_webserver/api/v0/schemas/project-v0.0.1.json
index d40e6ceb1a4..4ecd40e4248 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/schemas/project-v0.0.1.json
+++ b/services/web/server/src/simcore_service_webserver/api/v0/schemas/project-v0.0.1.json
@@ -46,7 +46,34 @@
},
"accessRights": {
"type": "object",
- "description": "object containing the GroupID as key and read/write/execution permissions as value"
+ "description": "object containing the GroupID as key and read/write/execution permissions as value",
+ "additionalProperties": false,
+ "patternProperties": {
+ "^\\d+$": {
+ "type": "object",
+ "description": "the group id",
+ "additionalProperties": false,
+ "required": [
+ "read",
+ "write",
+ "delete"
+ ],
+ "properties": {
+ "read": {
+ "type": "boolean",
+ "description": "gives read access"
+ },
+ "write": {
+ "type": "boolean",
+ "description": "gives write access"
+ },
+ "delete": {
+ "type": "boolean",
+ "description": "gives deletion rights"
+ }
+ }
+ }
+ }
},
"creationDate": {
"type": "string",
@@ -307,4 +334,4 @@
}
}
}
-}
\ No newline at end of file
+}
diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py
index be91bc0eced..ee1e8e1f05b 100644
--- a/services/web/server/src/simcore_service_webserver/application.py
+++ b/services/web/server/src/simcore_service_webserver/application.py
@@ -9,7 +9,6 @@
from servicelib.application import create_safe_application
-
from .activity import setup_activity
from .application_proxy import setup_app_proxy
from .catalog import setup_catalog
@@ -18,8 +17,10 @@
from .diagnostics_plugin import setup_diagnostics
from .director import setup_director
from .email import setup_email
+from .groups import setup_groups
from .login import setup_login
from .projects import setup_projects
+from .publications import setup_publications
from .resource_manager import setup_resource_manager
from .rest import setup_rest
from .security import setup_security
@@ -31,7 +32,6 @@
from .tags import setup_tags
from .tracing import setup_app_tracing
from .users import setup_users
-from .publications import setup_publications
log = logging.getLogger(__name__)
@@ -63,6 +63,7 @@ def create_application(config: Dict) -> web.Application:
setup_director(app)
setup_storage(app)
setup_users(app)
+ setup_groups(app)
setup_projects(app) # needs storage
setup_studies_access(app)
setup_activity(app)
diff --git a/services/web/server/src/simcore_service_webserver/application_config.py b/services/web/server/src/simcore_service_webserver/application_config.py
index 706373f86f2..1c3698eb905 100644
--- a/services/web/server/src/simcore_service_webserver/application_config.py
+++ b/services/web/server/src/simcore_service_webserver/application_config.py
@@ -91,6 +91,7 @@ def create_schema() -> T.Dict:
addon_section("reverse_proxy", optional=True): minimal_addon_schema(),
addon_section("application_proxy", optional=True): minimal_addon_schema(),
addon_section("users", optional=True): minimal_addon_schema(),
+ addon_section("groups", optional=True): minimal_addon_schema(),
addon_section("studies_access", optional=True): minimal_addon_schema(),
addon_section("tags", optional=True): minimal_addon_schema(),
addon_section("publications", optional=True): minimal_addon_schema(),
diff --git a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py
index 8f203345110..f5ae1349a91 100644
--- a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py
+++ b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py
@@ -9,12 +9,15 @@
from aiohttp import web
from aiopg.sa import Engine
+from aiopg.sa.result import RowProxy
from sqlalchemy.sql import select
from servicelib.application_keys import APP_DB_ENGINE_KEY
+from servicelib.utils import logged_gather
+from simcore_postgres_database.webserver_models import user_to_groups
from .projects import projects_api, projects_exceptions
-from .projects.projects_models import projects, user_to_projects
+from .projects.projects_models import projects
from .socketio.events import post_messages
log = logging.getLogger(__name__)
@@ -78,28 +81,55 @@ async def listen(app: web.Application):
task_output = node_data["outputs"]
node_id = node_data["node_id"]
project_id = node_data["project_id"]
+ # FIXME: we do not know who triggered these changes. we assume the user had the rights to do so
+ # therefore we'll use the prj_owner user id. This should be fixed when the new sidecar comes in
+ # and comp_tasks/comp_pipeline get deprecated.
+
# find the user(s) linked to that project
- joint_table = user_to_projects.join(projects)
- query = (
- select([user_to_projects])
- .select_from(joint_table)
- .where(projects.c.uuid == project_id)
+ result = await conn.execute(
+ select([projects]).where(projects.c.uuid == project_id)
+ )
+ the_project: RowProxy = result.fetchone()
+ if not the_project:
+ log.warning(
+ "Project %s was not found and cannot be updated", project_id
+ )
+ continue
+ the_project_owner = the_project["prj_owner"]
+
+ # update the project
+ try:
+ node_data = await projects_api.update_project_node_outputs(
+ app, the_project_owner, project_id, node_id, data=task_output
+ )
+ except projects_exceptions.ProjectNotFoundError:
+ log.warning(
+ "Project %s was not found and cannot be updated", project_id
+ )
+ continue
+ except projects_exceptions.NodeNotFoundError:
+ log.warning(
+ "Node %s ib project %s not found and cannot be updated",
+ node_id,
+ project_id,
+ )
+ continue
+            # notify the client(s): the owner + anyone with read access
+ clients = [the_project_owner]
+ for gid, access_rights in the_project["access_rights"].items():
+ if not access_rights["read"]:
+ continue
+ # let's get the users in that group
+ async for user in conn.execute(
+ select([user_to_groups.c.uid]).where(user_to_groups.c.gid == gid)
+ ):
+ clients.append(user["uid"])
+
+ messages = {"nodeUpdated": {"Node": node_id, "Data": node_data}}
+
+ await logged_gather(
+ *[post_messages(app, client, messages) for client in clients], False
)
- async for row in conn.execute(query):
- user_id = row["user_id"]
- try:
- node_data = await projects_api.update_project_node_outputs(
- app, user_id, project_id, node_id, data=task_output
- )
- except projects_exceptions.ProjectNotFoundError:
- log.exception("Project %s not found", project_id)
- except projects_exceptions.NodeNotFoundError:
- log.exception(
- "Node %s ib project %s not found", node_id, project_id
- )
-
- messages = {"nodeUpdated": {"Node": node_id, "Data": node_data}}
- await post_messages(app, user_id, messages)
async def comp_tasks_listening_task(app: web.Application) -> None:
diff --git a/services/web/server/src/simcore_service_webserver/computation_handlers.py b/services/web/server/src/simcore_service_webserver/computation_handlers.py
index 8340e8eaabe..1c74009dae7 100644
--- a/services/web/server/src/simcore_service_webserver/computation_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/computation_handlers.py
@@ -83,7 +83,7 @@ async def start_pipeline(request: web.Request) -> web.Response:
log.debug(
"Task (user_id=%s, project_id=%s) submitted for execution.", user_id, project_id
)
-
+
# answer the client while task has been spawned
data = {
# TODO: PC->SAN: some name with task id. e.g. to distinguish two projects with identical pipeline?
diff --git a/services/web/server/src/simcore_service_webserver/computation_subscribe.py b/services/web/server/src/simcore_service_webserver/computation_subscribe.py
index b00ac056f62..837a08cdafa 100644
--- a/services/web/server/src/simcore_service_webserver/computation_subscribe.py
+++ b/services/web/server/src/simcore_service_webserver/computation_subscribe.py
@@ -81,9 +81,13 @@ async def instrumentation_message_handler(
) -> None:
data = json.loads(message.body)
if data["metrics"] == "service_started":
- service_started(app, **{key:value for key, value in data.items() if key != "metrics"})
+ service_started(
+ app, **{key: value for key, value in data.items() if key != "metrics"}
+ )
elif data["metrics"] == "service_stopped":
- service_stopped(app, **{key:value for key, value in data.items() if key != "metrics"})
+ service_stopped(
+ app, **{key: value for key, value in data.items() if key != "metrics"}
+ )
await message.ack()
diff --git a/services/web/server/src/simcore_service_webserver/data/fake-template-projects.isan.json b/services/web/server/src/simcore_service_webserver/data/fake-template-projects.isan.json
index 66666f04841..178ae71fcf7 100644
--- a/services/web/server/src/simcore_service_webserver/data/fake-template-projects.isan.json
+++ b/services/web/server/src/simcore_service_webserver/data/fake-template-projects.isan.json
@@ -7,6 +7,7 @@
"prjOwner": "maiz",
"creationDate": "2019-05-24T10:36:57.813Z",
"lastChangeDate": "2019-05-24T11:36:12.015Z",
+ "tags": [],
"accessRights": {},
"workbench": {
"template-uuid-48eb-a9d2-aaad6b72400a": {
@@ -59,6 +60,7 @@
"prjOwner": "maiz",
"creationDate": "2019-05-24T10:36:57.813Z",
"lastChangeDate": " 2019-05-24T10:38:12.888Z",
+ "tags": [],
"accessRights": {},
"workbench": {
"template-uuid-403e-865a-8c5ca30671c6": {
@@ -135,6 +137,7 @@
"prjOwner": "MattWard",
"creationDate": "2019-04-30T08:52:20.937Z",
"lastChangeDate": "2019-04-30T08:59:26.090Z",
+ "tags": [],
"accessRights": {},
"workbench": {
"template-uuid-4021-b2ef-b2e163bfbd16": {
@@ -161,6 +164,7 @@
"prjOwner": "Colleen Clancy",
"creationDate": "2018-10-22T09:13:13.360Z",
"lastChangeDate": "2018-10-22T09:33:41.858Z",
+ "tags": [],
"accessRights": {},
"workbench": {
"template-uuid-4674-b758-946151cae351": {
@@ -242,4 +246,4 @@
}
}
}
-]
\ No newline at end of file
+]
diff --git a/services/web/server/src/simcore_service_webserver/groups.py b/services/web/server/src/simcore_service_webserver/groups.py
new file mode 100644
index 00000000000..e447dcb5af9
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/groups.py
@@ -0,0 +1,42 @@
+""" users management subsystem
+
+"""
+import logging
+
+from aiohttp import web
+
+from servicelib.application_setup import ModuleCategory, app_module_setup
+from servicelib.rest_routing import (
+ get_handlers_from_namespace,
+ iter_path_operations,
+ map_handlers_with_operations,
+)
+
+from . import groups_handlers
+from .rest_config import APP_OPENAPI_SPECS_KEY
+
+logger = logging.getLogger(__name__)
+
+
+@app_module_setup(
+ __name__,
+ ModuleCategory.ADDON,
+ depends=["simcore_service_webserver.rest", "simcore_service_webserver.users"],
+ logger=logger,
+)
+def setup(app: web.Application):
+
+ # routes
+ specs = app[APP_OPENAPI_SPECS_KEY]
+ routes = map_handlers_with_operations(
+ get_handlers_from_namespace(groups_handlers),
+ filter(lambda o: "groups" in o[1].split("/"), iter_path_operations(specs)),
+ strict=True,
+ )
+ app.router.add_routes(routes)
+
+
+# alias
+setup_groups = setup
+
+__all__ = "setup_groups"
diff --git a/services/web/server/src/simcore_service_webserver/groups_api.py b/services/web/server/src/simcore_service_webserver/groups_api.py
new file mode 100644
index 00000000000..722fb264997
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/groups_api.py
@@ -0,0 +1,310 @@
+import logging
+from typing import Dict, List, Optional, Tuple
+
+import sqlalchemy as sa
+from aiohttp import web
+from aiopg.sa import SAConnection
+from aiopg.sa.result import RowProxy
+from sqlalchemy import and_, literal_column
+
+from servicelib.application_keys import APP_DB_ENGINE_KEY
+
+from .db_models import GroupType, groups, user_to_groups, users
+from .groups_exceptions import (
+ GroupNotFoundError,
+ GroupsException,
+ UserInGroupNotFoundError,
+)
+from .groups_utils import (
+ check_group_permissions,
+ convert_groups_db_to_schema,
+ convert_groups_schema_to_db,
+ convert_user_in_group_to_schema,
+)
+from .users_exceptions import UserNotFoundError
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_GROUP_READ_ACCESS_RIGHTS = {"read": True, "write": False, "delete": False}
+DEFAULT_GROUP_OWNER_ACCESS_RIGHTS = {"read": True, "write": True, "delete": True}
+
+
+async def list_user_groups(
+ app: web.Application, user_id: int
+) -> Tuple[Dict[str, str], List[Dict[str, str]], Dict[str, str]]:
+ """returns the user groups
+ Returns:
+ Tuple[List[Dict[str, str]]] -- [returns the user primary group, standard groups and the all group]
+ """
+ engine = app[APP_DB_ENGINE_KEY]
+ primary_group = {}
+ user_groups = []
+ all_group = {}
+ async with engine.acquire() as conn:
+ query = (
+ sa.select([groups, user_to_groups.c.access_rights])
+ .select_from(
+ user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid),
+ )
+ .where(user_to_groups.c.uid == user_id)
+ )
+ async for row in conn.execute(query):
+ if row["type"] == GroupType.EVERYONE:
+ all_group = convert_groups_db_to_schema(row)
+ elif row["type"] == GroupType.PRIMARY:
+ primary_group = convert_groups_db_to_schema(row)
+ else:
+ # only add if user has read access
+ if row.access_rights["read"]:
+ user_groups.append(convert_groups_db_to_schema(row))
+
+ return (primary_group, user_groups, all_group)
+
+
+async def _get_user_group(conn: SAConnection, user_id: int, gid: int) -> RowProxy:
+ result = await conn.execute(
+ sa.select([groups, user_to_groups.c.access_rights])
+ .select_from(user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid))
+ .where(and_(user_to_groups.c.uid == user_id, user_to_groups.c.gid == gid))
+ )
+ group = await result.fetchone()
+ if not group:
+ raise GroupNotFoundError(gid)
+ return group
+
+
+async def _get_user_from_email(app: web.Application, email: str) -> RowProxy:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ result = await conn.execute(sa.select([users]).where(users.c.email == email))
+ user: RowProxy = await result.fetchone()
+ if not user:
+ raise UserNotFoundError(email=email)
+ return user
+
+
+async def get_user_group(
+ app: web.Application, user_id: int, gid: int
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ group: RowProxy = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "read")
+ return convert_groups_db_to_schema(group)
+
+
+async def create_user_group(
+ app: web.Application, user_id: int, new_group: Dict
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ result = await conn.execute(
+ sa.select([users.c.primary_gid]).where(users.c.id == user_id)
+ )
+ user: RowProxy = await result.fetchone()
+ if not user:
+ raise UserNotFoundError(uid=user_id)
+ result = await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ groups.insert()
+ .values(**convert_groups_schema_to_db(new_group))
+ .returning(literal_column("*"))
+ )
+ group: RowProxy = await result.fetchone()
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ user_to_groups.insert().values(
+ uid=user_id,
+ gid=group.gid,
+ access_rights=DEFAULT_GROUP_OWNER_ACCESS_RIGHTS,
+ )
+ )
+ return convert_groups_db_to_schema(
+ group, access_rights=DEFAULT_GROUP_OWNER_ACCESS_RIGHTS
+ )
+
+
+async def update_user_group(
+ app: web.Application, user_id: int, gid: int, new_group_values: Dict[str, str]
+) -> Dict[str, str]:
+ new_values = {
+ k: v for k, v in convert_groups_schema_to_db(new_group_values).items() if v
+ }
+
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ group = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "write")
+
+ result = await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ groups.update()
+ .values(**new_values)
+ .where(groups.c.gid == group.gid)
+ .returning(literal_column("*"))
+ )
+ updated_group = await result.fetchone()
+ return convert_groups_db_to_schema(
+ updated_group, access_rights=group.access_rights
+ )
+
+
+async def delete_user_group(app: web.Application, user_id: int, gid: int) -> None:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ group = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "delete")
+
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ groups.delete().where(groups.c.gid == group.gid)
+ )
+
+
+async def list_users_in_group(
+ app: web.Application, user_id: int, gid: int
+) -> List[Dict[str, str]]:
+ engine = app[APP_DB_ENGINE_KEY]
+
+ async with engine.acquire() as conn:
+ # first check if the group exists
+ group: RowProxy = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "read")
+ # now get the list
+ query = (
+ sa.select([users, user_to_groups.c.access_rights])
+ .select_from(users.join(user_to_groups))
+ .where(user_to_groups.c.gid == gid)
+ )
+ users_list = [
+ convert_user_in_group_to_schema(row) async for row in conn.execute(query)
+ ]
+ return users_list
+
+
+async def add_user_in_group(
+ app: web.Application,
+ user_id: int,
+ gid: int,
+ *,
+ new_user_id: Optional[int] = None,
+ new_user_email: Optional[str] = None,
+ access_rights: Optional[Dict[str, bool]] = None,
+) -> None:
+ if not new_user_id and not new_user_email:
+ raise GroupsException("Invalid method call, missing user id or user email")
+
+ if new_user_email:
+ user: RowProxy = await _get_user_from_email(app, new_user_email)
+ new_user_id = user["id"]
+
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ # first check if the group exists
+ group: RowProxy = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "write")
+ # now check the new user exists
+ users_count = await conn.scalar(
+ # pylint: disable=no-value-for-parameter
+ sa.select([sa.func.count()]).where(users.c.id == new_user_id)
+ )
+ if not users_count:
+ raise UserInGroupNotFoundError(new_user_id, gid) # type: ignore
+ # add the new user to the group now
+        user_access_rights = dict(DEFAULT_GROUP_READ_ACCESS_RIGHTS)  # copy: .update below must not mutate the module default
+ if access_rights:
+ user_access_rights.update(access_rights)
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ user_to_groups.insert().values(
+ uid=new_user_id, gid=group.gid, access_rights=user_access_rights
+ )
+ )
+
+
+async def _get_user_in_group_permissions(
+ conn: SAConnection, gid: int, the_user_id_in_group: int
+) -> RowProxy:
+
+ # now get the user
+ result = await conn.execute(
+ sa.select([users, user_to_groups.c.access_rights])
+ .select_from(users.join(user_to_groups, users.c.id == user_to_groups.c.uid))
+ .where(and_(user_to_groups.c.gid == gid, users.c.id == the_user_id_in_group))
+ )
+ the_user: RowProxy = await result.fetchone()
+ if not the_user:
+ raise UserInGroupNotFoundError(the_user_id_in_group, gid)
+ return the_user
+
+
+async def get_user_in_group(
+ app: web.Application, user_id: int, gid: int, the_user_id_in_group: int
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+
+ async with engine.acquire() as conn:
+ # first check if the group exists
+ group: RowProxy = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "read")
+ # get the user with its permissions
+ the_user: RowProxy = await _get_user_in_group_permissions(
+ conn, gid, the_user_id_in_group
+ )
+ return convert_user_in_group_to_schema(the_user)
+
+
+async def update_user_in_group(
+ app: web.Application,
+ user_id: int,
+ gid: int,
+ the_user_id_in_group: int,
+ new_values_for_user_in_group: Dict,
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ # first check if the group exists
+ group: RowProxy = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "write")
+ # now check the user exists
+ the_user: RowProxy = await _get_user_in_group_permissions(
+ conn, gid, the_user_id_in_group
+ )
+ # modify the user access rights
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ user_to_groups.update()
+ .values(**new_values_for_user_in_group)
+ .where(
+ and_(
+ user_to_groups.c.uid == the_user_id_in_group,
+ user_to_groups.c.gid == gid,
+ )
+ )
+ )
+ the_user = dict(the_user)
+ the_user.update(**new_values_for_user_in_group)
+ return convert_user_in_group_to_schema(the_user)
+
+
+async def delete_user_in_group(
+ app: web.Application, user_id: int, gid: int, the_user_id_in_group: int
+) -> None:
+ engine = app[APP_DB_ENGINE_KEY]
+
+ async with engine.acquire() as conn:
+ # first check if the group exists
+ group: RowProxy = await _get_user_group(conn, user_id, gid)
+ check_group_permissions(group, user_id, gid, "write")
+ # check the user exists
+ await _get_user_in_group_permissions(conn, gid, the_user_id_in_group)
+ # delete him/her
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ user_to_groups.delete().where(
+ and_(
+ user_to_groups.c.uid == the_user_id_in_group,
+ user_to_groups.c.gid == gid,
+ )
+ )
+ )
diff --git a/services/web/server/src/simcore_service_webserver/groups_exceptions.py b/services/web/server/src/simcore_service_webserver/groups_exceptions.py
new file mode 100644
index 00000000000..224fbbd7a96
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/groups_exceptions.py
@@ -0,0 +1,32 @@
+"""Defines the different exceptions that may arise in the projects subpackage"""
+
+
+class GroupsException(Exception):
+ """Basic exception for errors raised in projects"""
+
+ def __init__(self, msg: str = None):
+        super().__init__(msg or "Unexpected error occurred in groups subpackage")
+
+
+class GroupNotFoundError(GroupsException):
+ """Group was not found in DB"""
+
+ def __init__(self, gid: int):
+ super().__init__(f"Group with id {gid} not found")
+ self.gid = gid
+
+
+class UserInsufficientRightsError(GroupsException):
+ """User has not sufficient rights"""
+
+ def __init__(self, msg: str):
+ super().__init__(msg)
+
+
+class UserInGroupNotFoundError(GroupsException):
+ """User in group was not found in DB"""
+
+    def __init__(self, uid: int, gid: int):
+ super().__init__(f"User id {uid} in Group {gid} not found")
+ self.gid = gid
+ self.uid = uid
diff --git a/services/web/server/src/simcore_service_webserver/groups_handlers.py b/services/web/server/src/simcore_service_webserver/groups_handlers.py
new file mode 100644
index 00000000000..4e5629ff8f9
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/groups_handlers.py
@@ -0,0 +1,190 @@
+# pylint: disable=no-value-for-parameter
+
+import json
+import logging
+
+from aiohttp import web
+
+from . import groups_api
+from .groups_exceptions import (
+ GroupNotFoundError,
+ UserInGroupNotFoundError,
+ UserInsufficientRightsError,
+)
+from .login.decorators import RQT_USERID_KEY, login_required
+from .security_decorators import permission_required
+from .users_exceptions import UserNotFoundError
+
+logger = logging.getLogger(__name__)
+
+
+# groups/ ------------------------------------------------------
+@login_required
+@permission_required("groups.*")
+async def list_groups(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ primary_group, user_groups, all_group = await groups_api.list_user_groups(
+ request.app, user_id
+ )
+ return {"me": primary_group, "organizations": user_groups, "all": all_group}
+
+
+@login_required
+@permission_required("groups.*")
+async def get_group(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ try:
+ return await groups_api.get_user_group(request.app, user_id, gid)
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+
+
+@login_required
+@permission_required("groups.*")
+async def create_group(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ new_group = await request.json()
+
+ try:
+ new_group = await groups_api.create_user_group(request.app, user_id, new_group)
+ raise web.HTTPCreated(
+ text=json.dumps({"data": new_group}), content_type="application/json"
+ )
+ except UserNotFoundError:
+ raise web.HTTPNotFound(reason=f"User {user_id} not found")
+
+
+@login_required
+@permission_required("groups.*")
+async def update_group(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ new_group_values = await request.json()
+
+ try:
+ return await groups_api.update_user_group(
+ request.app, user_id, gid, new_group_values
+ )
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
+
+
+@login_required
+@permission_required("groups.*")
+async def delete_group(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ try:
+ await groups_api.delete_user_group(request.app, user_id, gid)
+ raise web.HTTPNoContent()
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
+
+
+# groups/{gid}/users --------------------------------------------
+@login_required
+@permission_required("groups.*")
+async def get_group_users(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ try:
+ return await groups_api.list_users_in_group(request.app, user_id, gid)
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
+
+
+@login_required
+@permission_required("groups.*")
+async def add_group_user(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ new_user_in_group = await request.json()
+ # TODO: validate!!
+ assert "uid" in new_user_in_group or "email" in new_user_in_group # nosec
+ try:
+ new_user_id = new_user_in_group["uid"] if "uid" in new_user_in_group else None
+ new_user_email = (
+ new_user_in_group["email"] if "email" in new_user_in_group else None
+ )
+
+ await groups_api.add_user_in_group(
+ request.app,
+ user_id,
+ gid,
+ new_user_id=new_user_id,
+ new_user_email=new_user_email,
+ )
+ raise web.HTTPNoContent()
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInGroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"User not found in group {gid}")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
+
+
+@login_required
+@permission_required("groups.*")
+async def get_group_user(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ the_user_id_in_group = request.match_info["uid"]
+ try:
+ return await groups_api.get_user_in_group(
+ request.app, user_id, gid, the_user_id_in_group
+ )
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInGroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
+
+
+@login_required
+@permission_required("groups.*")
+async def update_group_user(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ the_user_id_in_group = request.match_info["uid"]
+ new_values_for_user_in_group = await request.json()
+ try:
+ return await groups_api.update_user_in_group(
+ request.app,
+ user_id,
+ gid,
+ the_user_id_in_group,
+ new_values_for_user_in_group,
+ )
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInGroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
+
+
+@login_required
+@permission_required("groups.*")
+async def delete_group_user(request: web.Request):
+ user_id = request[RQT_USERID_KEY]
+ gid = request.match_info["gid"]
+ the_user_id_in_group = request.match_info["uid"]
+ try:
+ await groups_api.delete_user_in_group(
+ request.app, user_id, gid, the_user_id_in_group
+ )
+ raise web.HTTPNoContent()
+ except GroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"Group {gid} not found")
+ except UserInGroupNotFoundError:
+ raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found")
+ except UserInsufficientRightsError:
+ raise web.HTTPForbidden()
diff --git a/services/web/server/src/simcore_service_webserver/groups_utils.py b/services/web/server/src/simcore_service_webserver/groups_utils.py
new file mode 100644
index 00000000000..5ac3a0eafa8
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/groups_utils.py
@@ -0,0 +1,54 @@
+import logging
+from typing import Dict, Optional, Union
+
+from aiopg.sa.result import RowProxy
+
+from .groups_exceptions import UserInsufficientRightsError
+from .users_utils import convert_user_db_to_schema
+
+logger = logging.getLogger(__name__)
+
+GROUPS_SCHEMA_TO_DB = {
+ "gid": "gid",
+ "label": "name",
+ "description": "description",
+ "thumbnail": "thumbnail",
+ "access_rights": "access_rights",
+}
+
+
+def check_group_permissions(
+ group: RowProxy, user_id: int, gid: int, permission: str
+) -> None:
+ if not group.access_rights[permission]:
+ raise UserInsufficientRightsError(
+ f"User {user_id} has insufficient rights for {permission} access to group {gid}"
+ )
+
+
+def convert_groups_db_to_schema(
+ db_row: RowProxy, *, prefix: Optional[str] = "", **kwargs
+) -> Dict:
+ converted_dict = {
+ k: db_row[f"{prefix}{v}"]
+ for k, v in GROUPS_SCHEMA_TO_DB.items()
+ if f"{prefix}{v}" in db_row
+ }
+ converted_dict.update(**kwargs)
+ return converted_dict
+
+
+def convert_groups_schema_to_db(schema: Dict) -> Dict:
+ return {
+ v: schema[k]
+ for k, v in GROUPS_SCHEMA_TO_DB.items()
+ if k in schema and k != "gid"
+ }
+
+
+def convert_user_in_group_to_schema(row: Union[RowProxy, Dict]) -> Dict[str, str]:
+ group_user = convert_user_db_to_schema(row)
+ group_user.pop("role")
+ group_user["access_rights"] = row["access_rights"]
+ group_user["gid"] = row["primary_gid"]
+ return group_user
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
index 23f7502f01b..35896cdaa03 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
@@ -22,10 +22,11 @@
from ..computation_api import delete_pipeline_db
from ..director import director_api
-from ..storage_api import \
- copy_data_folders_from_project # mocked in unit-tests
-from ..storage_api import (delete_data_folders_of_project,
- delete_data_folders_of_project_node)
+from ..storage_api import copy_data_folders_from_project # mocked in unit-tests
+from ..storage_api import (
+ delete_data_folders_of_project,
+ delete_data_folders_of_project_node,
+)
from .config import CONFIG_SECTION_NAME
from .projects_db import APP_PROJECT_DBAPI
from .projects_exceptions import NodeNotFoundError
@@ -291,7 +292,9 @@ async def update_project_node_outputs(
if data:
# NOTE: update outputs (not required) if necessary as the UI expects a
# dataset/label field that is missing
- outputs: Dict[str,Any] = project["workbench"][node_id].setdefault("outputs", {})
+ outputs: Dict[str, Any] = project["workbench"][node_id].setdefault(
+ "outputs", {}
+ )
outputs.update(data)
for output_key in outputs.keys():
@@ -308,6 +311,7 @@ async def update_project_node_outputs(
await db.update_user_project(project, user_id, project_id)
return project["workbench"][node_id]
+
async def get_workbench_node_ids_from_project_uuid(
app: web.Application, project_uuid: str,
) -> Set[str]:
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_db.py b/services/web/server/src/simcore_service_webserver/projects/projects_db.py
index b8735ea5388..51f473e938c 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_db.py
@@ -8,7 +8,8 @@
import logging
import uuid as uuidlib
from datetime import datetime
-from typing import Dict, List, Mapping, Optional, Set
+from enum import Enum
+from typing import Dict, List, Mapping, Optional, Set, Union
import psycopg2.errors
import sqlalchemy as sa
@@ -16,14 +17,13 @@
from aiopg.sa import Engine
from aiopg.sa.connection import SAConnection
from aiopg.sa.result import ResultProxy, RowProxy
-
from change_case import ChangeCase
-from psycopg2 import IntegrityError
+from sqlalchemy import literal_column
from sqlalchemy.sql import and_, select
from servicelib.application_keys import APP_DB_ENGINE_KEY
-from ..db_models import study_tags, users, user_to_groups
+from ..db_models import GroupType, groups, study_tags, user_to_groups, users
from ..utils import format_datetime, now_str
from .projects_exceptions import (
ProjectInvalidRightsError,
@@ -31,13 +31,60 @@
ProjectsException,
)
from .projects_fakes import Fake
-from .projects_models import ProjectType, projects, user_to_projects
+from .projects_models import ProjectType, projects
log = logging.getLogger(__name__)
APP_PROJECT_DBAPI = __name__ + ".ProjectDBAPI"
DB_EXCLUSIVE_COLUMNS = ["type", "id", "published"]
+
+class ProjectAccessRights(Enum):
+ OWNER = {"read": True, "write": True, "delete": True}
+ COLLABORATOR = {"read": True, "write": True, "delete": False}
+ VIEWER = {"read": True, "write": False, "delete": False}
+
+
+def _check_project_permissions(
+ project: Union[RowProxy, Dict],
+ user_id: int,
+ user_groups: List[RowProxy],
+ permission: str,
+) -> None:
+ # compute access rights by order of priority all group > organizations > primary
+ primary_group = next(filter(lambda x: x.type == GroupType.PRIMARY, user_groups))
+ standard_groups = filter(lambda x: x.type == GroupType.STANDARD, user_groups)
+ all_group = next(filter(lambda x: x.type == GroupType.EVERYONE, user_groups))
+
+ if f"{primary_group.gid}" in project["access_rights"]:
+ if not project["access_rights"][f"{primary_group.gid}"][permission]:
+ raise ProjectInvalidRightsError(user_id, project["uuid"])
+ return
+ # let's check if standard groups are in there and take the most liberal rights
+ standard_groups_permissions = []
+ for group in standard_groups:
+ if f"{group.gid}" in project["access_rights"]:
+ standard_groups_permissions.append(
+ project["access_rights"][f"{group.gid}"][permission]
+ )
+ if standard_groups_permissions:
+ if not any(standard_groups_permissions):
+ raise ProjectInvalidRightsError(user_id, project["uuid"])
+ return
+
+ if (
+ not f"{all_group.gid}" in project["access_rights"]
+ or not project["access_rights"][f"{all_group.gid}"][permission]
+ ):
+ raise ProjectInvalidRightsError(user_id, project["uuid"])
+
+
+def _create_project_access_rights(
+ gid: int, access: ProjectAccessRights
+) -> Dict[str, Dict[str, bool]]:
+ return {f"{gid}": access.value}
+
+
# TODO: check here how schema to model db works!?
def _convert_to_db_names(project_document_data: Dict) -> Dict:
converted_args = {}
@@ -51,7 +98,9 @@ def _convert_to_db_names(project_document_data: Dict) -> Dict:
return converted_args
-def _convert_to_schema_names(project_database_data: Mapping, user_email: str) -> Dict:
+def _convert_to_schema_names(
+ project_database_data: Mapping, user_email: str, **kwargs
+) -> Dict:
converted_args = {}
for key, value in project_database_data.items():
if key in DB_EXCLUSIVE_COLUMNS:
@@ -63,6 +112,7 @@ def _convert_to_schema_names(project_database_data: Mapping, user_email: str) ->
# this entry has to be converted to the owner e-mail address
converted_value = user_email
converted_args[ChangeCase.snake_to_camel(key)] = converted_value
+ converted_args.update(**kwargs)
return converted_args
@@ -98,7 +148,7 @@ def engine(self) -> Engine:
self._init_engine()
return self._engine
- async def add_projects(self, projects_list: List[Dict], user_id: str) -> List[str]:
+ async def add_projects(self, projects_list: List[Dict], user_id: int) -> List[str]:
"""
adds all projects and assigns to a user
@@ -114,7 +164,7 @@ async def add_projects(self, projects_list: List[Dict], user_id: str) -> List[st
async def add_project(
self,
prj: Dict,
- user_id: str,
+ user_id: int,
*,
force_project_uuid=False,
force_as_template=False,
@@ -127,7 +177,7 @@ async def add_project(
:param prj: schema-compliant project data
:type prj: Dict
:param user_id: database's user identifier
- :type user_id: str
+ :type user_id: int
:param force_project_uuid: enforces valid uuid, defaults to False
:type force_project_uuid: bool, optional
:param force_as_template: adds data as template, defaults to False
@@ -153,6 +203,14 @@ async def add_project(
"prj_owner": user_id if user_id else None,
}
)
+        # Validate access_rights: are the gids valid? Also ensure prj_owner is included
+ if user_id:
+ primary_gid = await self._get_user_primary_group_gid(conn, user_id)
+ kargs["access_rights"].update(
+ _create_project_access_rights(
+ primary_gid, ProjectAccessRights.OWNER
+ )
+ )
# must be valid uuid
try:
@@ -164,13 +222,11 @@ async def add_project(
# insert project
retry = True
- project_id = None
while retry:
try:
query = projects.insert().values(**kargs)
result = await conn.execute(query)
- row = await result.first()
- project_id = row[projects.c.id]
+ await result.first()
retry = False
except psycopg2.errors.UniqueViolation as err: # pylint: disable=no-member
if (
@@ -181,21 +237,6 @@ async def add_project(
kargs["uuid"] = str(uuidlib.uuid1())
retry = True
- if user_id is not None:
- try:
- query = user_to_projects.insert().values(
- user_id=user_id, project_id=project_id
- )
- await conn.execute(query)
- except IntegrityError as exc:
- log.exception("Cannot associate project %d to user %d", project_id, user_id)
-
- # rollback projects database
- query = projects.delete().where(projects.c.id == project_id)
- await conn.execute(query)
-
- raise ProjectInvalidRightsError(user_id, prj["uuid"]) from exc
-
# Updated values
user_email = await self._get_user_email(conn, user_id)
prj = _convert_to_schema_names(kargs, user_email)
@@ -203,27 +244,25 @@ async def add_project(
prj["tags"] = []
return prj
- async def load_user_projects(self, user_id: str) -> List[Dict]:
+ async def load_user_projects(self, user_id: int) -> List[Dict]:
log.info("Loading projects for user %s", user_id)
-
- query = (
- select([projects])
- .select_from(user_to_projects.join(projects))
- .where(
- and_(
- user_to_projects.c.user_id == user_id,
- projects.c.type != ProjectType.TEMPLATE,
- )
- )
- )
-
async with self.engine.acquire() as conn:
- projects_list = await self.__load_projects(conn, query)
+ user_groups: List[RowProxy] = await self.__load_user_groups(conn, user_id)
+ query = f"""
+ SELECT *
+ FROM projects
+ WHERE projects.type != 'TEMPLATE'
+ AND (jsonb_exists_any(projects.access_rights, array[{', '.join(f"'{group.gid}'" for group in user_groups)}])
+ OR prj_owner = {user_id})
+ """
+ projects_list = await self.__load_projects(
+ conn, query, user_id, user_groups
+ )
return projects_list
async def load_template_projects(
- self, user_id: str, *, only_published=False
+ self, user_id: int, *, only_published=False
) -> List[Dict]:
log.info("Loading public template projects")
@@ -231,32 +270,44 @@ async def load_template_projects(
projects_list = [prj.data for prj in Fake.projects.values() if prj.template]
async with self.engine.acquire() as conn:
- user_groups: List[str] = await self.__load_user_groups(conn, user_id)
+ user_groups: List[RowProxy] = await self.__load_user_groups(conn, user_id)
# NOTE: in order to use specific postgresql function jsonb_exists_any we use raw call here
query = f"""
SELECT *
FROM projects
WHERE projects.type = 'TEMPLATE'
{'AND projects.published ' if only_published else ''}
-AND (jsonb_exists_any(projects.access_rights, array[{', '.join(f"'{group}'" for group in user_groups)}])
+AND (jsonb_exists_any(projects.access_rights, array[{', '.join(f"'{group.gid}'" for group in user_groups)}])
OR prj_owner = {user_id})
"""
- db_projects = await self.__load_projects(conn, query)
+ db_projects = await self.__load_projects(conn, query, user_id, user_groups)
projects_list.extend(db_projects)
return projects_list
- async def __load_user_groups(self, conn: SAConnection, user_id: str) -> List[str]:
- user_groups: List[str] = []
- query = select([user_to_groups.c.gid]).where(user_to_groups.c.uid == user_id)
+ async def __load_user_groups(
+ self, conn: SAConnection, user_id: int
+ ) -> List[RowProxy]:
+ user_groups: List[RowProxy] = []
+ query = (
+ select([groups])
+ .select_from(groups.join(user_to_groups))
+ .where(user_to_groups.c.uid == user_id)
+ )
async for row in conn.execute(query):
- user_groups.append(row[user_to_groups.c.gid])
+ user_groups.append(row)
return user_groups
- async def __load_projects(self, conn: SAConnection, query) -> List[Dict]:
+ async def __load_projects(
+ self, conn: SAConnection, query: str, user_id: int, user_groups: List[RowProxy]
+ ) -> List[Dict]:
api_projects: List[Dict] = [] # API model-compatible projects
db_projects: List[Dict] = [] # DB model-compatible projects
async for row in conn.execute(query):
+ try:
+ _check_project_permissions(row, user_id, user_groups, "read")
+ except ProjectInvalidRightsError:
+ continue
prj = dict(row.items())
log.debug("found project: %s", prj)
db_projects.append(prj)
@@ -273,27 +324,31 @@ async def __load_projects(self, conn: SAConnection, query) -> List[Dict]:
return api_projects
async def _get_project(
- self, user_id: str, project_uuid: str, exclude_foreign: Optional[List] = None
+ self, user_id: int, project_uuid: str, exclude_foreign: Optional[List] = None
) -> Dict:
exclude_foreign = exclude_foreign or []
async with self.engine.acquire() as conn:
- joint_table = user_to_projects.join(projects)
- query = (
- select([projects])
- .select_from(joint_table)
- .where(
- and_(
- projects.c.uuid == project_uuid,
- user_to_projects.c.user_id == user_id,
- )
- )
- )
+ # this retrieves the projects where user is owner
+ user_groups: List[RowProxy] = await self.__load_user_groups(conn, user_id)
+
+ # NOTE: in order to use specific postgresql function jsonb_exists_any we use raw call here
+ query = f"""
+SELECT *
+FROM projects
+WHERE projects.type != 'TEMPLATE'
+AND uuid = '{project_uuid}'
+AND (jsonb_exists_any(projects.access_rights, array[{', '.join(f"'{group.gid}'" for group in user_groups)}])
+OR prj_owner = {user_id})
+"""
result = await conn.execute(query)
project_row = await result.first()
if not project_row:
raise ProjectNotFoundError(project_uuid)
+            # now carefully check the access rights
+ _check_project_permissions(project_row, user_id, user_groups, "read")
+
project = dict(project_row.items())
if "tags" not in exclude_foreign:
@@ -302,7 +357,7 @@ async def _get_project(
return project
- async def add_tag(self, user_id: str, project_uuid: str, tag_id: int) -> Dict:
+ async def add_tag(self, user_id: int, project_uuid: str, tag_id: int) -> Dict:
project = await self._get_project(user_id, project_uuid)
async with self.engine.acquire() as conn:
# pylint: disable=no-value-for-parameter
@@ -314,7 +369,7 @@ async def add_tag(self, user_id: str, project_uuid: str, tag_id: int) -> Dict:
return _convert_to_schema_names(project, user_email)
raise ProjectsException()
- async def remove_tag(self, user_id: str, project_uuid: str, tag_id: int) -> Dict:
+ async def remove_tag(self, user_id: int, project_uuid: str, tag_id: int) -> Dict:
project = await self._get_project(user_id, project_uuid)
async with self.engine.acquire() as conn:
user_email = await self._get_user_email(conn, user_id)
@@ -330,12 +385,12 @@ async def remove_tag(self, user_id: str, project_uuid: str, tag_id: int) -> Dict
project["tags"].remove(tag_id)
return _convert_to_schema_names(project, user_email)
- async def get_user_project(self, user_id: str, project_uuid: str) -> Dict:
+ async def get_user_project(self, user_id: int, project_uuid: str) -> Dict:
""" Returns all projects *owned* by the user
- - A project is owned with it is mapped in user_to_projects list
- - prj_owner field is not
+ - prj_owner
- Notice that a user can have access to a template but he might not onw it
+ - Notice that a user can have access to a project where he/she has read access
:raises ProjectNotFoundError: project is not assigned to user
:return: schema-compliant project
@@ -349,7 +404,7 @@ async def get_user_project(self, user_id: str, project_uuid: str) -> Dict:
project = await self._get_project(user_id, project_uuid)
async with self.engine.acquire() as conn:
# pylint: disable=no-value-for-parameter
- user_email = await self._get_user_email(conn, user_id)
+ user_email = await self._get_user_email(conn, project["prj_owner"])
return _convert_to_schema_names(project, user_email)
async def get_template_project(
@@ -360,7 +415,7 @@ async def get_template_project(
if prj and prj.template:
return prj.data
- template_prj = None
+ template_prj = {}
async with self.engine.acquire() as conn:
if only_published:
condition = and_(
@@ -386,19 +441,8 @@ async def get_template_project(
return template_prj
- async def get_project_workbench(self, project_uuid: str):
- async with self.engine.acquire() as conn:
- query = select([projects.c.workbench]).where(
- projects.c.uuid == project_uuid
- )
- result = await conn.execute(query)
- row = await result.first()
- if row:
- return row[projects.c.workbench]
- return {}
-
async def update_user_project(
- self, project_data: Dict, user_id: str, project_uuid: str
+ self, project_data: Dict, user_id: int, project_uuid: str
):
""" updates a project from a user
@@ -409,69 +453,50 @@ async def update_user_project(
row = await self._get_project(
user_id, project_uuid, exclude_foreign=["tags"]
)
-
+ user_groups: List[RowProxy] = await self.__load_user_groups(conn, user_id)
+ _check_project_permissions(row, user_id, user_groups, "write")
# uuid can ONLY be set upon creation
if row[projects.c.uuid.key] != project_data["uuid"]:
- # TODO: add message
raise ProjectInvalidRightsError(user_id, project_data["uuid"])
- # TODO: should also take ownership???
+ # ensure the prj owner is always in the access rights
+ owner_primary_gid = await self._get_user_primary_group_gid(
+ conn, row[projects.c.prj_owner.key]
+ )
+ project_data["accessRights"].update(
+ _create_project_access_rights(
+ owner_primary_gid, ProjectAccessRights.OWNER
+ )
+ )
# update timestamps
project_data["lastChangeDate"] = now_str()
-
# now update it
- # FIXME: E1120:No value for argument 'dml' in method call
- # pylint: disable=E1120
- query = (
+ result = await conn.execute(
+ # pylint: disable=no-value-for-parameter
projects.update()
.values(**_convert_to_db_names(project_data))
.where(projects.c.id == row[projects.c.id.key])
+ .returning(literal_column("*"))
+ )
+ project: RowProxy = await result.fetchone()
+ user_email = await self._get_user_email(conn, project.prj_owner)
+
+ tags = await self._get_tags_by_project(
+ conn, project_id=project[projects.c.id]
)
- await conn.execute(query)
+ return _convert_to_schema_names(project, user_email, tags=tags)
async def delete_user_project(self, user_id: int, project_uuid: str):
log.info("Deleting project %s for user %s", project_uuid, user_id)
+ project = await self._get_project(user_id, project_uuid)
async with self.engine.acquire() as conn:
- joint_table = user_to_projects.join(projects)
- query = (
- select([projects.c.id, user_to_projects.c.id], use_labels=True)
- .select_from(joint_table)
- .where(
- and_(
- projects.c.uuid == project_uuid,
- user_to_projects.c.user_id == user_id,
- )
- )
+ # if we have delete access we delete the project
+ user_groups: List[RowProxy] = await self.__load_user_groups(conn, user_id)
+ _check_project_permissions(project, user_id, user_groups, "delete")
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ projects.delete().where(projects.c.uuid == project_uuid)
)
- result = await conn.execute(query)
- # ensure we have found one
- rows = await result.fetchall()
-
- if not rows:
- # no project found
- raise ProjectNotFoundError(project_uuid)
-
- if len(rows) == 1:
- row = rows[0]
- # now let's delete the link to the user
- # FIXME: E1120:No value for argument 'dml' in method call
- # pylint: disable=E1120
- project_id = row[user_to_projects.c.id]
- log.info("will delete row with project_id %s", project_id)
- query = user_to_projects.delete().where(
- user_to_projects.c.id == project_id
- )
- await conn.execute(query)
-
- query = user_to_projects.select().where(
- user_to_projects.c.project_id == row[projects.c.id]
- )
- result = await conn.execute(query)
- remaining_users = await result.fetchall()
- if not remaining_users:
- # only delete project if there are no other user mapped
- query = projects.delete().where(projects.c.id == row[projects.c.id])
- await conn.execute(query)
async def make_unique_project_uuid(self) -> str:
""" Generates a project identifier still not used in database
@@ -494,20 +519,27 @@ async def make_unique_project_uuid(self) -> str:
break
return project_uuid
- async def _get_user_email(self, conn: SAConnection, user_id: Optional[str]) -> str:
+ async def _get_user_email(self, conn: SAConnection, user_id: Optional[int]) -> str:
if not user_id:
return "not_a_user@unknown.com"
- stmt = sa.select([users.c.email]).where(users.c.id == user_id)
- result: ResultProxy = await conn.execute(stmt)
- row: RowProxy = await result.first()
- return row[users.c.email] if row else "Unknown"
+ email: ResultProxy = await conn.scalar(
+ sa.select([users.c.email]).where(users.c.id == user_id)
+ )
+ return email or "Unknown"
+
+ async def _get_user_primary_group_gid(
+ self, conn: SAConnection, user_id: int
+ ) -> int:
+ primary_gid: int = await conn.scalar(
+ sa.select([users.c.primary_gid]).where(users.c.id == str(user_id))
+ )
+ return primary_gid
async def _get_tags_by_project(self, conn: SAConnection, project_id: str) -> List:
query = sa.select([study_tags.c.tag_id]).where(
study_tags.c.study_id == project_id
)
- rows = await (await conn.execute(query)).fetchall()
- return [row.tag_id for row in rows]
+ return [row.tag_id async for row in conn.execute(query)]
async def get_all_node_ids_from_workbenches(
self, project_uuid: str = None
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py
index 4fa7db1fd61..9507566d572 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py
@@ -13,25 +13,32 @@
from ..login.decorators import RQT_USERID_KEY, login_required
from ..resource_manager.websocket_manager import managed_resource
from ..security_api import check_permission
+from ..security_decorators import permission_required
from . import projects_api
from .projects_db import APP_PROJECT_DBAPI
from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError
-OVERRIDABLE_DOCUMENT_KEYS = ["name", "description", "thumbnail", "prjOwner", "accessRights"]
+OVERRIDABLE_DOCUMENT_KEYS = [
+ "name",
+ "description",
+ "thumbnail",
+ "prjOwner",
+ "accessRights",
+]
# TODO: validate these against api/specs/webserver/v0/components/schemas/project-v0.0.1.json
log = logging.getLogger(__name__)
@login_required
+@permission_required("project.create")
+@permission_required("services.pipeline.*") # due to update_pipeline_db
async def create_projects(request: web.Request):
from .projects_api import (
clone_project,
) # TODO: keep here since is async and parser thinks it is a handler
# pylint: disable=too-many-branches
- await check_permission(request, "project.create")
- await check_permission(request, "services.pipeline.*") # due to update_pipeline_db
user_id = request[RQT_USERID_KEY]
db = request.config_dict[APP_PROJECT_DBAPI]
@@ -63,12 +70,13 @@ async def create_projects(request: web.Request):
)
project = await clone_project(request, template_prj, user_id)
+ # remove template access rights
+ project["accessRights"] = {}
# FIXME: parameterized inputs should get defaults provided by service
# overrides with body
if request.has_body:
predefined = await request.json()
-
if project:
for key in OVERRIDABLE_DOCUMENT_KEYS:
non_null_value = predefined.get(key)
@@ -101,8 +109,8 @@ async def create_projects(request: web.Request):
@login_required
+@permission_required("project.read")
async def list_projects(request: web.Request):
- await check_permission(request, "project.read")
# TODO: implement all query parameters as
# in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html
@@ -138,12 +146,13 @@ async def list_projects(request: web.Request):
@login_required
+@permission_required("project.read")
async def get_project(request: web.Request):
""" Returns all projects accessible to a user (not necesarly owned)
"""
# TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead!
- await check_permission(request, "project.read")
+ user_id = request[RQT_USERID_KEY]
from .projects_api import get_project_for_user
project_uuid = request.match_info.get("project_id")
@@ -156,11 +165,16 @@ async def get_project(request: web.Request):
)
return {"data": project}
+ except ProjectInvalidRightsError:
+ raise web.HTTPForbidden(
+ reason=f"User {user_id} has no right to read {project_uuid}"
+ )
except ProjectNotFoundError:
raise web.HTTPNotFound(reason=f"Project {project_uuid} not found")
@login_required
+@permission_required("services.pipeline.*") # due to update_pipeline_db
async def replace_project(request: web.Request):
""" Implements PUT /projects
@@ -176,8 +190,6 @@ async def replace_project(request: web.Request):
:raises web.HTTPNotFound: cannot find project id in repository
"""
- await check_permission(request, "services.pipeline.*") # due to update_pipeline_db
-
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
replace_pipeline = request.query.get(
@@ -198,9 +210,19 @@ async def replace_project(request: web.Request):
)
try:
- projects_api.validate_project(request.app, new_project)
+        # TODO: temporarily hidden until the get_handlers_from_namespace refactor seeks marked functions instead!
+ from .projects_api import get_project_for_user
- await db.update_user_project(new_project, user_id, project_uuid)
+ projects_api.validate_project(request.app, new_project)
+ current_project = await get_project_for_user(
+ request.app,
+ project_uuid=project_uuid,
+ user_id=user_id,
+ include_templates=False,
+ )
+ if current_project["accessRights"] != new_project["accessRights"]:
+ await check_permission(request, "project.access_rights.update")
+ new_project = await db.update_user_project(new_project, user_id, project_uuid)
await update_pipeline_db(
request.app, project_uuid, new_project["workbench"], replace_pipeline
)
@@ -208,6 +230,10 @@ async def replace_project(request: web.Request):
except ValidationError:
raise web.HTTPBadRequest
+ except ProjectInvalidRightsError:
+ raise web.HTTPForbidden(
+ reason=f"User {user_id} has no rights to write to project {project_uuid}"
+ )
except ProjectNotFoundError:
raise web.HTTPNotFound
@@ -215,10 +241,8 @@ async def replace_project(request: web.Request):
@login_required
+@permission_required("project.delete")
async def delete_project(request: web.Request):
- # TODO: replace by decorator since it checks again authentication
- await check_permission(request, "project.delete")
-
# first check if the project exists
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
@@ -239,6 +263,10 @@ async def delete_project(request: web.Request):
raise web.HTTPForbidden(reason=message)
await projects_api.delete_project(request, project_uuid, user_id)
+ except ProjectInvalidRightsError:
+ raise web.HTTPForbidden(
+ reason=f"User {user_id} has no rights to delete project"
+ )
except ProjectNotFoundError:
raise web.HTTPNotFound(reason=f"Project {project_uuid} not found")
@@ -246,9 +274,8 @@ async def delete_project(request: web.Request):
@login_required
+@permission_required("project.open")
async def open_project(request: web.Request) -> web.Response:
- # TODO: replace by decorator since it checks again authentication
- await check_permission(request, "project.open")
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
@@ -275,10 +302,8 @@ async def open_project(request: web.Request) -> web.Response:
@login_required
+@permission_required("project.close")
async def close_project(request: web.Request) -> web.Response:
- # TODO: replace by decorator since it checks again authentication
- await check_permission(request, "project.close")
-
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
client_session_id = await request.json()
@@ -311,8 +336,8 @@ async def close_project(request: web.Request) -> web.Response:
@login_required
+@permission_required("project.read")
async def get_active_project(request: web.Request) -> web.Response:
- await check_permission(request, "project.read")
user_id = request[RQT_USERID_KEY]
client_session_id = request.query["client_session_id"]
@@ -338,9 +363,8 @@ async def get_active_project(request: web.Request) -> web.Response:
@login_required
+@permission_required("project.node.create")
async def create_node(request: web.Request) -> web.Response:
- # TODO: replace by decorator since it checks again authentication
- await check_permission(request, "project.node.create")
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
body = await request.json()
@@ -372,9 +396,8 @@ async def create_node(request: web.Request) -> web.Response:
@login_required
+@permission_required("project.node.read")
async def get_node(request: web.Request) -> web.Response:
- # TODO: replace by decorator since it checks again authentication
- await check_permission(request, "project.node.read")
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
node_uuid = request.match_info.get("node_id")
@@ -399,9 +422,8 @@ async def get_node(request: web.Request) -> web.Response:
@login_required
+@permission_required("project.node.delete")
async def delete_node(request: web.Request) -> web.Response:
- # TODO: replace by decorator since it checks again authentication
- await check_permission(request, "project.node.delete")
user_id = request[RQT_USERID_KEY]
project_uuid = request.match_info.get("project_id")
node_uuid = request.match_info.get("node_id")
@@ -427,8 +449,8 @@ async def delete_node(request: web.Request) -> web.Response:
@login_required
+@permission_required("project.tag.*")
async def add_tag(request: web.Request):
- await check_permission(request, "project.tag.*")
uid, db = request[RQT_USERID_KEY], request.config_dict[APP_PROJECT_DBAPI]
tag_id, study_uuid = (
request.match_info.get("tag_id"),
@@ -438,8 +460,8 @@ async def add_tag(request: web.Request):
@login_required
+@permission_required("project.tag.*")
async def remove_tag(request: web.Request):
- await check_permission(request, "project.tag.*")
uid, db = request[RQT_USERID_KEY], request.config_dict[APP_PROJECT_DBAPI]
tag_id, study_uuid = (
request.match_info.get("tag_id"),
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_models.py b/services/web/server/src/simcore_service_webserver/projects/projects_models.py
index 82d7e2e6191..3952c803d07 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_models.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_models.py
@@ -5,11 +5,9 @@
from simcore_postgres_database.webserver_models import (
ProjectType,
projects,
- user_to_projects,
)
__all__ = [
"projects",
"ProjectType",
- "user_to_projects",
]
diff --git a/services/web/server/src/simcore_service_webserver/publication_handlers.py b/services/web/server/src/simcore_service_webserver/publication_handlers.py
index 6720b2451d0..74b041c58f6 100644
--- a/services/web/server/src/simcore_service_webserver/publication_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/publication_handlers.py
@@ -25,7 +25,7 @@ async def service_submission(request: web.Request):
# Read multipart email
while True:
- part = await reader.next() # pylint: disable=not-callable
+ part = await reader.next() # pylint: disable=not-callable
if part is None:
break
if part.headers[hdrs.CONTENT_TYPE] == "application/json":
diff --git a/services/web/server/src/simcore_service_webserver/security_decorators.py b/services/web/server/src/simcore_service_webserver/security_decorators.py
new file mode 100644
index 00000000000..c1663fb22f7
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/security_decorators.py
@@ -0,0 +1,28 @@
+from functools import wraps
+from typing import Callable
+
+from aiohttp import web
+
+from .security_api import check_permission
+
+
+def permission_required(permissions: str):
+ """Decorator that checks whether user permissions are fulfilled.
+ The function will throw an exception in case of disallowance.
+
+    :param handler: the handler function to wrap; it must take a web.Request as its parameter
+ If user is not authorized - raises HTTPUnauthorized,
+ if user is authorized and does not have permission -
+ raises HTTPForbidden.
+ """
+
+ def decorator(handler: Callable):
+ @wraps(handler)
+ async def wrapped(request: web.Request):
+ await check_permission(request, permissions)
+ ret = await handler(request)
+ return ret
+
+ return wrapped
+
+ return decorator
diff --git a/services/web/server/src/simcore_service_webserver/security_roles.py b/services/web/server/src/simcore_service_webserver/security_roles.py
index 3478c54a166..368fafde3ab 100644
--- a/services/web/server/src/simcore_service_webserver/security_roles.py
+++ b/services/web/server/src/simcore_service_webserver/security_roles.py
@@ -47,6 +47,7 @@
"project.create", # "studies.user.create",
"project.close",
"project.delete", # "study.node.create",
+ "project.access_rights.update",
# "study.node.delete",
# "study.node.rename",
# "study.edge.create",
@@ -54,25 +55,21 @@
"project.node.create",
"project.node.delete",
"project.tag.*", # "study.tag"
- "user.profile.update", # "preferences.user.update",
- # "preferences.role.update"
- "user.apikey.*", # "preferences.apikey.create",
- # "preferences.apikey.delete"
- "user.tokens.*", # "preferences.token.create",
- # "preferences.token.delete"
- "tag.crud.*" # "preferences.tag"
+ "user.profile.update", # "user.user.update",
+ # "user.role.update"
+ "user.apikey.*", # "user.apikey.create",
+ # "user.apikey.delete"
+ "user.tokens.*", # "user.token.create",
+ # "user.token.delete"
+ "groups.*",
+ "tag.crud.*" # "user.tag"
# NOTE: All services* are not necessary since it only requires login
# and there is no distinction among logged in users.
# TODO: kept temporarily as a way to denote resources
],
"inherits": [UserRole.GUEST, UserRole.ANONYMOUS],
},
- UserRole.TESTER: {
- "can": [
- "project.template.create"
- ],
- "inherits": [UserRole.USER]
- },
+ UserRole.TESTER: {"can": ["project.template.create"], "inherits": [UserRole.USER]},
}
#
@@ -91,11 +88,11 @@
### "studies.user.read",
### "studies.user.create",
### "storage.datcore.read",
-### "preferences.user.update",
-### "preferences.apikey.create",
-### "preferences.apikey.delete",
-### "preferences.token.create",
-### "preferences.token.delete",
+### "user.user.update",
+### "user.apikey.create",
+### "user.apikey.delete",
+### "user.token.create",
+### "user.token.delete",
### "study.node.create",
### "study.node.delete",
### "study.node.rename",
@@ -107,7 +104,7 @@
# ],
# "tester": [
# "services.all.read", <----------???
-### "preferences.role.update",
+### "user.role.update",
# "study.nodestree.uuid.read", <----------???
# "study.logger.debug.read" <----------???
# ],
diff --git a/services/web/server/src/simcore_service_webserver/users_api.py b/services/web/server/src/simcore_service_webserver/users_api.py
index 136544c9b43..1679a7033e6 100644
--- a/services/web/server/src/simcore_service_webserver/users_api.py
+++ b/services/web/server/src/simcore_service_webserver/users_api.py
@@ -1,57 +1,96 @@
-
import logging
-from typing import Dict, List, Tuple
+from typing import Any, Dict, List
import sqlalchemy as sa
from aiohttp import web
from aiopg.sa.result import RowProxy
+from sqlalchemy import and_, literal_column
from servicelib.application_keys import APP_DB_ENGINE_KEY
from simcore_postgres_database.models.users import UserRole
from simcore_service_webserver.login.cfg import get_storage
-from .db_models import groups, user_to_groups, GroupType
+from .db_models import GroupType, groups, tokens, user_to_groups, users
+from .groups_api import convert_groups_db_to_schema
+from .users_utils import convert_user_db_to_schema
+from .users_exceptions import UserNotFoundError
logger = logging.getLogger(__name__)
-def _convert_to_schema(db_row: RowProxy) -> Dict:
- return {
- "gid": db_row["gid"],
- "label": db_row["name"],
- "description": db_row["description"],
- }
-
-async def list_user_groups(
- app: web.Application, user_id: str
-) -> Tuple[Dict[str, str], List[Dict[str, str]], Dict[str, str]]:
- """returns the user groups
- Returns:
- Tuple[List[Dict[str, str]]] -- [returns the user primary group, groups and all group]
- """
+async def get_user_profile(app: web.Application, user_id: int) -> Dict[str, Any]:
engine = app[APP_DB_ENGINE_KEY]
- primary_group = {}
- user_groups = []
- all_group = {}
+ user_profile: Dict[str, Any] = {}
+ user_primary_group = all_group = {}
+ user_standard_groups = []
async with engine.acquire() as conn:
- query = (
+ async for row in conn.execute(
sa.select(
- [groups.c.gid, groups.c.name, groups.c.description, groups.c.type]
+ [users, groups, user_to_groups.c.access_rights,], use_labels=True,
)
.select_from(
- user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid),
+ users.join(
+ user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid),
+ users.c.id == user_to_groups.c.uid,
+ )
)
- .where(user_to_groups.c.uid == user_id)
- )
- async for row in conn.execute(query):
- if row["type"] == GroupType.EVERYONE:
- all_group = _convert_to_schema(row)
- elif row["type"] == GroupType.PRIMARY:
- primary_group = _convert_to_schema(row)
+ .where(users.c.id == user_id)
+ .order_by(sa.asc(groups.c.name))
+ ):
+ user_profile.update(convert_user_db_to_schema(row, prefix="users_"))
+ if row["groups_type"] == GroupType.EVERYONE:
+ all_group = convert_groups_db_to_schema(
+ row,
+ prefix="groups_",
+ access_rights=row["user_to_groups_access_rights"],
+ )
+ elif row["groups_type"] == GroupType.PRIMARY:
+ user_primary_group = convert_groups_db_to_schema(
+ row,
+ prefix="groups_",
+ access_rights=row["user_to_groups_access_rights"],
+ )
else:
- user_groups.append(_convert_to_schema(row))
+ user_standard_groups.append(
+ convert_groups_db_to_schema(
+ row,
+ prefix="groups_",
+ access_rights=row["user_to_groups_access_rights"],
+ )
+ )
+ if not user_profile:
+ raise UserNotFoundError(uid=user_id)
+
+ user_profile["groups"] = {
+ "me": user_primary_group,
+ "organizations": user_standard_groups,
+ "all": all_group,
+ }
+ return user_profile
+
+
+async def update_user_profile(
+ app: web.Application, user_id: int, profile: Dict
+) -> None:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ default_name = await conn.scalar(
+ sa.select([users.c.name]).where(users.c.id == user_id)
+ )
+ parts = default_name.split(".") + [""]
+ name = (
+ profile.get("first_name", parts[0])
+ + "."
+ + profile.get("last_name", parts[1])
+ )
+ resp = await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ users.update()
+ .where(users.c.id == user_id)
+ .values(name=name)
+ )
+ assert resp.rowcount == 1 # nosec
- return (primary_group, user_groups, all_group)
async def is_user_guest(app: web.Application, user_id: int) -> bool:
"""Returns True if the user exists and is a GUEST"""
@@ -61,7 +100,7 @@ async def is_user_guest(app: web.Application, user_id: int) -> bool:
logger.warning("Could not find user with id '%s'", user_id)
return False
- return UserRole(user["role"]) == UserRole.GUEST
+ return bool(UserRole(user["role"]) == UserRole.GUEST)
async def delete_user(app: web.Application, user_id: int) -> None:
@@ -75,3 +114,85 @@ async def delete_user(app: web.Application, user_id: int) -> None:
return
await db.delete_user(user)
+
+
+# TOKEN -------------------------------------------
+async def create_token(
+ app: web.Application, user_id: int, token_data: Dict[str, str]
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ tokens.insert().values(
+ user_id=user_id,
+ token_service=token_data["service"],
+ token_data=token_data,
+ )
+ )
+ return token_data
+
+
+async def list_tokens(app: web.Application, user_id: int) -> List[Dict[str, str]]:
+ engine = app[APP_DB_ENGINE_KEY]
+ user_tokens = []
+ async with engine.acquire() as conn:
+ async for row in conn.execute(
+ sa.select([tokens.c.token_data]).where(tokens.c.user_id == user_id)
+ ):
+ user_tokens.append(row["token_data"])
+ return user_tokens
+
+
+async def get_token(
+ app: web.Application, user_id: int, service_id: str
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ result = await conn.execute(
+ sa.select([tokens.c.token_data]).where(
+ and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id)
+ )
+ )
+ row: RowProxy = await result.first()
+ return dict(row["token_data"])
+
+
+async def update_token(
+ app: web.Application, user_id: int, service_id: str, token_data: Dict[str, str]
+) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ # TODO: optimize to a single call?
+ async with engine.acquire() as conn:
+ result = await conn.execute(
+ sa.select([tokens.c.token_data, tokens.c.token_id]).where(
+ and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id)
+ )
+ )
+ row = await result.first()
+
+ data = dict(row["token_data"])
+ tid = row["token_id"]
+ data.update(token_data)
+
+ resp = await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ tokens.update()
+ .where(tokens.c.token_id == tid)
+ .values(token_data=data)
+ .returning(literal_column("*"))
+ )
+ assert resp.rowcount == 1 # nosec
+ updated_token: RowProxy = await resp.fetchone()
+ return dict(updated_token["token_data"])
+
+
+async def delete_token(app: web.Application, user_id: int, service_id: str) -> None:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ await conn.execute(
+ # pylint: disable=no-value-for-parameter
+ tokens.delete().where(
+ and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id)
+ )
+ )
diff --git a/services/web/server/src/simcore_service_webserver/users_exceptions.py b/services/web/server/src/simcore_service_webserver/users_exceptions.py
new file mode 100644
index 00000000000..fe04503749d
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/users_exceptions.py
@@ -0,0 +1,29 @@
+"""Defines the different exceptions that may arise in the users subpackage"""
+
+from typing import Optional
+
+
+class UsersException(Exception):
+    """Basic exception for errors raised in the users subpackage"""
+
+ def __init__(self, msg: str = None):
+        super().__init__(msg or "Unexpected error occurred in the users subpackage")
+
+
+class UserNotFoundError(UsersException):
+ """User in group was not found in DB"""
+
+ def __init__(self, *, uid: Optional[int] = None, email: Optional[str] = None):
+ super().__init__(
+ f"User id {uid} not found" if uid else f"User with email {email} not found"
+ )
+ self.uid = uid
+ self.email = email
+
+
+class TokenNotFoundError(UsersException):
+ """Token was not found in DB"""
+
+ def __init__(self, service_id: str):
+ super().__init__(f"Token for service {service_id} not found")
+ self.service_id = service_id
diff --git a/services/web/server/src/simcore_service_webserver/users_handlers.py b/services/web/server/src/simcore_service_webserver/users_handlers.py
index b27e48001f3..b2eacf6fbfc 100644
--- a/services/web/server/src/simcore_service_webserver/users_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/users_handlers.py
@@ -2,22 +2,16 @@
import json
import logging
-from typing import List
-import sqlalchemy as sa
-import sqlalchemy.sql as sql
from aiohttp import web
-from aiopg.sa import Engine
-from aiopg.sa.result import RowProxy
-from tenacity import retry
-from servicelib.aiopg_utils import PostgresRetryPolicyUponOperation
-from servicelib.application_keys import APP_DB_ENGINE_KEY
-
-from .db_models import GroupType, groups, tokens, user_to_groups, users
+from . import users_api
from .login.decorators import RQT_USERID_KEY, login_required
-from .security_api import check_permission
-from .utils import gravatar_hash
+from .security_decorators import permission_required
+from .users_exceptions import (
+ TokenNotFoundError,
+ UserNotFoundError,
+)
logger = logging.getLogger(__name__)
@@ -26,219 +20,86 @@
@login_required
async def get_my_profile(request: web.Request):
# NOTE: ONLY login required to see its profile. E.g. anonymous can never see its profile
-
- @retry(**PostgresRetryPolicyUponOperation(logger).kwargs)
- async def _query_db(uid: str, engine: Engine) -> List[RowProxy]:
- async with engine.acquire() as conn:
- query = (
- sa.select(
- [
- users.c.email,
- users.c.role,
- users.c.name,
- users.c.primary_gid,
- groups.c.gid,
- groups.c.name,
- groups.c.description,
- groups.c.type,
- ],
- use_labels=True,
- )
- .select_from(
- users.join(
- user_to_groups.join(
- groups, user_to_groups.c.gid == groups.c.gid
- ),
- users.c.id == user_to_groups.c.uid,
- )
- )
- .where(users.c.id == uid)
- .order_by(sa.asc(groups.c.name))
- )
- result = await conn.execute(query)
- return await result.fetchall()
-
- # here we get all user_group combinations but only the group changes
- user_groups: List[RowProxy] = await _query_db(
- uid=request[RQT_USERID_KEY], engine=request.app[APP_DB_ENGINE_KEY]
- )
-
- if not user_groups:
+ uid = request[RQT_USERID_KEY]
+ try:
+ return await users_api.get_user_profile(request.app, uid)
+ except UserNotFoundError:
raise web.HTTPServerError(reason="could not find profile!")
- # get the primary group and the all group
- user_primary_group = all_group = {}
- other_groups = []
- for user_group in user_groups:
- if user_group["users_primary_gid"] == user_group["groups_gid"]:
- user_primary_group = user_group
- elif user_group["groups_type"] == GroupType.EVERYONE:
- all_group = user_group
- else:
- other_groups.append(user_group)
-
- parts = user_primary_group["users_name"].split(".") + [""]
- return {
- "login": user_primary_group["users_email"],
- "first_name": parts[0],
- "last_name": parts[1],
- "role": user_primary_group["users_role"].name.capitalize(),
- "gravatar_id": gravatar_hash(user_primary_group["users_email"]),
- "groups": {
- "me": {
- "gid": user_primary_group["groups_gid"],
- "label": user_primary_group["groups_name"],
- "description": user_primary_group["groups_description"],
- },
- "organizations": [
- {
- "gid": group["groups_gid"],
- "label": group["groups_name"],
- "description": group["groups_description"],
- }
- for group in other_groups
- ],
- "all": {
- "gid": all_group["groups_gid"],
- "label": all_group["groups_name"],
- "description": all_group["groups_description"],
- },
- },
- }
-
@login_required
+@permission_required("user.profile.update")
async def update_my_profile(request: web.Request):
- await check_permission(request, "user.profile.update")
-
- uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY]
+ uid = request[RQT_USERID_KEY]
# TODO: validate
body = await request.json()
- async with engine.acquire() as conn:
- query = sa.select([users.c.name]).where(users.c.id == uid)
- default_name = await conn.scalar(query)
- parts = default_name.split(".") + [""]
-
- name = body.get("first_name", parts[0]) + "." + body.get("last_name", parts[1])
-
- async with engine.acquire() as conn:
- query = users.update().where(users.c.id == uid).values(name=name)
- resp = await conn.execute(query)
- assert resp.rowcount == 1 # nosec
-
+ await users_api.update_user_profile(request.app, uid, body)
raise web.HTTPNoContent(content_type="application/json")
# me/tokens/ ------------------------------------------------------
@login_required
+@permission_required("user.tokens.*")
async def create_tokens(request: web.Request):
- await check_permission(request, "user.tokens.*")
-
- uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY]
+ uid = request[RQT_USERID_KEY]
# TODO: validate
body = await request.json()
# TODO: what it service exists already!?
# TODO: if service already, then IntegrityError is raised! How to deal with db exceptions??
- async with engine.acquire() as conn:
- stmt = tokens.insert().values(
- user_id=uid, token_service=body["service"], token_data=body
- )
- await conn.execute(stmt)
-
- raise web.HTTPCreated(
- text=json.dumps({"data": body}), content_type="application/json"
- )
+ await users_api.create_token(request.app, uid, body)
+ raise web.HTTPCreated(
+ text=json.dumps({"data": body}), content_type="application/json"
+ )
@login_required
+@permission_required("user.tokens.*")
async def list_tokens(request: web.Request):
- await check_permission(request, "user.tokens.*")
-
# TODO: start = request.match_info.get('start', 0)
# TODO: count = request.match_info.get('count', None)
- uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY]
-
- user_tokens = []
- async with engine.acquire() as conn:
- query = sa.select([tokens.c.token_data]).where(tokens.c.user_id == uid)
- async for row in conn.execute(query):
- user_tokens.append(row["token_data"])
-
- return user_tokens
+ uid = request[RQT_USERID_KEY]
+ return await users_api.list_tokens(request.app, uid)
@login_required
+@permission_required("user.tokens.*")
async def get_token(request: web.Request):
- await check_permission(request, "user.tokens.*")
-
- uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY]
+ uid = request[RQT_USERID_KEY]
service_id = request.match_info["service"]
- async with engine.acquire() as conn:
- query = sa.select([tokens.c.token_data]).where(
- sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id)
- )
- result = await conn.execute(query)
- row = await result.first()
- return row["token_data"]
+ return await users_api.get_token(request.app, uid, service_id)
@login_required
+@permission_required("user.tokens.*")
async def update_token(request: web.Request):
""" updates token_data of a given user service
WARNING: token_data has to be complete!
"""
- await check_permission(request, "user.tokens.*")
-
- uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY]
+ uid = request[RQT_USERID_KEY]
service_id = request.match_info["service"]
# TODO: validate
body = await request.json()
- # TODO: optimize to a single call?
- async with engine.acquire() as conn:
- query = sa.select([tokens.c.token_data, tokens.c.token_id]).where(
- sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id)
- )
- result = await conn.execute(query)
- row = await result.first()
-
- data = dict(row["token_data"])
- tid = row["token_id"]
- data.update(body)
-
- query = tokens.update().where(tokens.c.token_id == tid).values(token_data=data)
- resp = await conn.execute(query)
- assert resp.rowcount == 1 # nosec
+ await users_api.update_token(request.app, uid, service_id, body)
raise web.HTTPNoContent(content_type="application/json")
@login_required
+@permission_required("user.tokens.*")
async def delete_token(request: web.Request):
- await check_permission(request, "user.tokens.*")
-
- uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY]
+ uid = request[RQT_USERID_KEY]
service_id = request.match_info.get("service")
- async with engine.acquire() as conn:
- query = tokens.delete().where(
- sql.and_(tokens.c.user_id == uid, tokens.c.token_service == service_id)
- )
- await conn.execute(query)
-
- raise web.HTTPNoContent(content_type="application/json")
-
-
-# @login_required
-# async def list_groups(request: web.Request) -> List[Dict[str, str]]:
-# await check_permission(request, "user.groups.*")
-# uid = request[RQT_USERID_KEY]
-# primary_group, user_groups, all_group = users_api.list_user_groups(request.app, uid)
-# return {"me": primary_group, "organizations": user_groups, "all": all_group}
+ try:
+ await users_api.delete_token(request.app, uid, service_id)
+ raise web.HTTPNoContent(content_type="application/json")
+ except TokenNotFoundError:
+ raise web.HTTPNotFound(reason=f"Token for {service_id} not found")
diff --git a/services/web/server/src/simcore_service_webserver/users_utils.py b/services/web/server/src/simcore_service_webserver/users_utils.py
new file mode 100644
index 00000000000..cc7af5cb18a
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/users_utils.py
@@ -0,0 +1,22 @@
+import logging
+from typing import Dict, Optional
+
+from aiopg.sa.result import RowProxy
+
+from .utils import gravatar_hash
+
+logger = logging.getLogger(__name__)
+
+
+def convert_user_db_to_schema(
+ row: RowProxy, prefix: Optional[str] = ""
+) -> Dict[str, str]:
+ parts = row[f"{prefix}name"].split(".") + [""]
+ return {
+ "id": row[f"{prefix}id"],
+ "login": row[f"{prefix}email"],
+ "first_name": parts[0],
+ "last_name": parts[1],
+ "role": row[f"{prefix}role"].name.capitalize(),
+ "gravatar_id": gravatar_hash(row[f"{prefix}email"]),
+ }
diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py
index 4df31d2dcca..1625de18c10 100644
--- a/services/web/server/tests/integration/conftest.py
+++ b/services/web/server/tests/integration/conftest.py
@@ -14,19 +14,26 @@
import logging
import sys
+from asyncio import Future
from copy import deepcopy
from pathlib import Path
from pprint import pprint
-from typing import Dict
-from asyncio import Future
+from typing import Dict, List
import pytest
import trafaret_config
import yaml
from pytest_simcore.helpers.utils_docker import get_service_published_port
+from pytest_simcore.helpers.utils_login import NewUser
from simcore_service_webserver.application_config import app_schema
from simcore_service_webserver.cli import create_environ
+from simcore_service_webserver.groups_api import (
+ add_user_in_group,
+ create_user_group,
+ delete_user_group,
+ list_user_groups,
+)
from simcore_service_webserver.resources import resources as app_resources
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
@@ -169,3 +176,57 @@ def mock_orphaned_services(mocker):
)
remove_orphaned_services.return_value.set_result("")
return remove_orphaned_services
+
+
+@pytest.fixture
+async def primary_group(client, logged_user) -> Dict[str, str]:
+ primary_group, _, _ = await list_user_groups(client.app, logged_user["id"])
+ return primary_group
+
+
+@pytest.fixture
+async def standard_groups(client, logged_user: Dict) -> List[Dict[str, str]]:
+ # create a separate admin account to create some standard groups for the logged user
+ sparc_group = {
+ "gid": "5", # this will be replaced
+ "label": "SPARC",
+ "description": "Stimulating Peripheral Activity to Relieve Conditions",
+ "thumbnail": "https://commonfund.nih.gov/sites/default/files/sparc-image-homepage500px.png",
+ }
+ team_black_group = {
+ "gid": "5", # this will be replaced
+ "label": "team Black",
+ "description": "THE incredible black team",
+ "thumbnail": None,
+ }
+ async with NewUser(
+ {"name": f"{logged_user['name']}_admin", "role": "USER"}, client.app
+ ) as admin_user:
+ sparc_group = await create_user_group(client.app, admin_user["id"], sparc_group)
+ team_black_group = await create_user_group(
+ client.app, admin_user["id"], team_black_group
+ )
+ await add_user_in_group(
+ client.app,
+ admin_user["id"],
+ sparc_group["gid"],
+ new_user_id=logged_user["id"],
+ )
+ await add_user_in_group(
+ client.app,
+ admin_user["id"],
+ team_black_group["gid"],
+ new_user_email=logged_user["email"],
+ )
+
+ _, standard_groups, _ = await list_user_groups(client.app, logged_user["id"])
+ yield standard_groups
+ # clean groups
+ await delete_user_group(client.app, admin_user["id"], sparc_group["gid"])
+ await delete_user_group(client.app, admin_user["id"], team_black_group["gid"])
+
+
+@pytest.fixture
+async def all_group(client, logged_user) -> Dict[str, str]:
+ _, _, all_group = await list_user_groups(client.app, logged_user["id"])
+ return all_group
diff --git a/services/web/server/tests/integration/test_project_workflow.py b/services/web/server/tests/integration/test_project_workflow.py
index cfd06181bbd..e40f0df35e8 100644
--- a/services/web/server/tests/integration/test_project_workflow.py
+++ b/services/web/server/tests/integration/test_project_workflow.py
@@ -50,7 +50,7 @@ def client(
loop,
mock_orphaned_services,
aiohttp_client,
- app_config, ## waits until swarm with *_services are up
+ app_config, # waits until swarm with *_services are up
):
assert app_config["rest"]["version"] == API_VERSION
@@ -227,6 +227,8 @@ async def test_workflow(
client,
fake_project_data,
logged_user,
+ primary_group: Dict[str, str],
+ standard_groups: List[Dict[str, str]],
computational_system_mock,
storage_subsystem_mock,
):
@@ -241,8 +243,11 @@ async def test_workflow(
projects = await _request_list(client)
assert len(projects) == 1
for key in projects[0].keys():
- if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate"):
+ if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate", "accessRights"):
assert projects[0][key] == fake_project_data[key]
+ assert projects[0]["prjOwner"] == logged_user["email"]
+ assert projects[0]["accessRights"] == {
+ str(primary_group["gid"]): {"read": True, "write": True, "delete": True}}
modified_project = deepcopy(projects[0])
modified_project["name"] = "some other name"
@@ -251,6 +256,9 @@ async def test_workflow(
list(modified_project["workbench"].keys())[0]
)
modified_project["workbench"]["ReNamed"]["position"]["x"] = 0
+ # share with some group
+ modified_project["accessRights"].update(
+ {str(standard_groups[0]["gid"]): {"read": True, "write": True, "delete": False}})
# modify
pid = modified_project["uuid"]
await _request_update(client, modified_project, pid)
@@ -291,14 +299,16 @@ async def test_get_invalid_project(client, logged_user):
async def test_update_invalid_project(client, logged_user):
- url = client.app.router["replace_project"].url_for(project_id="some-fake-id")
+ url = client.app.router["replace_project"].url_for(
+ project_id="some-fake-id")
resp = await client.get(url)
await assert_status(resp, web.HTTPNotFound)
async def test_delete_invalid_project(client, logged_user):
- url = client.app.router["delete_project"].url_for(project_id="some-fake-id")
+ url = client.app.router["delete_project"].url_for(
+ project_id="some-fake-id")
resp = await client.delete(url)
await assert_status(resp, web.HTTPNotFound)
diff --git a/services/web/server/tests/unit/test_projects_models.py b/services/web/server/tests/unit/test_projects_models.py
index 75111351261..052f10726ce 100644
--- a/services/web/server/tests/unit/test_projects_models.py
+++ b/services/web/server/tests/unit/test_projects_models.py
@@ -78,6 +78,8 @@ def create_engine(mock_result):
mock_connection = mocker.patch("aiopg.sa.SAConnection", spec=True)
mock_connection.execute.return_value = Future()
mock_connection.execute.return_value.set_result(mock_result)
+ mock_connection.scalar.return_value = Future()
+ mock_connection.scalar.return_value.set_result(mock_result)
mock_context_manager = MockAsyncContextManager()
mock_context_manager.mock_object = mock_connection
@@ -103,8 +105,8 @@ async def test_add_projects(fake_project, user_id, mocker, mock_db_engine):
await db.add_projects([fake_project], user_id=user_id)
db_engine.acquire.assert_called()
+ mock_connection.scalar.assert_called()
mock_connection.execute.assert_called()
- assert mock_connection.execute.call_count == 3
# not sure this is useful...
diff --git a/services/web/server/tests/unit/with_dbs/config-devel.yml b/services/web/server/tests/unit/with_dbs/config-devel.yml
index 15b6ffa7950..31e1d3f3861 100644
--- a/services/web/server/tests/unit/with_dbs/config-devel.yml
+++ b/services/web/server/tests/unit/with_dbs/config-devel.yml
@@ -67,7 +67,7 @@ rest:
reverse_proxy:
enabled: false
session:
- secret_key: 'TODO: Replace with a key of at least length 32'
+ secret_key: "TODO: Replace with a key of at least length 32"
smtp:
host: mail.foo.com
password: null
@@ -97,4 +97,6 @@ tracing:
zipkin_endpoint: http://jaeger:9411
users:
enabled: true
-version: '1.0'
+groups:
+ enabled: true
+version: "1.0"
diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py
index a4376dda928..eefc459f8c0 100644
--- a/services/web/server/tests/unit/with_dbs/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/conftest.py
@@ -27,13 +27,19 @@
import simcore_service_webserver.db_models as orm
import simcore_service_webserver.utils
+from pytest_simcore.helpers.utils_login import NewUser
from servicelib.aiopg_utils import DSN
from servicelib.rest_responses import unwrap_envelope
from simcore_service_webserver.application import create_application
from simcore_service_webserver.application_config import app_schema as app_schema
-from simcore_service_webserver.users_api import list_user_groups
-
-## current directory
+from simcore_service_webserver.groups_api import (
+ add_user_in_group,
+ create_user_group,
+ delete_user_group,
+ list_user_groups,
+)
+
+# current directory
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
@@ -351,9 +357,45 @@ async def primary_group(client, logged_user) -> Dict[str, str]:
@pytest.fixture
-async def standard_groups(client, logged_user) -> List[Dict[str, str]]:
- _, standard_groups, _ = await list_user_groups(client.app, logged_user["id"])
- return standard_groups
+async def standard_groups(client, logged_user: Dict) -> List[Dict[str, str]]:
+ # create a separate admin account to create some standard groups for the logged user
+ sparc_group = {
+ "gid": "5", # this will be replaced
+ "label": "SPARC",
+ "description": "Stimulating Peripheral Activity to Relieve Conditions",
+ "thumbnail": "https://commonfund.nih.gov/sites/default/files/sparc-image-homepage500px.png",
+ }
+ team_black_group = {
+ "gid": "5", # this will be replaced
+ "label": "team Black",
+ "description": "THE incredible black team",
+ "thumbnail": None,
+ }
+ async with NewUser(
+ {"name": f"{logged_user['name']}_admin", "role": "USER"}, client.app
+ ) as admin_user:
+ sparc_group = await create_user_group(client.app, admin_user["id"], sparc_group)
+ team_black_group = await create_user_group(
+ client.app, admin_user["id"], team_black_group
+ )
+ await add_user_in_group(
+ client.app,
+ admin_user["id"],
+ sparc_group["gid"],
+ new_user_id=logged_user["id"],
+ )
+ await add_user_in_group(
+ client.app,
+ admin_user["id"],
+ team_black_group["gid"],
+ new_user_email=logged_user["email"],
+ )
+
+ _, standard_groups, _ = await list_user_groups(client.app, logged_user["id"])
+ yield standard_groups
+ # clean groups
+ await delete_user_group(client.app, admin_user["id"], sparc_group["gid"])
+ await delete_user_group(client.app, admin_user["id"], team_black_group["gid"])
@pytest.fixture
diff --git a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml
index 534b32a8edc..3771f537596 100644
--- a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml
+++ b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml
@@ -1,4 +1,4 @@
-version: '3.4'
+version: "3.7"
services:
postgres:
image: postgres:10.11
@@ -9,7 +9,7 @@ services:
POSTGRES_PASSWORD: ${TEST_POSTGRES_PASSWORD:-admin}
POSTGRES_DB: ${TEST_POSTGRES_DB:-test}
ports:
- - '5432:5432'
+ - "5432:5432"
# NOTES: this is not yet compatible with portainer deployment but could work also for other containers
# works with Docker 19.03 and not yet with Portainer 1.23.0 (see https://github.com/portainer/portainer/issues/3551)
# in the meantime postgres allows to set a configuration through CLI.
@@ -31,7 +31,7 @@ services:
redis:
image: redis:5.0-alpine
ports:
- - '6379:6379'
+ - "6379:6379"
redis-commander:
init: true
image: rediscommander/redis-commander:latest
diff --git a/services/web/server/tests/unit/with_dbs/docker-compose.yml b/services/web/server/tests/unit/with_dbs/docker-compose.yml
index 72c8d43851c..f7ed5638d04 100644
--- a/services/web/server/tests/unit/with_dbs/docker-compose.yml
+++ b/services/web/server/tests/unit/with_dbs/docker-compose.yml
@@ -1,4 +1,4 @@
-version: '3.4'
+version: "3.7"
services:
postgres:
image: postgres:10.11
@@ -8,12 +8,12 @@ services:
POSTGRES_USER: ${TEST_POSTGRES_USER}
POSTGRES_PASSWORD: ${TEST_POSTGRES_PASSWORD}
ports:
- - '5432:5432'
+ - "5432:5432"
# NOTES: this is not yet compatible with portainer deployment but could work also for other containers
# works with Docker 19.03 and not yet with Portainer 1.23.0 (see https://github.com/portainer/portainer/issues/3551)
# in the meantime postgres allows to set a configuration through CLI.
# sysctls:
- # # NOTES: these values are needed here because docker swarm kills long running idle
+ # # NOTES: these values are needed here because docker swarm kills long running idle
# # connections by default after 15 minutes see https://github.com/moby/moby/issues/31208
# # info about these values are here https://tldp.org/HOWTO/TCP-Keepalive-HOWTO/usingkeepalive.html
# - net.ipv4.tcp_keepalive_intvl=600
@@ -24,4 +24,4 @@ services:
redis:
image: redis:5.0-alpine
ports:
- - '6379:6379'
+ - "6379:6379"
diff --git a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
index 464c6b0fcc6..ba4e50b9764 100644
--- a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
+++ b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
@@ -154,7 +154,7 @@ async def _get_user_projects(client):
def _assert_same_projects(got: Dict, expected: Dict):
# TODO: validate using api/specs/webserver/v0/components/schemas/project-v0.0.1.json
# TODO: validate workbench!
- exclude = ["creationDate", "lastChangeDate", "prjOwner", "uuid", "workbench"]
+ exclude = ["creationDate", "lastChangeDate", "prjOwner", "uuid", "workbench", "accessRights"]
for key in expected.keys():
if key not in exclude:
assert got[key] == expected[key], "Failed in %s" % key
diff --git a/services/web/server/tests/unit/with_dbs/test_groups.py b/services/web/server/tests/unit/with_dbs/test_groups.py
new file mode 100644
index 00000000000..d6da3bb01d3
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/test_groups.py
@@ -0,0 +1,569 @@
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+
+import random
+from copy import deepcopy
+from typing import Dict, List, Tuple
+
+import pytest
+from aiohttp import web
+
+from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.utils_login import LoggedUser, create_user
+from servicelib.application import create_safe_application
+from simcore_service_webserver.db import setup_db
+from simcore_service_webserver.groups import setup_groups
+from simcore_service_webserver.groups_api import (
+ DEFAULT_GROUP_OWNER_ACCESS_RIGHTS,
+ DEFAULT_GROUP_READ_ACCESS_RIGHTS,
+)
+from simcore_service_webserver.login import setup_login
+from simcore_service_webserver.rest import setup_rest
+from simcore_service_webserver.security import setup_security
+from simcore_service_webserver.security_roles import UserRole
+from simcore_service_webserver.session import setup_session
+from simcore_service_webserver.users import setup_users
+
+## BUG FIXES #######################################################
+from simcore_service_webserver.utils import gravatar_hash
+
+API_VERSION = "v0"
+
+
+@pytest.fixture
+def client(loop, aiohttp_client, app_cfg, postgres_service):
+ cfg = deepcopy(app_cfg)
+
+ port = cfg["main"]["port"]
+
+ assert cfg["rest"]["version"] == API_VERSION
+
+ cfg["db"]["init_tables"] = True # inits postgres_service
+
+ # fake config
+ app = create_safe_application(cfg)
+
+ setup_db(app)
+ setup_session(app)
+ setup_security(app)
+ setup_rest(app)
+ setup_login(app)
+ setup_users(app)
+ setup_groups(app)
+
+ client = loop.run_until_complete(
+ aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
+ )
+ return client
+
+
+# WARNING: pytest-asyncio and pytest-aiohttp are not compatible
+#
+# https://github.com/aio-libs/pytest-aiohttp/issues/8#issuecomment-405602020
+# https://github.com/pytest-dev/pytest-asyncio/issues/76
+#
+
+
+@pytest.fixture
+async def logged_user(client, role: UserRole):
+ """ adds a user in db and logs in with client
+
+ NOTE: role fixture is defined as a parametrization below
+ """
+ async with LoggedUser(
+ client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS
+ ) as user:
+ yield user
+
+
+# --------------------------------------------------------------------------
+PREFIX = "/" + API_VERSION + "/groups"
+
+
+def _assert_group(group: Dict[str, str]):
+ properties = ["gid", "label", "description", "thumbnail", "access_rights"]
+ assert all(x in group for x in properties)
+ access_rights = group["access_rights"]
+ access_rights_properties = ["read", "write", "delete"]
+ assert all(x in access_rights for x in access_rights_properties)
+
+
+def _assert__group_user(
+ expected_user: Dict, expected_access_rights: Dict[str, bool], actual_user: Dict
+):
+ assert "first_name" in actual_user
+ parts = expected_user["name"].split(".") + [""]
+ assert actual_user["first_name"] == parts[0]
+ assert "last_name" in actual_user
+ assert actual_user["last_name"] == parts[1]
+ assert "login" in actual_user
+ assert actual_user["login"] == expected_user["email"]
+ assert "gravatar_id" in actual_user
+ assert actual_user["gravatar_id"] == gravatar_hash(expected_user["email"])
+ assert "access_rights" in actual_user
+ assert actual_user["access_rights"] == expected_access_rights
+ assert "id" in actual_user
+ assert actual_user["id"] == expected_user["id"]
+ assert "gid" in actual_user
+
+
+@pytest.mark.parametrize(
+ "role,expected",
+ [
+ (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+ (UserRole.GUEST, web.HTTPForbidden),
+ (UserRole.USER, web.HTTPOk),
+ (UserRole.TESTER, web.HTTPOk),
+ ],
+)
+async def test_list_groups(
+ client,
+ logged_user,
+ role,
+ expected,
+ primary_group: Dict[str, str],
+ standard_groups: List[Dict[str, str]],
+ all_group: Dict[str, str],
+):
+ url = client.app.router["list_groups"].url_for()
+ assert str(url) == f"{PREFIX}"
+
+ resp = await client.get(url)
+ data, error = await assert_status(resp, expected)
+
+ if not error:
+ assert isinstance(data, dict)
+ assert "me" in data
+ _assert_group(data["me"])
+ assert data["me"] == primary_group
+
+ assert "organizations" in data
+ assert isinstance(data["organizations"], list)
+ for group in data["organizations"]:
+ _assert_group(group)
+ assert data["organizations"] == standard_groups
+ assert "all" in data
+ _assert_group(data["all"])
+ assert data["all"] == all_group
+
+
+def _standard_role_response() -> Tuple[
+ str, List[Tuple[UserRole, web.Response, web.Response, web.Response]]
+]:
+ return (
+ "role,expected_ok, expected_created, expected_no_contents, expected_not_found",
+ [
+ (
+ UserRole.ANONYMOUS,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ ),
+ (
+ UserRole.GUEST,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ ),
+ (
+ UserRole.USER,
+ web.HTTPOk,
+ web.HTTPCreated,
+ web.HTTPNoContent,
+ web.HTTPNotFound,
+ ),
+ (
+ UserRole.TESTER,
+ web.HTTPOk,
+ web.HTTPCreated,
+ web.HTTPNoContent,
+ web.HTTPNotFound,
+ ),
+ ],
+ )
+
+
@pytest.mark.parametrize(
    "role,expected",
    [
        (UserRole.ANONYMOUS, web.HTTPUnauthorized),
        (UserRole.GUEST, web.HTTPForbidden),
        (UserRole.USER, web.HTTPOk),
        (UserRole.TESTER, web.HTTPOk),
    ],
)
async def test_group_access_rights(
    client,
    logged_user,
    role,
    expected,
    primary_group: Dict[str, str],
    standard_groups: List[Dict[str, str]],
    all_group: Dict[str, str],
):
    """Lists the user's groups, then verifies that mutating operations on the
    standard groups (delete group, add/update/remove users) are forbidden.

    The fixtures presumably grant the logged user only limited rights on the
    standard groups — TODO confirm against the standard_groups fixture.
    """
    url = client.app.router["list_groups"].url_for()
    assert str(url) == f"{PREFIX}"

    resp = await client.get(url)
    data, error = await assert_status(resp, expected)

    # same sanity checks as test_list_groups: the listing must match fixtures
    if not error:
        assert isinstance(data, dict)
        assert "me" in data
        _assert_group(data["me"])
        assert data["me"] == primary_group

        assert "organizations" in data
        assert isinstance(data["organizations"], list)
        for group in data["organizations"]:
            _assert_group(group)
        assert data["organizations"] == standard_groups
        assert "all" in data
        _assert_group(data["all"])
        assert data["all"] == all_group

        # every mutating call on a standard group must be rejected with 403
        for group in standard_groups:
            # try to delete a group
            url = client.app.router["delete_group"].url_for(gid=str(group["gid"]))
            resp = await client.delete(url)
            data, error = await assert_status(resp, web.HTTPForbidden)
            # try to add some user in the group
            url = client.app.router["add_group_user"].url_for(gid=str(group["gid"]))
            resp = await client.post(url, json={"uid": logged_user["id"]})
            data, error = await assert_status(resp, web.HTTPForbidden)
            # try to modify the user in the group
            url = client.app.router["update_group_user"].url_for(
                gid=str(group["gid"]), uid=str(logged_user["id"])
            )
            resp = await client.patch(
                url,
                json={"access_rights": {"read": True, "write": True, "delete": True}},
            )
            data, error = await assert_status(resp, web.HTTPForbidden)
            # try to remove the user from the group
            url = client.app.router["delete_group_user"].url_for(
                gid=str(group["gid"]), uid=str(logged_user["id"])
            )
            resp = await client.delete(url)
            data, error = await assert_status(resp, web.HTTPForbidden)
+
+
@pytest.mark.parametrize(
    "role,expected,expected_read,expected_delete,expected_not_found",
    [
        (
            UserRole.ANONYMOUS,
            web.HTTPUnauthorized,
            web.HTTPUnauthorized,
            web.HTTPUnauthorized,
            web.HTTPUnauthorized,
        ),
        (
            UserRole.GUEST,
            web.HTTPForbidden,
            web.HTTPForbidden,
            web.HTTPForbidden,
            web.HTTPForbidden,
        ),
        (
            UserRole.USER,
            web.HTTPCreated,
            web.HTTPOk,
            web.HTTPNoContent,
            web.HTTPNotFound,
        ),
        (
            UserRole.TESTER,
            web.HTTPCreated,
            web.HTTPOk,
            web.HTTPNoContent,
            web.HTTPNotFound,
        ),
    ],
)
async def test_group_creation_workflow(
    client,
    logged_user,
    role,
    expected,
    expected_read,
    expected_delete,
    expected_not_found,
):
    """Full lifecycle of a user-created group: create, list, get, patch,
    delete, then verify delete/get on the deleted group return not-found."""
    url = client.app.router["create_group"].url_for()
    assert str(url) == f"{PREFIX}"

    # the client-sent gid must NOT be honored by the server (checked below)
    new_group = {
        "gid": "4564",
        "label": "Black Sabbath",
        "description": "The founders of Rock'N'Roll",
        "thumbnail": "https://www.startpage.com/av/proxy-image?piurl=https%3A%2F%2Fencrypted-tbn0.gstatic.com%2Fimages%3Fq%3Dtbn%3AANd9GcS3pAUISv_wtYDL9Ih4JtUfAWyHj9PkYMlEBGHJsJB9QlTZuuaK%26s&sp=1591105967T00f0b7ff95c7b3bca035102fa1ead205ab29eb6cd95acedcedf6320e64634f0c",
    }

    resp = await client.post(url, json=new_group)
    data, error = await assert_status(resp, expected)

    # on error (unauthorized/forbidden roles) keep the request payload so the
    # follow-up requests still have a gid to build URLs with
    assigned_group = new_group
    if not error:
        assert isinstance(data, dict)
        assigned_group = data
        _assert_group(assigned_group)
        # we get a new gid and the rest keeps the same
        assert assigned_group["gid"] != new_group["gid"]
        for prop in ["label", "description", "thumbnail"]:
            assert assigned_group[prop] == new_group[prop]
        # we get all rights on the group since we are the creator
        assert assigned_group["access_rights"] == {
            "read": True,
            "write": True,
            "delete": True,
        }

    # get the groups and check we are part of this new group
    url = client.app.router["list_groups"].url_for()
    assert str(url) == f"{PREFIX}"

    resp = await client.get(url)
    data, error = await assert_status(resp, expected_read)
    if not error:
        assert len(data["organizations"]) == 1
        assert data["organizations"][0] == assigned_group

    # check getting one group
    url = client.app.router["get_group"].url_for(gid=str(assigned_group["gid"]))
    assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
    resp = await client.get(url)
    data, error = await assert_status(resp, expected_read)
    if not error:
        assert data == assigned_group

    # modify the group
    modified_group = {"label": "Led Zeppelin"}
    url = client.app.router["update_group"].url_for(gid=str(assigned_group["gid"]))
    assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
    resp = await client.patch(url, json=modified_group)
    data, error = await assert_status(resp, expected_read)
    if not error:
        assert data != assigned_group
        _assert_group(data)
        # fold the patch into our local copy to compare against the response
        assigned_group.update(**modified_group)
        assert data == assigned_group
    # check getting the group returns the newly modified group
    url = client.app.router["get_group"].url_for(gid=str(assigned_group["gid"]))
    assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
    resp = await client.get(url)
    data, error = await assert_status(resp, expected_read)
    if not error:
        _assert_group(data)
        assert data == assigned_group

    # delete the group
    url = client.app.router["delete_group"].url_for(gid=str(assigned_group["gid"]))
    assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
    resp = await client.delete(url)
    data, error = await assert_status(resp, expected_delete)
    if not error:
        assert not data

    # check deleting the same group again fails
    url = client.app.router["delete_group"].url_for(gid=str(assigned_group["gid"]))
    assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
    resp = await client.delete(url)
    data, error = await assert_status(resp, expected_not_found)

    # check getting the group fails
    url = client.app.router["get_group"].url_for(gid=str(assigned_group["gid"]))
    assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
    resp = await client.get(url)
    data, error = await assert_status(resp, expected_not_found)
+
+
@pytest.mark.parametrize(
    "role, expected_created,expected,expected_not_found,expected_no_content",
    [
        (
            UserRole.ANONYMOUS,
            web.HTTPUnauthorized,
            web.HTTPUnauthorized,
            web.HTTPUnauthorized,
            web.HTTPUnauthorized,
        ),
        (
            UserRole.GUEST,
            web.HTTPForbidden,
            web.HTTPForbidden,
            web.HTTPForbidden,
            web.HTTPForbidden,
        ),
        (
            UserRole.USER,
            web.HTTPCreated,
            web.HTTPOk,
            web.HTTPNotFound,
            web.HTTPNoContent,
        ),
        (
            UserRole.TESTER,
            web.HTTPCreated,
            web.HTTPOk,
            web.HTTPNotFound,
            web.HTTPNoContent,
        ),
    ],
)
async def test_add_remove_users_from_group(
    client,
    logged_user,
    role,
    expected_created,
    expected,
    expected_not_found,
    expected_no_content,
):
    """Creates a group, then adds a random number of users to it (alternating
    uid/email identification), checks the membership listing, patches each
    member's access rights, and finally removes each member again."""

    new_group = {
        "gid": "5",
        "label": "team awesom",
        "description": "awesomeness is just the summary",
        "thumbnail": "https://www.startpage.com/av/proxy-image?piurl=https%3A%2F%2Fencrypted-tbn0.gstatic.com%2Fimages%3Fq%3Dtbn%3AANd9GcSQMopBeN0pq2gg6iIZuLGYniFxUdzi7a2LeT1Xg0Lz84bl36Nlqw%26s&sp=1591110539Tbbb022a272bc117e58cca2f2399e83e6b5d4a2d0a7c283330057d7718ae305bd",
    }

    # check that our group does not exist
    url = client.app.router["get_group_users"].url_for(gid=new_group["gid"])
    assert str(url) == f"{PREFIX}/{new_group['gid']}/users"
    resp = await client.get(url)
    data, error = await assert_status(resp, expected_not_found)

    url = client.app.router["create_group"].url_for()
    assert str(url) == f"{PREFIX}"

    resp = await client.post(url, json=new_group)
    data, error = await assert_status(resp, expected_created)

    # on error keep the request payload so follow-up URLs can still be built
    assigned_group = new_group
    if not error:
        assert isinstance(data, dict)
        assigned_group = data
        _assert_group(assigned_group)
        # we get a new gid and the rest keeps the same
        assert assigned_group["gid"] != new_group["gid"]
        for prop in ["label", "description", "thumbnail"]:
            assert assigned_group[prop] == new_group[prop]
        # we get all rights on the group since we are the creator
        assert assigned_group["access_rights"] == {
            "read": True,
            "write": True,
            "delete": True,
        }

    # check that our user is in the group of users
    get_group_users_url = client.app.router["get_group_users"].url_for(
        gid=str(assigned_group["gid"])
    )
    assert str(get_group_users_url) == f"{PREFIX}/{assigned_group['gid']}/users"
    resp = await client.get(get_group_users_url)
    data, error = await assert_status(resp, expected)

    if not error:
        list_of_users = data
        assert len(list_of_users) == 1
        the_owner = list_of_users[0]
        _assert__group_user(logged_user, DEFAULT_GROUP_OWNER_ACCESS_RIGHTS, the_owner)

    # create a random number of users and put them in the group
    add_group_user_url = client.app.router["add_group_user"].url_for(
        gid=str(assigned_group["gid"])
    )
    assert str(add_group_user_url) == f"{PREFIX}/{assigned_group['gid']}/users"
    num_new_users = random.randint(1, 10)
    created_users_list = []
    for i in range(num_new_users):
        created_users_list.append(await create_user())

        # add the user once per email once per id to test both
        params = (
            {"uid": created_users_list[i]["id"]}
            if i % 2 == 0
            else {"email": created_users_list[i]["email"]}
        )
        resp = await client.post(add_group_user_url, json=params)
        data, error = await assert_status(resp, expected_no_content)

        get_group_user_url = client.app.router["get_group_user"].url_for(
            gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
        )
        assert (
            str(get_group_user_url)
            == f"{PREFIX}/{assigned_group['gid']}/users/{created_users_list[i]['id']}"
        )
        resp = await client.get(get_group_user_url)
        data, error = await assert_status(resp, expected)
        if not error:
            # newly added members get read-only rights by default
            _assert__group_user(
                created_users_list[i], DEFAULT_GROUP_READ_ACCESS_RIGHTS, data
            )
    # check list is correct
    resp = await client.get(get_group_users_url)
    data, error = await assert_status(resp, expected)
    if not error:
        list_of_users = data
        # now we should have all the users in the group + the owner
        all_created_users = created_users_list + [logged_user]
        assert len(list_of_users) == len(all_created_users)
        for actual_user in list_of_users:
            # default arg `ac=actual_user` binds the loop variable eagerly,
            # avoiding the classic late-binding closure pitfall
            expected_users_list = list(
                filter(
                    lambda x, ac=actual_user: x["email"] == ac["login"],
                    all_created_users,
                )
            )
            assert len(expected_users_list) == 1
            _assert__group_user(
                expected_users_list[0],
                DEFAULT_GROUP_READ_ACCESS_RIGHTS
                if actual_user["login"] != logged_user["email"]
                else DEFAULT_GROUP_OWNER_ACCESS_RIGHTS,
                actual_user,
            )
            # pop matched users so duplicates would be detected
            all_created_users.remove(expected_users_list[0])

    # modify the user and remove them from the group
    MANAGER_ACCESS_RIGHTS = {"read": True, "write": True, "delete": False}
    for i in range(num_new_users):
        update_group_user_url = client.app.router["update_group_user"].url_for(
            gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
        )
        resp = await client.patch(
            update_group_user_url, json={"access_rights": MANAGER_ACCESS_RIGHTS}
        )
        data, error = await assert_status(resp, expected)
        if not error:
            _assert__group_user(created_users_list[i], MANAGER_ACCESS_RIGHTS, data)
        # check it is there
        get_group_user_url = client.app.router["get_group_user"].url_for(
            gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
        )
        resp = await client.get(get_group_user_url)
        data, error = await assert_status(resp, expected)
        if not error:
            _assert__group_user(created_users_list[i], MANAGER_ACCESS_RIGHTS, data)
        # remove the user from the group
        delete_group_user_url = client.app.router["delete_group_user"].url_for(
            gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
        )
        resp = await client.delete(delete_group_user_url)
        data, error = await assert_status(resp, expected_no_content)
        # do it again to check it is not found anymore
        resp = await client.delete(delete_group_user_url)
        data, error = await assert_status(resp, expected_not_found)

        # check it is not there anymore
        get_group_user_url = client.app.router["get_group_user"].url_for(
            gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
        )
        resp = await client.get(get_group_user_url)
        data, error = await assert_status(resp, expected_not_found)
diff --git a/services/web/server/tests/unit/with_dbs/test_projects.py b/services/web/server/tests/unit/with_dbs/test_projects.py
index 7780dd096de..26976d1abe0 100644
--- a/services/web/server/tests/unit/with_dbs/test_projects.py
+++ b/services/web/server/tests/unit/with_dbs/test_projects.py
@@ -2,30 +2,27 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
-import collections
-import json
import uuid as uuidlib
from asyncio import Future, sleep
from copy import deepcopy
-from pathlib import Path
-from typing import Dict, List
+from typing import Dict, List, Optional
import pytest
from aiohttp import web
from mock import call
-from yarl import URL
from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser
+from pytest_simcore.helpers.utils_login import LoggedUser, log_client_in
from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects
from servicelib.application import create_safe_application
-from servicelib.application_keys import APP_CONFIG_KEY
-from servicelib.rest_responses import unwrap_envelope
from simcore_service_webserver.db import setup_db
from simcore_service_webserver.db_models import UserRole
from simcore_service_webserver.director import setup_director
from simcore_service_webserver.login import setup_login
from simcore_service_webserver.projects import setup_projects
+from simcore_service_webserver.projects.projects_handlers import (
+ OVERRIDABLE_DOCUMENT_KEYS,
+)
from simcore_service_webserver.resource_manager import setup_resource_manager
from simcore_service_webserver.rest import setup_rest
from simcore_service_webserver.security import setup_security
@@ -125,6 +122,22 @@ async def logged_user(client, user_role: UserRole):
print("<----- logged out user", user_role)
@pytest.fixture()
async def logged_user2(client, user_role: UserRole):
    """Adds a user in db and logs in with client.

    Mirrors the `logged_user` fixture to provide a second logged-in account.
    NOTE: `user_role` fixture is defined as a parametrization below!!!
    """
    async with LoggedUser(
        client,
        {"role": user_role.name},
        # ANONYMOUS cannot actually log in, so skip the success check then
        check_if_succeeds=user_role != UserRole.ANONYMOUS,
    ) as user:
        print("-----> logged in user", user_role)
        yield user
        print("<----- logged out user", user_role)
+
@pytest.fixture
async def user_project(client, fake_project, logged_user):
async with NewProject(
@@ -142,7 +155,9 @@ async def template_project(
project_data = deepcopy(fake_project)
project_data["name"] = "Fake template"
project_data["uuid"] = "d4d0eca3-d210-4db6-84f9-63670b07176b"
- project_data["accessRights"] = {str(all_group["gid"]): "rw"}
+ project_data["accessRights"] = {
+ str(all_group["gid"]): {"read": True, "write": False, "delete": False}
+ }
async with NewProject(
project_data, client.app, user_id=None, clear_all=True
@@ -171,6 +186,12 @@ def create_fakes(number_services: int) -> List[Dict]:
yield create_fakes
@pytest.fixture
async def project_db_cleaner(client):
    """Teardown-only fixture: wipes every project from the db after the test."""
    yield
    await delete_all_projects(client.app)
+
def assert_replaced(current_project, update_data):
def _extract(dikt, keys):
return {k: dikt[k] for k in keys}
@@ -186,6 +207,20 @@ def _extract(dikt, keys):
assert to_datetime(update_data[k]) < to_datetime(current_project[k])
async def _list_projects(
    client, expected: web.Response, query_parameters: Optional[Dict] = None
) -> List[Dict]:
    """GET /v0/projects (optionally filtered by *query_parameters*), assert
    the response status, and return the enveloped data."""
    list_url = client.app.router["list_projects"].url_for()
    assert str(list_url) == API_PREFIX + "/projects"

    target = list_url.with_query(**query_parameters) if query_parameters else list_url
    response = await client.get(target)
    projects, _ = await assert_status(response, expected)
    return projects
+
+
# GET --------
@pytest.mark.parametrize(
"user_role,expected",
@@ -199,36 +234,40 @@ def _extract(dikt, keys):
async def test_list_projects(
client, logged_user, user_project, template_project, expected,
):
- # TODO: GET /v0/projects?start=0&count=3
-
- # GET /v0/projects
- url = client.app.router["list_projects"].url_for()
- assert str(url) == API_PREFIX + "/projects"
-
- resp = await client.get(url)
- data, errors = await assert_status(resp, expected)
-
- if not errors:
+ data = await _list_projects(client, expected)
+ if data:
assert len(data) == 2
assert data[0] == template_project
assert data[1] == user_project
# GET /v0/projects?type=user
- resp = await client.get(url.with_query(type="user"))
- data, errors = await assert_status(resp, expected)
- if not errors:
+ data = await _list_projects(client, expected, {"type": "user"})
+ if data:
assert len(data) == 1
assert data[0] == user_project
# GET /v0/projects?type=template
# instead /v0/projects/templates ??
- resp = await client.get(url.with_query(type="template"))
- data, errors = await assert_status(resp, expected)
- if not errors:
+ data = await _list_projects(client, expected, {"type": "template"})
+ if data:
assert len(data) == 1
assert data[0] == template_project
async def _get_project(client, project: Dict, expected: web.Response) -> Dict:
    """GET /v0/projects/{project_id}, assert the status and, on success,
    that the returned document equals *project*. Returns the response data."""
    project_uuid = project["uuid"]
    get_url = client.app.router["get_project"].url_for(project_id=project_uuid)
    assert str(get_url) == f"{API_PREFIX}/projects/{project_uuid}"

    response = await client.get(get_url)
    payload, error = await assert_status(response, expected)
    if error:
        return payload

    assert payload == project
    return payload
+
+
@pytest.mark.parametrize(
"user_role,expected",
[
@@ -241,25 +280,95 @@ async def test_list_projects(
async def test_get_project(
client, logged_user, user_project, template_project, expected,
):
- # GET /v0/projects/{project_id}
+ await _get_project(client, user_project, expected)
- # with a project owned by user
- url = client.app.router["get_project"].url_for(project_id=user_project["uuid"])
+ # with a template
+ await _get_project(client, template_project, expected)
- resp = await client.get(url)
- data, error = await assert_status(resp, expected)
- if not error:
- assert data == user_project
+async def _new_project(
+ client,
+ expected_response: web.Response,
+ logged_user: Dict[str, str],
+ primary_group: Dict[str, str],
+ *,
+ project: Optional[Dict] = None,
+ from_template: Optional[Dict] = None,
+) -> Dict:
+ # POST /v0/projects
+ url = client.app.router["create_projects"].url_for()
+ assert str(url) == f"{API_PREFIX}/projects"
+ if from_template:
+ url = url.with_query(from_template=from_template["uuid"])
- # with a template
- url = client.app.router["get_project"].url_for(project_id=template_project["uuid"])
+ # Pre-defined fields imposed by required properties in schema
+ project_data = {}
+ expected_data = {}
+ if from_template:
+ # access rights are replaced
+ expected_data = deepcopy(from_template)
+ expected_data["accessRights"] = {}
+
+ if not from_template or project:
+ project_data = {
+ "uuid": "0000000-invalid-uuid",
+ "name": "Minimal name",
+ "description": "this description should not change",
+ "prjOwner": "me but I will be removed anyway",
+ "creationDate": now_str(),
+ "lastChangeDate": now_str(),
+ "thumbnail": "",
+ "accessRights": {},
+ "workbench": {},
+ "tags": [],
+ }
+ if project:
+ project_data.update(project)
- resp = await client.get(url)
- data, error = await assert_status(resp, expected)
+ for key in project_data:
+ expected_data[key] = project_data[key]
+ if (
+ key in OVERRIDABLE_DOCUMENT_KEYS
+ and not project_data[key]
+ and from_template
+ ):
+ expected_data[key] = from_template[key]
+ resp = await client.post(url, json=project_data)
+
+ new_project, error = await assert_status(resp, expected_response)
if not error:
- assert data == template_project
+ # updated fields
+ assert expected_data["uuid"] != new_project["uuid"]
+ assert (
+ new_project["prjOwner"] == logged_user["email"]
+ ) # the project owner is assigned the user id e-mail
+ assert to_datetime(expected_data["creationDate"]) < to_datetime(
+ new_project["creationDate"]
+ )
+ assert to_datetime(expected_data["lastChangeDate"]) < to_datetime(
+ new_project["lastChangeDate"]
+ )
+ # the access rights are set to use the logged user primary group + whatever was inside the project
+ expected_data["accessRights"].update(
+ {str(primary_group["gid"]): {"read": True, "write": True, "delete": True}}
+ )
+ assert new_project["accessRights"] == expected_data["accessRights"]
+
+ # invariant fields
+ modified_fields = [
+ "uuid",
+ "prjOwner",
+ "creationDate",
+ "lastChangeDate",
+ "accessRights",
+ "workbench" if from_template else None,
+ ]
+
+ for key in new_project.keys():
+ if key not in modified_fields:
+ assert expected_data[key] == new_project[key]
+ return new_project
# POST --------
@@ -273,53 +382,15 @@ async def test_get_project(
],
)
async def test_new_project(
- client, logged_user, expected, computational_system_mock, storage_subsystem_mock,
+ client,
+ logged_user,
+ primary_group,
+ expected,
+ computational_system_mock,
+ storage_subsystem_mock,
+ project_db_cleaner,
):
- # POST /v0/projects
- url = client.app.router["create_projects"].url_for()
- assert str(url) == API_PREFIX + "/projects"
-
- # Pre-defined fields imposed by required properties in schema
- default_project = {
- "uuid": "0000000-invalid-uuid",
- "name": "Minimal name",
- "description": "this description should not change",
- "prjOwner": "me but I will be removed anyway",
- "creationDate": now_str(),
- "lastChangeDate": now_str(),
- "thumbnail": "",
- "accessRights": {"12": "some rights"},
- "workbench": {},
- "tags": [],
- }
-
- resp = await client.post(url, json=default_project)
-
- data, error = await assert_status(resp, expected)
-
- if not error:
- new_project = data
-
- # updated fields
- assert default_project["uuid"] != new_project["uuid"]
- assert default_project["prjOwner"] != logged_user["email"]
- assert new_project["prjOwner"] == logged_user["email"]
- assert to_datetime(default_project["creationDate"]) < to_datetime(
- new_project["creationDate"]
- )
-
- # invariant fields
- for key in new_project.keys():
- if key not in ("uuid", "prjOwner", "creationDate", "lastChangeDate"):
- assert default_project[key] == new_project[key]
-
- # TODO: validate response using OAS?
- # FIXME: cannot delete user until project is deleted. See cascade or too coupled??
- # i.e. removing a user, removes all its projects!!
-
- # asyncpg.exceptions.ForeignKeyViolationError: update or delete on table "users"
- # violates foreign key constraint "user_to_projects_user_id_fkey" on table "user_to_projects"
- await delete_all_projects(client.app)
+ new_project = await _new_project(client, expected, logged_user, primary_group)
@pytest.mark.parametrize(
@@ -334,44 +405,20 @@ async def test_new_project(
async def test_new_project_from_template(
client,
logged_user,
+ primary_group: Dict[str, str],
template_project,
expected,
computational_system_mock,
storage_subsystem_mock,
+ project_db_cleaner,
):
- # POST /v0/projects?from_template={template_uuid}
- url = (
- client.app.router["create_projects"]
- .url_for()
- .with_query(from_template=template_project["uuid"])
+ new_project = await _new_project(
+ client, expected, logged_user, primary_group, from_template=template_project
)
- resp = await client.post(url)
-
- data, error = await assert_status(resp, expected)
-
- if not error:
- project = data
- modified = ["prjOwner", "creationDate", "lastChangeDate", "uuid"]
-
- # different ownership
- assert project["prjOwner"] == logged_user["email"]
- assert project["prjOwner"] != template_project["prjOwner"]
- assert project["accessRights"] == template_project["accessRights"]
-
- # different timestamps
- assert to_datetime(template_project["creationDate"]) < to_datetime(
- project["creationDate"]
- )
- assert to_datetime(template_project["lastChangeDate"]) < to_datetime(
- project["lastChangeDate"]
- )
-
- # different uuids for project and nodes!?
- assert project["uuid"] != template_project["uuid"]
-
+ if new_project:
# check uuid replacement
- for node_name in project["workbench"]:
+ for node_name in new_project["workbench"]:
try:
uuidlib.UUID(node_name)
except ValueError:
@@ -390,18 +437,14 @@ async def test_new_project_from_template(
async def test_new_project_from_template_with_body(
client,
logged_user,
+ primary_group: Dict[str, str],
+ standard_groups: List[Dict[str, str]],
template_project,
expected,
computational_system_mock,
storage_subsystem_mock,
+ project_db_cleaner,
):
- # POST /v0/projects?from_template={template_uuid}
- url = (
- client.app.router["create_projects"]
- .url_for()
- .with_query(from_template=template_project["uuid"])
- )
-
predefined = {
"uuid": "",
"name": "Sleepers8",
@@ -410,39 +453,30 @@ async def test_new_project_from_template_with_body(
"prjOwner": "",
"creationDate": "2019-06-03T09:59:31.987Z",
"lastChangeDate": "2019-06-03T09:59:31.987Z",
- "accessRights": {"123": "some new access rights"},
+ "accessRights": {
+ str(standard_groups[0]["gid"]): {
+ "read": True,
+ "write": True,
+ "delete": False,
+ }
+ },
"workbench": {},
"tags": [],
}
+ project = await _new_project(
+ client,
+ expected,
+ logged_user,
+ primary_group,
+ project=predefined,
+ from_template=template_project,
+ )
- resp = await client.post(url, json=predefined)
-
- data, error = await assert_status(resp, expected)
-
- if not error:
- project = data
-
+ if project:
# uses predefined
assert project["name"] == predefined["name"]
assert project["description"] == predefined["description"]
- modified = ["prjOwner", "creationDate", "lastChangeDate", "uuid"]
-
- # different ownership
- assert project["prjOwner"] == logged_user["email"]
- assert project["prjOwner"] != template_project["prjOwner"]
- # different access rights
- assert project["accessRights"] != template_project["accessRights"]
- assert project["accessRights"] == predefined["accessRights"]
-
- # different timestamps
- assert to_datetime(template_project["creationDate"]) < to_datetime(
- project["creationDate"]
- )
- assert to_datetime(template_project["lastChangeDate"]) < to_datetime(
- project["lastChangeDate"]
- )
-
# different uuids for project and nodes!?
assert project["uuid"] != template_project["uuid"]
@@ -466,10 +500,13 @@ async def test_new_project_from_template_with_body(
async def test_new_template_from_project(
client,
logged_user,
+ primary_group: Dict[str, str],
+ all_group: Dict[str, str],
user_project,
expected,
computational_system_mock,
storage_subsystem_mock,
+ project_db_cleaner,
):
# POST /v0/projects?as_template={user_uuid}
url = (
@@ -484,9 +521,7 @@ async def test_new_template_from_project(
if not error:
template_project = data
- url = client.app.router["list_projects"].url_for().with_query(type="template")
- resp = await client.get(url)
- templates, _ = await assert_status(resp, web.HTTPOk)
+ templates = await _list_projects(client, web.HTTPOk, {"type": "template"})
assert len(templates) == 1
assert templates[0] == template_project
@@ -524,7 +559,9 @@ async def test_new_template_from_project(
"creationDate": "2019-06-03T09:59:31.987Z",
"lastChangeDate": "2019-06-03T09:59:31.987Z",
"workbench": {},
- "accessRights": {"12": "rwx"},
+ "accessRights": {
+ str(all_group["gid"]): {"read": True, "write": False, "delete": False},
+ },
"tags": [],
}
@@ -537,16 +574,12 @@ async def test_new_template_from_project(
assert template_project["name"] == predefined["name"]
assert template_project["description"] == predefined["description"]
assert template_project["prjOwner"] == logged_user["email"]
+ # the logged in user access rights are added by default
+ predefined["accessRights"].update(
+ {str(primary_group["gid"]): {"read": True, "write": True, "delete": True}}
+ )
assert template_project["accessRights"] == predefined["accessRights"]
- modified = [
- "prjOwner",
- "creationDate",
- "lastChangeDate",
- "uuid",
- "accessRights",
- ]
-
# different ownership
assert template_project["prjOwner"] == logged_user["email"]
assert template_project["prjOwner"] == user_project["prjOwner"]
@@ -570,30 +603,162 @@ async def test_new_template_from_project(
pytest.fail("Invalid uuid in workbench node {}".format(node_name))
-# PUT --------
@pytest.mark.parametrize(
- "user_role,expected",
+ "user_role,expected_created,expected_ok,expected_notfound,expected_nocontents,expected_forbidden",
[
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPOk),
- (UserRole.USER, web.HTTPOk),
- (UserRole.TESTER, web.HTTPOk),
+ (
+ UserRole.ANONYMOUS,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ ),
+ (
+ UserRole.GUEST,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPNotFound,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ ),
+ (
+ UserRole.USER,
+ web.HTTPCreated,
+ web.HTTPOk,
+ web.HTTPNotFound,
+ web.HTTPNoContent,
+ web.HTTPForbidden,
+ ),
+ (
+ UserRole.TESTER,
+ web.HTTPCreated,
+ web.HTTPOk,
+ web.HTTPNotFound,
+ web.HTTPNoContent,
+ web.HTTPForbidden,
+ ),
],
)
-async def test_replace_project(
- client, logged_user, user_project, expected, computational_system_mock,
+@pytest.mark.parametrize(
+ "share_rights",
+ [
+ {"read": True, "write": True, "delete": True},
+ {"read": True, "write": True, "delete": False},
+ {"read": True, "write": False, "delete": False},
+ {"read": False, "write": False, "delete": False},
+ ],
+)
+async def test_share_project(
+ client,
+ logged_user,
+ primary_group: Dict[str, str],
+ standard_groups: List[Dict[str, str]],
+ all_group: Dict[str, str],
+ user_role,
+ expected_created,
+ expected_ok,
+ expected_notfound,
+ expected_nocontents,
+ expected_forbidden,
+ storage_subsystem_mock,
+ mocked_director_subsystem,
+ computational_system_mock,
+ share_rights,
+ project_db_cleaner,
):
- # PUT /v0/projects/{project_id}
- url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"])
+ # Use-case: the user shares some projects with a group
- project_update = deepcopy(user_project)
- project_update["description"] = "some updated from original project!!!"
+ # create a few projects
+ new_project = await _new_project(
+ client,
+ expected_created,
+ logged_user,
+ primary_group,
+ project={"accessRights": {str(all_group["gid"]): share_rights}},
+ )
+ if new_project:
+ assert new_project["accessRights"] == {
+ str(primary_group["gid"]): {"read": True, "write": True, "delete": True},
+ str(all_group["gid"]): share_rights,
+ }
+
+ # user 1 can always get to his project
+ await _get_project(client, new_project, expected_ok)
+
+ # get another user logged in now
+ user_2 = await log_client_in(
+ client, {"role": user_role.name}, enable_check=user_role != UserRole.ANONYMOUS
+ )
+ if new_project:
+ # user 2 can only get the project if user 2 has read access
+ await _get_project(
+ client,
+ new_project,
+ expected_ok if share_rights["read"] else expected_forbidden,
+ )
+ # user 2 can only list projects if user 2 has read access
+ list_projects = await _list_projects(client, expected_ok)
+ assert len(list_projects) == (1 if share_rights["read"] else 0)
+ # user 2 can only update the project is user 2 has write access
+ project_update = deepcopy(new_project)
+ project_update["name"] = "my super name"
+ await _replace_project(
+ client,
+ project_update,
+ expected_ok if share_rights["write"] else expected_forbidden,
+ )
+ # user 2 can only delete projects if user 2 has delete access
+ await _delete_project(
+ client,
+ new_project,
+ expected_nocontents if share_rights["delete"] else expected_forbidden,
+ )
+
+async def _replace_project(
+ client, project_update: Dict, expected: web.Response
+) -> Dict:
+ # PUT /v0/projects/{project_id}
+ url = client.app.router["replace_project"].url_for(
+ project_id=project_update["uuid"]
+ )
+ assert str(url) == f"{API_PREFIX}/projects/{project_update['uuid']}"
resp = await client.put(url, json=project_update)
data, error = await assert_status(resp, expected)
-
if not error:
assert_replaced(current_project=data, update_data=project_update)
+ return data
+
+
# PUT --------
@pytest.mark.parametrize(
    "user_role,expected,expected_change_access",
    [
        (UserRole.ANONYMOUS, web.HTTPUnauthorized, web.HTTPUnauthorized),
        (UserRole.GUEST, web.HTTPOk, web.HTTPForbidden),
        (UserRole.USER, web.HTTPOk, web.HTTPOk),
        (UserRole.TESTER, web.HTTPOk, web.HTTPOk),
    ],
)
async def test_replace_project(
    client,
    logged_user,
    user_project,
    expected,
    expected_change_access,
    computational_system_mock,
    all_group,
):
    """PUT on a project: a plain field update, then an access-rights change
    (GUESTs may update fields but not access rights — see parametrization)."""
    project_update = deepcopy(user_project)
    project_update["description"] = "some updated from original project!!!"
    await _replace_project(client, project_update, expected)

    # replacing the owner access is not possible, it will keep the owner as well
    project_update["accessRights"].update(
        {str(all_group["gid"]): {"read": True, "write": True, "delete": True}}
    )
    await _replace_project(client, project_update, expected_change_access)
@pytest.mark.parametrize(
@@ -608,9 +773,6 @@ async def test_replace_project(
async def test_replace_project_updated_inputs(
client, logged_user, user_project, expected, computational_system_mock,
):
- # PUT /v0/projects/{project_id}
- url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"])
-
project_update = deepcopy(user_project)
#
# "inputAccess": {
@@ -624,12 +786,7 @@ async def test_replace_project_updated_inputs(
project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][
"Na"
] = 55
-
- resp = await client.put(url, json=project_update)
- data, error = await assert_status(resp, expected)
-
- if not error:
- assert_replaced(current_project=data, update_data=project_update)
+ await _replace_project(client, project_update, expected)
@pytest.mark.parametrize(
@@ -644,9 +801,6 @@ async def test_replace_project_updated_inputs(
async def test_replace_project_updated_readonly_inputs(
client, logged_user, user_project, expected, computational_system_mock,
):
- # PUT /v0/projects/{project_id}
- url = client.app.router["replace_project"].url_for(project_id=user_project["uuid"])
-
project_update = deepcopy(user_project)
project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][
"Na"
@@ -654,15 +808,17 @@ async def test_replace_project_updated_readonly_inputs(
project_update["workbench"]["5739e377-17f7-4f09-a6ad-62659fb7fdec"]["inputs"][
"Kr"
] = 5
+ await _replace_project(client, project_update, expected)
- resp = await client.put(url, json=project_update)
- data, error = await assert_status(resp, expected)
- if not error:
- assert_replaced(current_project=data, update_data=project_update)
+# DELETE -------
-# DELETE -------
+async def _delete_project(client, project: Dict, expected: web.Response) -> None:
+ url = client.app.router["delete_project"].url_for(project_id=project["uuid"])
+ assert str(url) == f"{API_PREFIX}/projects/{project['uuid']}"
+ resp = await client.delete(url)
+ await assert_status(resp, expected)
@pytest.mark.parametrize(
@@ -690,11 +846,9 @@ async def test_delete_project(
"get_running_interactive_services"
].return_value = future_with_result(fakes)
- url = client.app.router["delete_project"].url_for(project_id=user_project["uuid"])
+ await _delete_project(client, user_project, expected)
- resp = await client.delete(url)
- await assert_status(resp, expected)
- if resp.status == web.HTTPNoContent.status_code:
+ if expected == web.HTTPNoContent:
mocked_director_subsystem[
"get_running_interactive_services"
].assert_called_once()
@@ -702,11 +856,7 @@ async def test_delete_project(
mocked_director_subsystem["stop_service"].has_calls(calls)
# wait for the fire&forget to run
await sleep(2)
- # check if database entries are correctly removed, there should be no project available here
- url = client.app.router["get_project"].url_for(project_id=user_project["uuid"])
-
- resp = await client.get(url)
- data, error = await assert_status(resp, web.HTTPNotFound)
+ await _get_project(client, user_project, web.HTTPNotFound)
@pytest.mark.parametrize(
@@ -898,9 +1048,7 @@ async def test_delete_shared_project_forbidden(
# delete project in tab2
client_session_id2 = client_session_id()
sio2 = await socketio_client(client_session_id2)
- url = client.app.router["delete_project"].url_for(project_id=user_project["uuid"])
- resp = await client.delete(url)
- await assert_status(resp, expected)
+ await _delete_project(client, user_project, expected)
@pytest.mark.parametrize(
@@ -1044,16 +1192,17 @@ async def test_tags_to_studies(
# Tag is included in response
assert added_tag.get("id") in data.get("tags")
+ # check the tags are in
+ user_project["tags"] = [tag["id"] for tag in added_tags]
+ data = await _get_project(client, user_project, expected)
+
# Delete tag0
url = client.app.router["delete_tag"].url_for(tag_id=str(added_tags[0].get("id")))
resp = await client.delete(url)
await assert_status(resp, web.HTTPNoContent)
# Get project and check that tag is no longer there
- url = client.app.router["get_project"].url_for(
- project_id=str(user_project.get("uuid"))
- )
- resp = await client.get(url)
- data, _ = await assert_status(resp, expected)
+ user_project["tags"].remove(added_tags[0]["id"])
+ data = await _get_project(client, user_project, expected)
assert added_tags[0].get("id") not in data.get("tags")
# Remove tag1 from project
@@ -1063,11 +1212,8 @@ async def test_tags_to_studies(
resp = await client.delete(url)
await assert_status(resp, expected)
# Get project and check that tag is no longer there
- url = client.app.router["get_project"].url_for(
- project_id=str(user_project.get("uuid"))
- )
- resp = await client.get(url)
- data, _ = await assert_status(resp, expected)
+ user_project["tags"].remove(added_tags[1]["id"])
+ data = await _get_project(client, user_project, expected)
assert added_tags[1].get("id") not in data.get("tags")
# Delete tag1
diff --git a/services/web/server/tests/unit/with_dbs/test_users.py b/services/web/server/tests/unit/with_dbs/test_users.py
index 7e7ae7c8b83..c4451548111 100644
--- a/services/web/server/tests/unit/with_dbs/test_users.py
+++ b/services/web/server/tests/unit/with_dbs/test_users.py
@@ -24,6 +24,7 @@
)
from servicelib.application import create_safe_application
from simcore_service_webserver.db import APP_DB_ENGINE_KEY, setup_db
+from simcore_service_webserver.groups import setup_groups
from simcore_service_webserver.login import setup_login
from simcore_service_webserver.rest import setup_rest
from simcore_service_webserver.security import setup_security
@@ -31,6 +32,7 @@
from simcore_service_webserver.session import setup_session
from simcore_service_webserver.users import setup_users
+
API_VERSION = "v0"
@@ -53,6 +55,7 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
setup_rest(app)
setup_login(app)
setup_users(app)
+ setup_groups(app)
client = loop.run_until_complete(
aiohttp_client(app, server_kwargs={"port": port, "host": "localhost"})
@@ -130,10 +133,10 @@ async def fake_tokens(logged_user, tokens_db):
],
)
async def test_get_profile(
- logged_user,
+ logged_user: Dict,
client,
- role,
- expected,
+ role: UserRole,
+ expected: web.HTTPException,
primary_group: Dict[str, str],
standard_groups: List[Dict[str, str]],
all_group: Dict[str, str],
@@ -306,9 +309,6 @@ async def test_delete_token(
assert not (await get_token_from_db(tokens_db, token_service=sid))
-## BUG FIXES #######################################################
-
-
@pytest.fixture
def mock_failing_connection(mocker) -> MagicMock:
"""
diff --git a/tests/e2e/tutorials/sleepers.js b/tests/e2e/tutorials/sleepers.js
index 401ca2ac421..34bba4cbf2a 100644
--- a/tests/e2e/tutorials/sleepers.js
+++ b/tests/e2e/tutorials/sleepers.js
@@ -17,7 +17,7 @@ const {
} = utils.getUserAndPass(args);
const templateName = "Sleepers";
-async function runTutorial () {
+async function runTutorial() {
const tutorial = new tutorialBase.TutorialBase(url, user, pass, newUser, templateName);
tutorial.init();
@@ -34,6 +34,7 @@ async function runTutorial () {
await tutorial.waitFor(5000);
await tutorial.runPipeline(25000);
+ console.log('Checking results for the first sleeper:');
await tutorial.openNodeFiles(0);
const outFiles = [
"logs.zip",
@@ -41,6 +42,11 @@ async function runTutorial () {
];
await tutorial.checkResults(outFiles.length);
+ await tutorial.waitFor(20000);
+ console.log('Checking results for the last sleeper:');
+ await tutorial.openNodeFiles(4);
+ await tutorial.checkResults(outFiles.length);
+
await tutorial.removeStudy();
await tutorial.logOut();
await tutorial.close();
@@ -50,4 +56,4 @@ runTutorial()
.catch(error => {
console.log('Puppeteer error: ' + error);
process.exit(1);
- });
\ No newline at end of file
+ });
diff --git a/tests/e2e/tutorials/sleepers_project_template_sql.csv b/tests/e2e/tutorials/sleepers_project_template_sql.csv
index e213c2b136f..1a667e38cf2 100644
--- a/tests/e2e/tutorials/sleepers_project_template_sql.csv
+++ b/tests/e2e/tutorials/sleepers_project_template_sql.csv
@@ -1,2 +1,2 @@
-id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,accessRights
-10,TEMPLATE,template-uuid-5203-915e-1ae8ae0c9991,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""template-uuid-5f7e-92b0-5a14e84401e9"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""template-uuid-5d8a-812c-44dacf56840e"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5f7e-92b0-5a14e84401e9"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""template-uuid-5f7e-92b0-5a14e84401e9""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""template-uuid-5706-b741-4073a4454f0d"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5d8a-812c-44dacf56840e"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""template-uuid-5d8a-812c-44dacf56840e"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5d8a-812c-44dacf56840e""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""template-uuid-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""template-uuid-5f7e-92b0-5a14e84401e9"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5f7e-92b0-5a14e84401e9""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""template-uuid-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": 
""template-uuid-5706-b741-4073a4454f0d"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""template-uuid-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5706-b741-4073a4454f0d"", ""template-uuid-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": ""rwx""}"
\ No newline at end of file
+id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights
+10,TEMPLATE,template-uuid-5203-915e-1ae8ae0c9991,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""template-uuid-5f7e-92b0-5a14e84401e9"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""template-uuid-5d8a-812c-44dacf56840e"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5f7e-92b0-5a14e84401e9"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""template-uuid-5f7e-92b0-5a14e84401e9""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""template-uuid-5706-b741-4073a4454f0d"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""template-uuid-5d8a-812c-44dacf56840e"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""template-uuid-5d8a-812c-44dacf56840e"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5d8a-812c-44dacf56840e""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""template-uuid-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""template-uuid-5f7e-92b0-5a14e84401e9"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5f7e-92b0-5a14e84401e9""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""template-uuid-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": 
""template-uuid-5706-b741-4073a4454f0d"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""template-uuid-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""template-uuid-5706-b741-4073a4454f0d"", ""template-uuid-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}"
From 3fb36d0da4c1af4f1494af2ac3a2aaeaa2307ef1 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Mon, 22 Jun 2020 10:27:32 +0200
Subject: [PATCH 06/43] maintenance fix codecov reports (#1568)
* add codecov.yml file
* set up codecov to report for api/packages/services
---
.codecov.yml | 56 ++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 56 insertions(+)
create mode 100644 .codecov.yml
diff --git a/.codecov.yml b/.codecov.yml
new file mode 100644
index 00000000000..4b47678244b
--- /dev/null
+++ b/.codecov.yml
@@ -0,0 +1,56 @@
+codecov:
+ require_ci_to_pass: yes
+ branch: master
+
+coverage:
+ precision: 1
+ round: down
+ range: "70...100"
+
+ status:
+ project:
+ default:
+ informational: true
+ threshold: 1%
+ paths:
+ - api
+ - packages
+ - services
+ api:
+ informational: true
+ threshold: 1%
+ paths:
+ - api
+ packages:
+ informational: true
+ threshold: 1%
+ paths:
+ - packages
+ services:
+ informational: true
+ threshold: 1%
+ paths:
+ - services
+
+
+ patch:
+ default:
+ informational: true
+ threshold: 1%
+ paths:
+ - api
+ - packages
+ - services
+
+parsers:
+ gcov:
+ branch_detection:
+ conditional: yes
+ loop: yes
+ method: no
+ macro: no
+
+comment:
+ layout: "reach,diff,flags,tree"
+ behavior: default
+ require_changes: no
From bdfcd084a4bde44b3b0155653fc5f52b14faf75e Mon Sep 17 00:00:00 2001
From: "dependabot-preview[bot]"
<27856297+dependabot-preview[bot]@users.noreply.github.com>
Date: Mon, 22 Jun 2020 10:34:29 +0200
Subject: [PATCH 07/43] Bump faker from 4.1.0 to 4.1.1 in
/packages/postgres-database (#1573)
Bumps [faker](https://github.com/joke2k/faker) from 4.1.0 to 4.1.1.
- [Release notes](https://github.com/joke2k/faker/releases)
- [Changelog](https://github.com/joke2k/faker/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/joke2k/faker/compare/v4.1.0...v4.1.1)
Signed-off-by: dependabot-preview[bot]
Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com>
---
packages/postgres-database/requirements/_test.txt | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt
index b84a9ea42c2..7de06d70860 100644
--- a/packages/postgres-database/requirements/_test.txt
+++ b/packages/postgres-database/requirements/_test.txt
@@ -17,10 +17,8 @@ coverage==5.1 # via -r requirements/_test.in, coveralls, pytest-cov
coveralls==2.0.0 # via -r requirements/_test.in
docker==4.2.1 # via -r requirements/_migration.txt
docopt==0.6.2 # via coveralls
-faker==4.1.0 # via -r requirements/_test.in
-idna-ssl==1.1.0 # via aiohttp
+faker==4.1.1 # via -r requirements/_test.in
idna==2.9 # via -r requirements/_migration.txt, requests, yarl
-importlib-metadata==1.6.0 # via pluggy, pytest
isort==4.3.21 # via pylint
lazy-object-proxy==1.4.3 # via astroid
mako==1.1.2 # via -r requirements/_migration.txt, alembic
@@ -49,11 +47,8 @@ sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_migration.
tenacity==6.2.0 # via -r requirements/_migration.txt
text-unidecode==1.3 # via faker
toml==0.10.1 # via pylint
-typed-ast==1.4.1 # via astroid
-typing-extensions==3.7.4.2 # via aiohttp
urllib3==1.25.9 # via -r requirements/_migration.txt, requests
wcwidth==0.1.9 # via pytest
websocket-client==0.57.0 # via -r requirements/_migration.txt, docker
wrapt==1.12.1 # via astroid
yarl==1.4.2 # via -r requirements/_migration.txt, aiohttp
-zipp==3.1.0 # via importlib-metadata
From 1b7a47e5f7fa32a159fdcada8ae1f7ea81f721f2 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Mon, 22 Jun 2020 13:37:26 +0200
Subject: [PATCH 08/43] Is1570/study fails 500 (#1572)
Fixes #1570
* increases verbose in gc errors
* Turned gather errors into warnings
* Fixing issue when valid token with invalid user
* Authz cache cleared when any user is deleted
* Tests fix emulating a valid cookie invalid user
* Enhances error handling
---
.../service-library/src/servicelib/utils.py | 18 ++-
.../resource_manager/garbage_collector.py | 2 +-
.../simcore_service_webserver/security_api.py | 14 +-
.../studies_access.py | 46 ++++--
.../templates/error-page.html | 14 ++
.../simcore_service_webserver/users_api.py | 13 +-
.../users_handlers.py | 8 +-
.../src/simcore_service_webserver/utils.py | 5 +
.../unit/with_dbs/test_access_to_studies.py | 149 +++++++++++++-----
9 files changed, 198 insertions(+), 71 deletions(-)
create mode 100644 services/web/server/src/simcore_service_webserver/templates/error-page.html
diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py
index 0c8ca8fd686..758b47cbf3e 100644
--- a/packages/service-library/src/servicelib/utils.py
+++ b/packages/service-library/src/servicelib/utils.py
@@ -60,15 +60,25 @@ def log_exception_callback(fut: asyncio.Future):
# // tasks
-async def logged_gather(*tasks, reraise: bool = True) -> List[Any]:
- # all coroutine called in // and we take care of returning the exceptions
+async def logged_gather(
+ *tasks, reraise: bool = True, log: logging.Logger = logger
+) -> List[Any]:
+ """
+ *all* coroutines passed are executed concurrently and once they are all
+ completed, the first error (if any) is reraised or all returned
+
+ log: passing the logger gives a chance to identify the origin of the gather call
+ """
results = await asyncio.gather(*tasks, return_exceptions=True)
for value in results:
+ # WARN: note that ONLY THE FIRST exception is raised
if isinstance(value, Exception):
if reraise:
raise value
- logger.error(
- "Exception occured while running %s: %s",
+ # Exception is returned, therefore it is not logged as error but as warning
+ # It was user's decision not to reraise them
+ log.warning(
+ "Exception occured while running task %s in gather: %s",
str(tasks[results.index(value)]),
str(value),
)
diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py
index 0c361313eb8..14e01df6d0c 100644
--- a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py
+++ b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py
@@ -177,7 +177,7 @@ async def garbage_collector_task(app: web.Application):
keep_alive = False
logger.info("Garbage collection task was cancelled, it will not restart!")
except Exception: # pylint: disable=broad-except
- logger.warning("There was an error during garbage collection, restarting...")
+ logger.warning("There was an error during garbage collection, restarting...", exc_info=True)
# will wait 5 seconds before restarting to avoid restart loops
await asyncio.sleep(5)
diff --git a/services/web/server/src/simcore_service_webserver/security_api.py b/services/web/server/src/simcore_service_webserver/security_api.py
index ac76c16af1c..c9e76c6f518 100644
--- a/services/web/server/src/simcore_service_webserver/security_api.py
+++ b/services/web/server/src/simcore_service_webserver/security_api.py
@@ -18,6 +18,7 @@
from aiopg.sa import Engine
from .db_models import UserStatus, users
+from .security_authorization import AuthorizationPolicy, RoleBasedAccessModel
from .security_roles import UserRole
log = logging.getLogger(__file__)
@@ -35,19 +36,24 @@ async def check_credentials(engine: Engine, email: str, password: str) -> bool:
return False
-def encrypt_password(password):
+def encrypt_password(password: str) -> str:
return passlib.hash.sha256_crypt.encrypt(password, rounds=1000)
-def check_password(password, password_hash):
+def check_password(password: str, password_hash: str) -> bool:
return passlib.hash.sha256_crypt.verify(password, password_hash)
-def get_access_model(app: web.Application):
- autz_policy = app[AUTZ_KEY]
+def get_access_model(app: web.Application) -> RoleBasedAccessModel:
+ autz_policy: AuthorizationPolicy = app[AUTZ_KEY]
return autz_policy.access_model
+def clean_auth_policy_cache(app: web.Application) -> None:
+ autz_policy: AuthorizationPolicy = app[AUTZ_KEY]
+ autz_policy.timed_cache.clear()
+
+
__all__ = (
"encrypt_password",
"check_credentials",
diff --git a/services/web/server/src/simcore_service_webserver/studies_access.py b/services/web/server/src/simcore_service_webserver/studies_access.py
index d89d38d3b66..1f9e7160a46 100644
--- a/services/web/server/src/simcore_service_webserver/studies_access.py
+++ b/services/web/server/src/simcore_service_webserver/studies_access.py
@@ -23,6 +23,7 @@
from .login.decorators import login_required
from .security_api import is_anonymous, remember
from .statics import INDEX_RESOURCE_NAME
+from .utils import compose_error_msg
log = logging.getLogger(__name__)
@@ -156,7 +157,9 @@ async def access_study(request: web.Request) -> web.Response:
- public studies are templates that are marked as published in the database
- if user is not registered, it creates a temporary guest account with limited resources and expiration
+ - this handler is NOT part of the API and therefore does NOT respond with json
"""
+ # TODO: implement nice error-page.html
project_id = request.match_info["id"]
template_project = await get_public_project(request.app, project_id)
@@ -166,25 +169,38 @@ async def access_study(request: web.Request) -> web.Response:
Please contact the data curators for more information."
)
+ # Get or create a valid user
user = None
is_anonymous_user = await is_anonymous(request)
- if is_anonymous_user:
- log.debug("Creating temporary user ...")
- user = await create_temporary_user(request)
- else:
+ if not is_anonymous_user:
+ # NOTE: covers valid cookie with unauthorized user (e.g. expired guest/banned)
+ # TODO: test if temp user overrides old cookie properly
user = await get_authorized_user(request)
if not user:
- raise RuntimeError("Unable to start user session")
+ log.debug("Creating temporary user ...")
+ user = await create_temporary_user(request)
+ is_anonymous_user = True
- log.debug(
- "Granted access to study '%s' for user %s. Copying study over ...",
- template_project.get("name"),
- user.get("email"),
- )
- copied_project_id = await copy_study_to_account(request, template_project, user)
+ try:
+ log.debug(
+ "Granted access to study '%s' for user %s. Copying study over ...",
+ template_project.get("name"),
+ user.get("email"),
+ )
+ copied_project_id = await copy_study_to_account(request, template_project, user)
+
+ log.debug("Study %s copied", copied_project_id)
- log.debug("Study %s copied", copied_project_id)
+ except Exception: # pylint: disable=broad-except
+ log.exception(
+ "Failed while copying project '%s' to '%s'",
+ template_project.get("name"),
+ user.get("email"),
+ )
+ raise web.HTTPInternalServerError(
+ reason=compose_error_msg("Unable to copy project.")
+ )
try:
redirect_url = (
@@ -193,11 +209,11 @@ async def access_study(request: web.Request) -> web.Response:
.with_fragment("/study/{}".format(copied_project_id))
)
except KeyError:
- log.error(
+ log.exception(
"Cannot redirect to website because route was not registered. Probably qx output was not ready and it was disabled (see statics.py)"
)
- raise RuntimeError(
- "Unable to serve front-end. Study has been anyway copied over to user."
+ raise web.HTTPInternalServerError(
+ reason=compose_error_msg("Unable to serve front-end.")
)
response = web.HTTPFound(location=redirect_url)
diff --git a/services/web/server/src/simcore_service_webserver/templates/error-page.html b/services/web/server/src/simcore_service_webserver/templates/error-page.html
new file mode 100644
index 00000000000..b322bc40f7b
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/templates/error-page.html
@@ -0,0 +1,14 @@
+{% block title %}
+Site Error
+{% endblock %}
+
+{% block content %}
+Oops, this is a bit embarrassing
+
+
+
+ {{ error_text }}
+
+
+
+{% endblock %}
diff --git a/services/web/server/src/simcore_service_webserver/users_api.py b/services/web/server/src/simcore_service_webserver/users_api.py
index 1679a7033e6..451a9e2aae6 100644
--- a/services/web/server/src/simcore_service_webserver/users_api.py
+++ b/services/web/server/src/simcore_service_webserver/users_api.py
@@ -12,8 +12,9 @@
from .db_models import GroupType, groups, tokens, user_to_groups, users
from .groups_api import convert_groups_db_to_schema
-from .users_utils import convert_user_db_to_schema
+from .security_api import clean_auth_policy_cache
from .users_exceptions import UserNotFoundError
+from .users_utils import convert_user_db_to_schema
logger = logging.getLogger(__name__)
@@ -23,6 +24,7 @@ async def get_user_profile(app: web.Application, user_id: int) -> Dict[str, Any]
user_profile: Dict[str, Any] = {}
user_primary_group = all_group = {}
user_standard_groups = []
+
async with engine.acquire() as conn:
async for row in conn.execute(
sa.select(
@@ -105,6 +107,11 @@ async def is_user_guest(app: web.Application, user_id: int) -> bool:
async def delete_user(app: web.Application, user_id: int) -> None:
"""Deletes a user from the database if the user exists"""
+ # FIXME: user cannot be deleted without deleting first all its projects
+ # otherwise this function will raise asyncpg.exceptions.ForeignKeyViolationError
+ # Consider "marking" users as deleted and having a background job that
+ # cleans it up
+
db = get_storage(app)
user = await db.get_user({"id": user_id})
if not user:
@@ -115,6 +122,10 @@ async def delete_user(app: web.Application, user_id: int) -> None:
await db.delete_user(user)
+ # This user might be cached in the auth. If so, any request
+ # with this user-id will get through, producing unexpected side-effects
+ clean_auth_policy_cache(app)
+
# TOKEN -------------------------------------------
async def create_token(
diff --git a/services/web/server/src/simcore_service_webserver/users_handlers.py b/services/web/server/src/simcore_service_webserver/users_handlers.py
index b2eacf6fbfc..ec96851a1f9 100644
--- a/services/web/server/src/simcore_service_webserver/users_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/users_handlers.py
@@ -8,10 +8,7 @@
from . import users_api
from .login.decorators import RQT_USERID_KEY, login_required
from .security_decorators import permission_required
-from .users_exceptions import (
- TokenNotFoundError,
- UserNotFoundError,
-)
+from .users_exceptions import TokenNotFoundError, UserNotFoundError
logger = logging.getLogger(__name__)
@@ -24,7 +21,8 @@ async def get_my_profile(request: web.Request):
try:
return await users_api.get_user_profile(request.app, uid)
except UserNotFoundError:
- raise web.HTTPServerError(reason="could not find profile!")
+ # NOTE: invalid user_id could happen due to timed-cache in AuthorizationPolicy
+ raise web.HTTPNotFound(reason="Could not find profile!")
@login_required
diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py
index 3b3d47895ec..3a12b43e0d2 100644
--- a/services/web/server/src/simcore_service_webserver/utils.py
+++ b/services/web/server/src/simcore_service_webserver/utils.py
@@ -196,3 +196,8 @@ def get_tracemalloc_info(top=10) -> List[str]:
)
return top_trace
+
+
+def compose_error_msg(msg: str) -> str:
+ msg = msg.strip()
+ return f"{msg}. Please send this message to support@osparc.io [{now_str()}]"
diff --git a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
index ba4e50b9764..9f39f410b00 100644
--- a/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
+++ b/services/web/server/tests/unit/with_dbs/test_access_to_studies.py
@@ -5,6 +5,7 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+import re
import textwrap
from copy import deepcopy
from pathlib import Path
@@ -12,25 +13,25 @@
from typing import Dict
import pytest
-from aiohttp import web
+from aiohttp import ClientResponse, ClientSession, web
import simcore_service_webserver.statics
from pytest_simcore.helpers.utils_assert import assert_status
from pytest_simcore.helpers.utils_login import LoggedUser, UserRole
from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects
from servicelib.application import create_safe_application
-from servicelib.application_keys import APP_CONFIG_KEY
from servicelib.rest_responses import unwrap_envelope
-from simcore_service_webserver import studies_access
from simcore_service_webserver.db import setup_db
from simcore_service_webserver.login import setup_login
from simcore_service_webserver.projects import setup_projects
+from simcore_service_webserver.projects.projects_api import delete_project_from_db
from simcore_service_webserver.rest import setup_rest
from simcore_service_webserver.security import setup_security
from simcore_service_webserver.session import setup_session
from simcore_service_webserver.statics import setup_statics
from simcore_service_webserver.studies_access import setup_studies_access
from simcore_service_webserver.users import setup_users
+from simcore_service_webserver.users_api import delete_user, is_user_guest
SHARED_STUDY_UUID = "e2e38eee-c569-4e55-b104-70d159e49c87"
@@ -114,7 +115,7 @@ async def logged_user(client): # , role: UserRole):
@pytest.fixture
-async def published_project(client, fake_project):
+async def published_project(client, fake_project) -> Dict:
project_data = deepcopy(fake_project)
project_data["name"] = "Published project"
project_data["uuid"] = SHARED_STUDY_UUID
@@ -154,12 +155,45 @@ async def _get_user_projects(client):
def _assert_same_projects(got: Dict, expected: Dict):
# TODO: validate using api/specs/webserver/v0/components/schemas/project-v0.0.1.json
# TODO: validate workbench!
- exclude = ["creationDate", "lastChangeDate", "prjOwner", "uuid", "workbench", "accessRights"]
+ exclude = set(
+ [
+ "creationDate",
+ "lastChangeDate",
+ "prjOwner",
+ "uuid",
+ "workbench",
+ "accessRights",
+ ]
+ )
for key in expected.keys():
if key not in exclude:
assert got[key] == expected[key], "Failed in %s" % key
+async def assert_redirected_to_study(
+ resp: ClientResponse, session: ClientSession
+) -> str:
+ content = await resp.text()
+ assert resp.status == web.HTTPOk.status_code, f"Got {content}"
+
+ # Expects redirection to osparc web (see qx_client_outdir fixture)
+ assert resp.url.path == "/"
+ assert (
+ "OSPARC-SIMCORE" in content
+ ), "Expected front-end rendering workbench's study, got %s" % str(content)
+
+ # Expects auth cookie for current user
+ assert "osparc.WEBAPI_SESSION" in [c.key for c in session.cookie_jar]
+
+ # Expects fragment to indicate client where to find newly created project
+ m = re.match(r"/study/([\d\w-]+)", resp.real_url.fragment)
+ assert m, f"Expected /study/uuid, got {resp.real_url.fragment}"
+
+ # returns newly created project
+ redirected_project_id = m.group(1)
+ return redirected_project_id
+
+
# TESTS --------------------------------------
async def test_access_to_invalid_study(client, published_project):
resp = await client.get("/study/SOME_INVALID_UUID")
@@ -173,34 +207,26 @@ async def test_access_to_forbidden_study(client, unpublished_project):
valid_but_not_sharable = unpublished_project["uuid"]
- resp = await client.get("/study/%s" % valid_but_not_sharable)
+    resp = await client.get(f"/study/{valid_but_not_sharable}")
content = await resp.text()
- assert resp.status == web.HTTPNotFound.status_code, (
- "STANDARD studies are NOT sharable: %s" % content
- )
+ assert (
+ resp.status == web.HTTPNotFound.status_code
+ ), f"STANDARD studies are NOT sharable: {content}"
async def test_access_study_anonymously(
client, qx_client_outdir, published_project, storage_subsystem_mock
):
- params = {"uuid": SHARED_STUDY_UUID, "name": "some-template"}
-
- url_path = "/study/%s" % SHARED_STUDY_UUID
- resp = await client.get(url_path)
- content = await resp.text()
+ study_url = client.app.router["study"].url_for(id=published_project["uuid"])
+ resp = await client.get(study_url)
- # index
- assert resp.status == web.HTTPOk.status_code, "Got %s" % str(content)
- assert str(resp.url.path) == "/"
- assert (
- "OSPARC-SIMCORE" in content
- ), "Expected front-end rendering workbench's study, got %s" % str(content)
-
- real_url = str(resp.real_url)
+ expected_prj_id = await assert_redirected_to_study(resp, client.session)
# has auto logged in as guest?
- resp = await client.get("/v0/me")
+ me_url = client.app.router["get_my_profile"].url_for()
+ resp = await client.get(me_url)
+
data, _ = await assert_status(resp, web.HTTPOk)
assert data["login"].endswith("guest-at-osparc.io")
assert data["gravatar_id"]
@@ -211,7 +237,7 @@ async def test_access_study_anonymously(
assert len(projects) == 1
guest_project = projects[0]
- assert real_url.endswith("#/study/%s" % guest_project["uuid"])
+ assert expected_prj_id == guest_project["uuid"]
_assert_same_projects(guest_project, published_project)
assert guest_project["prjOwner"] == data["login"]
@@ -220,29 +246,70 @@ async def test_access_study_anonymously(
async def test_access_study_by_logged_user(
client, logged_user, qx_client_outdir, published_project, storage_subsystem_mock
):
- params = {"uuid": SHARED_STUDY_UUID, "name": "some-template"}
-
- url_path = "/study/%s" % SHARED_STUDY_UUID
- resp = await client.get(url_path)
- content = await resp.text()
-
- # returns index
- assert resp.status == web.HTTPOk.status_code, "Got %s" % str(content)
- assert str(resp.url.path) == "/"
- real_url = str(resp.real_url)
-
- assert (
- "OSPARC-SIMCORE" in content
- ), "Expected front-end rendering workbench's study, got %s" % str(content)
+ study_url = client.app.router["study"].url_for(id=published_project["uuid"])
+ resp = await client.get(study_url)
+ await assert_redirected_to_study(resp, client.session)
# user has a copy of the template project
projects = await _get_user_projects(client)
assert len(projects) == 1
user_project = projects[0]
- # TODO: check redirects to /#/study/{uuid}
- assert real_url.endswith("#/study/%s" % user_project["uuid"])
-
+    # check redirects to /#/study/{uuid}
+ assert resp.real_url.fragment.endswith("/study/%s" % user_project["uuid"])
_assert_same_projects(user_project, published_project)
assert user_project["prjOwner"] == logged_user["email"]
+
+
+async def test_access_cookie_of_expired_user(
+ client, qx_client_outdir, published_project, storage_subsystem_mock
+):
+ # emulates issue #1570
+ app: web.Application = client.app
+
+ study_url = app.router["study"].url_for(id=published_project["uuid"])
+ resp = await client.get(study_url)
+
+ await assert_redirected_to_study(resp, client.session)
+
+ # Expects valid cookie and GUEST access
+ me_url = app.router["get_my_profile"].url_for()
+ resp = await client.get(me_url)
+
+ data, _ = await assert_status(resp, web.HTTPOk)
+ assert await is_user_guest(app, data["id"])
+
+ async def garbage_collect_guest(uid):
+ # Emulates garbage collector:
+ # - anonymous user expired, cleaning it up
+ # - client still holds cookie with its identifier nonetheless
+ #
+ assert await is_user_guest(app, uid)
+ projects = await _get_user_projects(client)
+ assert len(projects) == 1
+
+ prj_id = projects[0]["uuid"]
+ await delete_project_from_db(app, prj_id, uid)
+ await delete_user(app, uid)
+ return uid
+
+ user_id = await garbage_collect_guest(uid=data["id"])
+ user_email = data["login"]
+
+    # Now this should be non-authorized
+ resp = await client.get(me_url)
+ await assert_status(resp, web.HTTPUnauthorized)
+
+ # But still can access as a new user
+ resp = await client.get(study_url)
+ await assert_redirected_to_study(resp, client.session)
+
+ # as a guest user
+ resp = await client.get(me_url)
+ data, _ = await assert_status(resp, web.HTTPOk)
+ assert await is_user_guest(app, data["id"])
+
+ # But I am another user
+ assert data["id"] != user_id
+ assert data["login"] != user_email
From 294611d7822045972aff714871d72dffc62bc461 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Mon, 22 Jun 2020 19:55:33 +0200
Subject: [PATCH 09/43] Maintenance/upgrades and tooling (#1546)
* script to create invitations
* ujson 2.0.3 -> 3.0.0 in ALL packages
* Upgrades webserver
* Upgrades packages
* Autoformats web-server
---
Makefile | 107 ++++++++++--------
README.md | 12 +-
api/tests/requirements.txt | 14 +--
.../requirements/_migration.txt | 6 +-
.../postgres-database/requirements/_test.in | 2 +-
.../postgres-database/requirements/_test.txt | 25 ++--
.../pytest_simcore/helpers/utils_pylint.py | 2 +-
packages/s3wrapper/requirements/_base.txt | 2 +-
packages/s3wrapper/requirements/_test.in | 2 +-
packages/s3wrapper/requirements/_test.txt | 18 +--
.../service-library/requirements/_base.in | 10 +-
.../service-library/requirements/_base.txt | 8 +-
.../service-library/requirements/_test.in | 2 +-
.../service-library/requirements/_test.txt | 25 ++--
packages/simcore-sdk/requirements/_test.in | 2 +-
packages/simcore-sdk/requirements/_test.txt | 22 ++--
scripts/demo/create_portal_markdown.py | 2 +-
.../simcore_service_api_server/core/redoc.py | 5 +-
services/web/server/requirements/Makefile | 2 +-
services/web/server/requirements/_base.txt | 16 +--
services/web/server/requirements/_test.in | 2 +-
services/web/server/requirements/_test.txt | 40 +++----
.../simcore_service_webserver/__version__.py | 1 -
.../activity/__init__.py | 1 +
.../application_config.py | 21 ++--
.../src/simcore_service_webserver/cli.py | 2 +-
.../simcore_service_webserver/cli_config.py | 2 +-
.../src/simcore_service_webserver/db.py | 3 +-
.../simcore_service_webserver/db_models.py | 12 +-
.../diagnostics_monitoring.py | 9 +-
.../director/config.py | 3 +-
.../simcore_service_webserver/email_config.py | 1 -
.../login/__init__.py | 1 +
.../login/decorators.py | 1 +
.../login/handlers.py | 3 +-
.../simcore_service_webserver/login/routes.py | 2 +-
.../login/settings.py | 1 -
.../simcore_service_webserver/login/sql.py | 9 +-
.../login/storage.py | 11 +-
.../simcore_service_webserver/login/utils.py | 6 +-
.../projects/__init__.py | 2 +-
.../projects/nodes_handlers.py | 5 +-
.../projects/projects_access.py | 2 +-
.../resource_manager/garbage_collector.py | 22 ++--
.../resource_manager/redis.py | 3 +-
.../handlers/aiohttp_client_extension.py | 10 +-
.../reverse_proxy/handlers/jupyter.py | 2 +-
.../security_roles.py | 1 -
.../socketio/config.py | 2 +-
.../socketio/handlers.py | 8 +-
.../socketio/handlers_utils.py | 1 -
.../src/simcore_service_webserver/storage.py | 4 +-
.../simcore_service_webserver/storage_api.py | 3 +-
.../storage_config.py | 1 +
.../storage_handlers.py | 3 +-
.../storage_routes.py | 1 +
.../simcore_service_webserver/tag_handlers.py | 3 +-
.../tracing/__init__.py | 4 +-
.../simcore_service_webserver/users_api.py | 1 +
services/web/server/tests/unit/conftest.py | 2 +-
.../server/tests/unit/test_catalog_setup.py | 1 -
.../web/server/tests/unit/test_healthcheck.py | 2 +-
.../web/server/tests/unit/test_package.py | 6 +-
.../tests/unit/test_template_projects.py | 3 +-
.../requirements/requirements.txt | 20 ++--
65 files changed, 277 insertions(+), 250 deletions(-)
diff --git a/Makefile b/Makefile
index ef2dfa7fec8..71e34f82d28 100644
--- a/Makefile
+++ b/Makefile
@@ -6,11 +6,11 @@
# - In windows, only WSL is supported
#
# by sanderegg, pcrespov
+#
.DEFAULT_GOAL := help
-SHELL := /bin/bash
+SHELL := /bin/bash
-# TOOLS --------------------------------------
MAKE_C := $(MAKE) --no-print-directory --directory
@@ -60,7 +60,9 @@ export SWARM_STACK_NAME ?= simcore
export DOCKER_IMAGE_TAG ?= latest
export DOCKER_REGISTRY ?= itisfoundation
+
.PHONY: help
+
help: ## help on rule's targets
ifeq ($(IS_WIN),)
@awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
@@ -70,7 +72,7 @@ endif
-## docker BUILD -------------------------------
+## DOCKER BUILD -------------------------------
#
# - all builds are inmediatly tagged as 'local/{service}:${BUILD_TARGET}' where BUILD_TARGET='development', 'production', 'cache'
# - only production and cache images are released (i.e. tagged pushed into registry)
@@ -127,7 +129,6 @@ endif
endif
-# TODO: should download cache if any??
build-cache build-cache-nc build-cache-kit build-cache-x: .env ## Build cache images and tags them as 'local/{service-name}:cache'
ifeq ($(target),)
# Compiling front-end
@@ -152,7 +153,7 @@ shell:
docker run -it local/$(target):production /bin/sh
-## docker SWARM -------------------------------
+## DOCKER SWARM -------------------------------
#
# - All resolved configuration are named as .stack-${name}-*.yml to distinguish from docker-compose files which can be parametrized
#
@@ -179,6 +180,7 @@ docker-compose-configs = $(wildcard services/docker-compose*.yml)
# Creating config for ops stack to $@
@docker-compose -f services/docker-compose-ops.yml --log-level=ERROR config > $@
+
.PHONY: up-devel up-prod up-version up-latest .deploy-ops
.deploy-ops: .stack-ops.yml
@@ -235,7 +237,7 @@ leave: ## Forces to stop all services, networks, etc by the node leaving the swa
$(if $(SWARM_HOSTS),,docker swarm init)
-## docker TAGS -------------------------------
+## DOCKER TAGS -------------------------------
.PHONY: tag-local tag-cache tag-version tag-latest
@@ -263,10 +265,10 @@ tag-latest: ## Tags last locally built production images as '${DOCKER_REGISTRY}/
-## docker PULL/PUSH -------------------------------
+## DOCKER PULL/PUSH -------------------------------
#
-# TODO: cannot push modified/untracke
-# TODO: cannot push discetedD
+# TODO: cannot push modified/untracked
+# TODO: cannot push discarded
#
.PHONY: pull-cache pull-version
pull-cache: .env
@@ -296,21 +298,7 @@ push-version: tag-version
)
-## PYTHON -------------------------------
-.PHONY: pylint
-
-pylint: ## Runs python linter framework's wide
- # See exit codes and command line https://pylint.readthedocs.io/en/latest/user_guide/run.html#exit-codes
- # TODO: NOT windows friendly
- /bin/bash -c "pylint --jobs=0 --rcfile=.pylintrc $(strip $(shell find services packages -iname '*.py' \
- -not -path "*egg*" \
- -not -path "*migration*" \
- -not -path "*datcore.py" \
- -not -path "*sandbox*" \
- -not -path "*-sdk/python*" \
- -not -path "*generated_code*" \
- -not -path "*datcore.py" \
- -not -path "*web/server*"))"
+## ENVIRONMENT -------------------------------
.PHONY: devenv devenv-all
@@ -332,25 +320,42 @@ devenv-all: devenv ## sets up extra development tools (everything else besides p
@$(MAKE_C) scripts/json-schema-to-openapi-schema
-## MISC -------------------------------
-
-.PHONY: new-service
-new-service: .venv ## Bakes a new project from cookiecutter-simcore-pyservice and drops it under services/ [UNDER DEV]
- $
-
+
+
-[`master`](https://github.com/itisfoundation/osparc-simcore/tree/master)
[![Code style: black]](https://github.com/psf/black)
[![Requires.io]](https://requires.io/github/ITISFoundation/osparc-simcore/requirements/?branch=master "State of third party python dependencies")
[![travis-ci]](https://travis-ci.org/ITISFoundation/osparc-simcore "State of CI: build, test and pushing images")
@@ -14,10 +14,10 @@
[![codecov.io]](https://codecov.io/gh/ITISFoundation/osparc-simcore)
[![github.io]](https://itisfoundation.github.io/)
[![itis.dockerhub]](https://hub.docker.com/u/itisfoundation)
+[![license]](./LICENSE)
-
-
+
[Code style: black]:https://img.shields.io/badge/code%20style-black-000000.svg
[Requires.io]:https://img.shields.io/requires/github/ITISFoundation/osparc-simcore.svg
[travis-ci]:https://travis-ci.org/ITISFoundation/osparc-simcore.svg?branch=master
@@ -25,8 +25,8 @@
[itis.dockerhub]:https://img.shields.io/website/https/hub.docker.com/u/itisfoundation.svg?down_color=red&label=dockerhub%20repos&up_color=green
[coveralls.io]:https://coveralls.io/repos/github/ITISFoundation/osparc-simcore/badge.svg?branch=master
[codecov.io]:https://codecov.io/gh/ITISFoundation/osparc-simcore/branch/master/graph/badge.svg
-
-
+[license]:https://img.shields.io/github/license/ITISFoundation/osparc-simcore
+
The SIM-CORE, named **o2S2PARC** – **O**pen **O**nline **S**imulations for **S**timulating **P**eripheral **A**ctivity to **R**elieve **C**onditions – is one of the three integrative cores of the SPARC program’s Data Resource Center (DRC).
diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt
index 0bfa736a91b..81de68bb585 100644
--- a/api/tests/requirements.txt
+++ b/api/tests/requirements.txt
@@ -11,11 +11,11 @@ chardet==3.0.4 # via aiohttp
coverage==5.1 # via -r requirements.in, pytest-cov
idna-ssl==1.1.0 # via aiohttp
idna==2.9 # via idna-ssl, yarl
-importlib-metadata==1.6.0 # via jsonschema, pluggy, pytest
+importlib-metadata==1.6.1 # via jsonschema, pluggy, pytest
isodate==0.6.0 # via openapi-schema-validator
jsonschema==3.2.0 # via openapi-schema-validator, openapi-spec-validator
-lazy-object-proxy==1.4.3 # via openapi-core
-more-itertools==8.3.0 # via openapi-core, pytest
+lazy-object-proxy==1.5.0 # via openapi-core
+more-itertools==8.4.0 # via openapi-core, pytest
multidict==4.7.6 # via aiohttp, yarl
openapi-core==0.13.3 # via -r requirements.in
openapi-schema-validator==0.1.1 # via openapi-core
@@ -23,12 +23,12 @@ openapi-spec-validator==0.2.8 # via openapi-core
packaging==20.4 # via pytest, pytest-sugar
parse==1.15.0 # via openapi-core
pluggy==0.13.1 # via pytest
-py==1.8.1 # via pytest
+py==1.8.2 # via pytest
pyparsing==2.4.7 # via packaging
pyrsistent==0.16.0 # via jsonschema
pytest-aiohttp==0.3.0 # via -r requirements.in
-pytest-cov==2.9.0 # via -r requirements.in
-pytest-instafail==0.4.1.post0 # via -r requirements.in
+pytest-cov==2.10.0 # via -r requirements.in
+pytest-instafail==0.4.2 # via -r requirements.in
pytest-sugar==0.9.3 # via -r requirements.in
pytest==5.4.3 # via -r requirements.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-sugar
pyyaml==5.3.1 # via openapi-spec-validator
@@ -36,7 +36,7 @@ six==1.15.0 # via isodate, jsonschema, openapi-core, openapi-schem
strict-rfc3339==0.7 # via openapi-schema-validator
termcolor==1.1.0 # via pytest-sugar
typing-extensions==3.7.4.2 # via aiohttp
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
werkzeug==1.0.1 # via openapi-core
yarl==1.4.2 # via aiohttp
zipp==3.1.0 # via importlib-metadata
diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt
index 999ad388cb4..850c491883a 100644
--- a/packages/postgres-database/requirements/_migration.txt
+++ b/packages/postgres-database/requirements/_migration.txt
@@ -5,18 +5,18 @@
# pip-compile --output-file=requirements/_migration.txt requirements/_migration.in
#
alembic==1.4.2 # via -r requirements/_migration.in
-certifi==2020.4.5.1 # via requests
+certifi==2020.6.20 # via requests
chardet==3.0.4 # via requests
click==7.1.2 # via -r requirements/_migration.in
docker==4.2.1 # via -r requirements/_migration.in
idna==2.9 # via -r requirements/_base.txt, requests, yarl
-mako==1.1.2 # via alembic
+mako==1.1.3 # via alembic
markupsafe==1.1.1 # via mako
multidict==4.7.6 # via -r requirements/_base.txt, yarl
psycopg2-binary==2.8.5 # via -r requirements/_base.txt, sqlalchemy
python-dateutil==2.8.1 # via alembic
python-editor==1.0.4 # via alembic
-requests==2.23.0 # via docker
+requests==2.24.0 # via docker
six==1.15.0 # via docker, python-dateutil, tenacity, websocket-client
sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, alembic
tenacity==6.2.0 # via -r requirements/_migration.in
diff --git a/packages/postgres-database/requirements/_test.in b/packages/postgres-database/requirements/_test.in
index 0b4c92a022f..fadd6b3aa47 100644
--- a/packages/postgres-database/requirements/_test.in
+++ b/packages/postgres-database/requirements/_test.in
@@ -20,5 +20,5 @@ pytest-runner
pytest-docker
# CI
-pylint==2.5.0 # 2.5.3 fails to run in parallel
+pylint
coveralls
diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt
index 7de06d70860..d51075d48a3 100644
--- a/packages/postgres-database/requirements/_test.txt
+++ b/packages/postgres-database/requirements/_test.txt
@@ -7,10 +7,10 @@
aiohttp==3.6.2 # via pytest-aiohttp
aiopg[sa]==1.0.0 # via -r requirements/_test.in
alembic==1.4.2 # via -r requirements/_migration.txt
-astroid==2.4.1 # via pylint
+astroid==2.4.2 # via pylint
async-timeout==3.0.1 # via aiohttp
attrs==19.3.0 # via aiohttp, pytest, pytest-docker
-certifi==2020.4.5.1 # via -r requirements/_migration.txt, requests
+certifi==2020.6.20 # via -r requirements/_migration.txt, requests
chardet==3.0.4 # via -r requirements/_migration.txt, aiohttp, requests
click==7.1.2 # via -r requirements/_migration.txt
coverage==5.1 # via -r requirements/_test.in, coveralls, pytest-cov
@@ -18,37 +18,42 @@ coveralls==2.0.0 # via -r requirements/_test.in
docker==4.2.1 # via -r requirements/_migration.txt
docopt==0.6.2 # via coveralls
faker==4.1.1 # via -r requirements/_test.in
+idna-ssl==1.1.0 # via aiohttp
idna==2.9 # via -r requirements/_migration.txt, requests, yarl
+importlib-metadata==1.6.1 # via pluggy, pytest
isort==4.3.21 # via pylint
lazy-object-proxy==1.4.3 # via astroid
-mako==1.1.2 # via -r requirements/_migration.txt, alembic
+mako==1.1.3 # via -r requirements/_migration.txt, alembic
markupsafe==1.1.1 # via -r requirements/_migration.txt, mako
mccabe==0.6.1 # via pylint
-more-itertools==8.3.0 # via pytest
+more-itertools==8.4.0 # via pytest
multidict==4.7.6 # via -r requirements/_migration.txt, aiohttp, yarl
packaging==20.4 # via pytest
pluggy==0.13.1 # via pytest
psycopg2-binary==2.8.5 # via -r requirements/_migration.txt, aiopg, sqlalchemy
-py==1.8.1 # via pytest
-pylint==2.5.0 # via -r requirements/_test.in
+py==1.8.2 # via pytest
+pylint==2.5.3 # via -r requirements/_test.in
pyparsing==2.4.7 # via packaging
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.9.0 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
pytest-docker==0.7.2 # via -r requirements/_test.in
-pytest-instafail==0.4.1.post0 # via -r requirements/_test.in
+pytest-instafail==0.4.2 # via -r requirements/_test.in
pytest-runner==5.2 # via -r requirements/_test.in
pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail
python-dateutil==2.8.1 # via -r requirements/_migration.txt, alembic, faker
python-editor==1.0.4 # via -r requirements/_migration.txt, alembic
pyyaml==5.3.1 # via -r requirements/_test.in
-requests==2.23.0 # via -r requirements/_migration.txt, coveralls, docker
+requests==2.24.0 # via -r requirements/_migration.txt, coveralls, docker
six==1.15.0 # via -r requirements/_migration.txt, astroid, docker, packaging, python-dateutil, tenacity, websocket-client
sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_migration.txt, aiopg, alembic
tenacity==6.2.0 # via -r requirements/_migration.txt
text-unidecode==1.3 # via faker
toml==0.10.1 # via pylint
+typed-ast==1.4.1 # via astroid
+typing-extensions==3.7.4.2 # via aiohttp
urllib3==1.25.9 # via -r requirements/_migration.txt, requests
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
websocket-client==0.57.0 # via -r requirements/_migration.txt, docker
wrapt==1.12.1 # via astroid
yarl==1.4.2 # via -r requirements/_migration.txt, aiohttp
+zipp==3.1.0 # via importlib-metadata
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_pylint.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_pylint.py
index 73483c028d9..2553e383981 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_pylint.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_pylint.py
@@ -13,7 +13,7 @@ def assert_pylint_is_passing(pylintrc, package_dir, number_of_jobs: int = AUTODE
command = f"pylint --jobs={number_of_jobs} --rcfile {pylintrc} -v {package_dir}".split(
" "
)
- pipes = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ pipes = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
std_out, _ = pipes.communicate()
if pipes.returncode != 0:
print(f'>>>> Exit code "{pipes.returncode}"\n{std_out.decode("utf-8")}\n<<<<')
diff --git a/packages/s3wrapper/requirements/_base.txt b/packages/s3wrapper/requirements/_base.txt
index 8aedfb9abb7..9349477f2f9 100644
--- a/packages/s3wrapper/requirements/_base.txt
+++ b/packages/s3wrapper/requirements/_base.txt
@@ -4,7 +4,7 @@
#
# pip-compile --output-file=requirements/_base.txt requirements/_base.in
#
-certifi==2020.4.5.1 # via minio
+certifi==2020.6.20 # via minio
configparser==5.0.0 # via minio
minio==5.0.10 # via -r requirements/_base.in
python-dateutil==2.8.1 # via minio
diff --git a/packages/s3wrapper/requirements/_test.in b/packages/s3wrapper/requirements/_test.in
index 9c0bdb53f36..b5b1c1af315 100644
--- a/packages/s3wrapper/requirements/_test.in
+++ b/packages/s3wrapper/requirements/_test.in
@@ -16,5 +16,5 @@ pytest-runner
requests
# tools for CI
-pylint==2.5.0 # 2.5.3 fails to run in parallel
+pylint
coveralls
diff --git a/packages/s3wrapper/requirements/_test.txt b/packages/s3wrapper/requirements/_test.txt
index e6d34318a92..68c746fa78e 100644
--- a/packages/s3wrapper/requirements/_test.txt
+++ b/packages/s3wrapper/requirements/_test.txt
@@ -4,37 +4,37 @@
#
# pip-compile --output-file=requirements/_test.txt requirements/_test.in
#
-astroid==2.4.1 # via pylint
+astroid==2.4.2 # via pylint
attrs==19.3.0 # via pytest, pytest-docker
-certifi==2020.4.5.1 # via -r requirements/_base.txt, minio, requests
+certifi==2020.6.20 # via -r requirements/_base.txt, minio, requests
chardet==3.0.4 # via requests
configparser==5.0.0 # via -r requirements/_base.txt, minio
coverage==5.1 # via -r requirements/_test.in, coveralls, pytest-cov
coveralls==2.0.0 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
idna==2.9 # via requests
-importlib-metadata==1.6.0 # via pluggy, pytest
+importlib-metadata==1.6.1 # via pluggy, pytest
isort==4.3.21 # via pylint
lazy-object-proxy==1.4.3 # via astroid
mccabe==0.6.1 # via pylint
minio==5.0.10 # via -r requirements/_base.txt
-more-itertools==8.3.0 # via pytest
+more-itertools==8.4.0 # via pytest
packaging==20.4 # via pytest
pluggy==0.13.1 # via pytest
-py==1.8.1 # via pytest
-pylint==2.5.0 # via -r requirements/_test.in
+py==1.8.2 # via pytest
+pylint==2.5.3 # via -r requirements/_test.in
pyparsing==2.4.7 # via packaging
-pytest-cov==2.9.0 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
pytest-docker==0.7.2 # via -r requirements/_test.in
pytest-runner==5.2 # via -r requirements/_test.in
pytest==5.4.3 # via -r requirements/_test.in, pytest-cov
python-dateutil==2.8.1 # via -r requirements/_base.txt, minio
pytz==2020.1 # via -r requirements/_base.txt, minio
-requests==2.23.0 # via -r requirements/_test.in, coveralls
+requests==2.24.0 # via -r requirements/_test.in, coveralls
six==1.15.0 # via -r requirements/_base.txt, astroid, packaging, python-dateutil
toml==0.10.1 # via pylint
typed-ast==1.4.1 # via astroid
urllib3==1.25.9 # via -r requirements/_base.txt, minio, requests
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
wrapt==1.12.1 # via astroid
zipp==3.1.0 # via importlib-metadata
diff --git a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in
index 1edd0b10c94..78d7c098b63 100644
--- a/packages/service-library/requirements/_base.in
+++ b/packages/service-library/requirements/_base.in
@@ -2,11 +2,11 @@
# Specifies third-party dependencies for 'service-library'
#
-sqlalchemy>=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164
-pyyaml>=5.3 # Vulnerable
-psycopg2-binary # enforces binary version - http://initd.org/psycopg/docs/install.html#binary-install-from-pypi
-openapi-core==0.12.0 # frozen until https://github.com/ITISFoundation/osparc-simcore/pull/1396 is CLOSED
-
+sqlalchemy>=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164
+pyyaml>=5.3 # Vulnerable
+psycopg2-binary # enforces binary version - http://initd.org/psycopg/docs/install.html#binary-install-from-pypi
+openapi-core==0.12.0 # frozen until https://github.com/ITISFoundation/osparc-simcore/pull/1396 is CLOSED
+lazy-object-proxy~=1.4.3      # cannot upgrade due to constraints in openapi-core
aiohttp
aiopg[sa]
diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt
index 36a65894d15..ff9829ac502 100644
--- a/packages/service-library/requirements/_base.txt
+++ b/packages/service-library/requirements/_base.txt
@@ -11,10 +11,12 @@ aiozipkin==0.6.0 # via -r requirements/_base.in
async-timeout==3.0.1 # via aiohttp
attrs==19.3.0 # via -r requirements/_base.in, aiohttp, jsonschema, openapi-core
chardet==3.0.4 # via aiohttp
-idna==2.9 # via yarl
+idna-ssl==1.1.0 # via aiohttp
+idna==2.9 # via idna-ssl, yarl
+importlib-metadata==1.6.1 # via jsonschema
isodate==0.6.0 # via openapi-core
jsonschema==3.2.0 # via -r requirements/_base.in, openapi-spec-validator
-lazy-object-proxy==1.4.3 # via openapi-core
+lazy-object-proxy==1.4.3 # via -r requirements/_base.in, openapi-core
multidict==4.7.6 # via aiohttp, yarl
openapi-core==0.12.0 # via -r requirements/_base.in
openapi-spec-validator==0.2.8 # via openapi-core
@@ -27,9 +29,11 @@ sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.in, a
strict-rfc3339==0.7 # via openapi-core
tenacity==6.2.0 # via -r requirements/_base.in
trafaret==2.0.2 # via -r requirements/_base.in
+typing-extensions==3.7.4.2 # via aiohttp
ujson==3.0.0 # via -r requirements/_base.in
werkzeug==1.0.1 # via -r requirements/_base.in
yarl==1.4.2 # via aiohttp
+zipp==3.1.0 # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools
diff --git a/packages/service-library/requirements/_test.in b/packages/service-library/requirements/_test.in
index 1e78d11fdf2..f958c7097cf 100644
--- a/packages/service-library/requirements/_test.in
+++ b/packages/service-library/requirements/_test.in
@@ -17,5 +17,5 @@ pytest-mock
pytest-sugar
# tools
-pylint==2.5.0 # 2.5.3 fails to run in parallel
+pylint # NOTE: The version in pylint at _test.txt is used as a reference for ci/helpers/install_pylint.bash
coveralls
diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt
index 2a4e82d35b0..bd2752ccc37 100644
--- a/packages/service-library/requirements/_test.txt
+++ b/packages/service-library/requirements/_test.txt
@@ -8,23 +8,23 @@ aiodebug==1.1.2 # via -r requirements/_base.txt
aiohttp==3.6.2 # via -r requirements/_base.txt, aiozipkin, pytest-aiohttp
aiopg[sa]==1.0.0 # via -r requirements/_base.txt
aiozipkin==0.6.0 # via -r requirements/_base.txt
-astroid==2.4.1 # via pylint
+astroid==2.4.2 # via pylint
async-timeout==3.0.1 # via -r requirements/_base.txt, aiohttp
attrs==19.3.0 # via -r requirements/_base.txt, aiohttp, jsonschema, openapi-core, pytest, pytest-docker
-certifi==2020.4.5.1 # via requests
+certifi==2020.6.20 # via requests
chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests
coverage==5.1 # via -r requirements/_test.in, coveralls, pytest-cov
coveralls==2.0.0 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
-idna-ssl==1.1.0 # via -r requirements/_base.txt
+idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp
idna==2.9 # via -r requirements/_base.txt, idna-ssl, requests, yarl
-importlib-metadata==1.6.0 # via -r requirements/_base.txt
+importlib-metadata==1.6.1 # via -r requirements/_base.txt, jsonschema, pluggy, pytest
isodate==0.6.0 # via -r requirements/_base.txt, openapi-core
isort==4.3.21 # via pylint
jsonschema==3.2.0 # via -r requirements/_base.txt, openapi-spec-validator
lazy-object-proxy==1.4.3 # via -r requirements/_base.txt, astroid, openapi-core
mccabe==0.6.1 # via pylint
-more-itertools==8.3.0 # via pytest
+more-itertools==8.4.0 # via pytest
multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl
openapi-core==0.12.0 # via -r requirements/_base.txt
openapi-spec-validator==0.2.8 # via -r requirements/_base.txt, openapi-core
@@ -32,20 +32,20 @@ packaging==20.4 # via pytest, pytest-sugar
pluggy==0.13.1 # via pytest
prometheus-client==0.8.0 # via -r requirements/_base.txt
psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy
-py==1.8.1 # via pytest
-pylint==2.5.0 # via -r requirements/_test.in
+py==1.8.2 # via pytest
+pylint==2.5.3 # via -r requirements/_test.in
pyparsing==2.4.7 # via packaging
pyrsistent==0.16.0 # via -r requirements/_base.txt, jsonschema
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.9.0 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
pytest-docker==0.7.2 # via -r requirements/_test.in
-pytest-instafail==0.4.1.post0 # via -r requirements/_test.in
+pytest-instafail==0.4.2 # via -r requirements/_test.in
pytest-mock==3.1.1 # via -r requirements/_test.in
pytest-runner==5.2 # via -r requirements/_test.in
pytest-sugar==0.9.3 # via -r requirements/_test.in
pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar
pyyaml==5.3.1 # via -r requirements/_base.txt, openapi-spec-validator
-requests==2.23.0 # via coveralls
+requests==2.24.0 # via coveralls
six==1.15.0 # via -r requirements/_base.txt, astroid, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, pyrsistent, tenacity
sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, aiopg
strict-rfc3339==0.7 # via -r requirements/_base.txt, openapi-core
@@ -53,12 +53,15 @@ tenacity==6.2.0 # via -r requirements/_base.txt
termcolor==1.1.0 # via pytest-sugar
toml==0.10.1 # via pylint
trafaret==2.0.2 # via -r requirements/_base.txt
+typed-ast==1.4.1 # via astroid
+typing-extensions==3.7.4.2 # via -r requirements/_base.txt, aiohttp
ujson==3.0.0 # via -r requirements/_base.txt
urllib3==1.25.9 # via requests
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
werkzeug==1.0.1 # via -r requirements/_base.txt
wrapt==1.12.1 # via astroid
yarl==1.4.2 # via -r requirements/_base.txt, aiohttp
+zipp==3.1.0 # via -r requirements/_base.txt, importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools
diff --git a/packages/simcore-sdk/requirements/_test.in b/packages/simcore-sdk/requirements/_test.in
index 4a124236c11..6316344ba96 100644
--- a/packages/simcore-sdk/requirements/_test.in
+++ b/packages/simcore-sdk/requirements/_test.in
@@ -22,5 +22,5 @@ requests
docker
# tools for CI
-pylint==2.5.0 # 2.5.3 fails to run in parallel
+pylint
coveralls
diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt
index c54809e303d..8b8c9aef0a3 100644
--- a/packages/simcore-sdk/requirements/_test.txt
+++ b/packages/simcore-sdk/requirements/_test.txt
@@ -7,10 +7,10 @@
aiofiles==0.5.0 # via -r requirements/_base.txt
aiohttp==3.6.2 # via -r requirements/_base.txt, pytest-aiohttp
aiopg[sa]==1.0.0 # via -r requirements/_base.txt
-astroid==2.4.1 # via pylint
+astroid==2.4.2 # via pylint
async-timeout==3.0.1 # via -r requirements/_base.txt, aiohttp
attrs==19.3.0 # via -r requirements/_base.txt, aiohttp, pytest, pytest-docker
-certifi==2020.4.5.1 # via requests
+certifi==2020.6.20 # via requests
chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests
coverage==5.1 # via -r requirements/_test.in, coveralls, pytest-cov
coveralls==2.0.0 # via -r requirements/_test.in
@@ -20,31 +20,31 @@ docker==4.2.1 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp
idna==2.9 # via -r requirements/_base.txt, idna-ssl, requests, yarl
-importlib-metadata==1.6.0 # via pluggy, pytest
+importlib-metadata==1.6.1 # via pluggy, pytest
isort==4.3.21 # via pylint
lazy-object-proxy==1.4.3 # via astroid
mccabe==0.6.1 # via pylint
mock==4.0.2 # via -r requirements/_test.in
-more-itertools==8.3.0 # via pytest
+more-itertools==8.4.0 # via pytest
multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl
networkx==2.4 # via -r requirements/_base.txt
packaging==20.4 # via pytest, pytest-sugar
pluggy==0.13.1 # via pytest
psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy
-py==1.8.1 # via pytest
+py==1.8.2 # via pytest
pydantic==1.5.1 # via -r requirements/_base.txt
-pylint==2.5.0 # via -r requirements/_test.in
+pylint==2.5.3 # via -r requirements/_test.in
pyparsing==2.4.7 # via packaging
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.9.0 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
pytest-docker==0.7.2 # via -r requirements/_test.in
-pytest-instafail==0.4.1.post0 # via -r requirements/_test.in
-pytest-mock==3.1.0 # via -r requirements/_test.in
+pytest-instafail==0.4.2 # via -r requirements/_test.in
+pytest-mock==3.1.1 # via -r requirements/_test.in
pytest-runner==5.2 # via -r requirements/_test.in
pytest-sugar==0.9.3 # via -r requirements/_test.in
pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar
pyyaml==5.3.1 # via -r requirements/_base.txt, trafaret-config
-requests==2.23.0 # via -r requirements/_test.in, coveralls, docker
+requests==2.24.0 # via -r requirements/_test.in, coveralls, docker
six==1.15.0 # via -r requirements/_base.txt, astroid, docker, packaging, tenacity, websocket-client
sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, aiopg
tenacity==6.2.0 # via -r requirements/_base.txt
@@ -55,7 +55,7 @@ trafaret==2.0.2 # via -r requirements/_base.txt, trafaret-config
typed-ast==1.4.1 # via astroid
typing-extensions==3.7.4.2 # via -r requirements/_base.txt, aiohttp
urllib3==1.25.9 # via requests
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
websocket-client==0.57.0 # via docker
wrapt==1.12.1 # via astroid
yarl==1.4.2 # via -r requirements/_base.txt, aiohttp
diff --git a/scripts/demo/create_portal_markdown.py b/scripts/demo/create_portal_markdown.py
index 75cd698039b..bce8fa96786 100644
--- a/scripts/demo/create_portal_markdown.py
+++ b/scripts/demo/create_portal_markdown.py
@@ -121,7 +121,7 @@ def main(mock_codes):
print("", file=fh)
- today = datetime.today()
+ today: datetime = datetime.today()
file_path = current_path.parent / CONFIRMATIONS_FILENAME
with _open(file_path) as fh:
print("code,user_id,action,data,created_at", file=fh)
diff --git a/services/api-server/src/simcore_service_api_server/core/redoc.py b/services/api-server/src/simcore_service_api_server/core/redoc.py
index 45ff4d407db..b3f256d466e 100644
--- a/services/api-server/src/simcore_service_api_server/core/redoc.py
+++ b/services/api-server/src/simcore_service_api_server/core/redoc.py
@@ -4,7 +4,7 @@
from fastapi.applications import HTMLResponse, Request
from fastapi.openapi.docs import get_redoc_html
-# from ..__version__ import api_vtag
+# TODO: move all these static resources away from the server!
FAVICON = "https://osparc.io/resource/osparc/favicon.png"
LOGO = "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg"
@@ -14,9 +14,8 @@
def compose_long_description(description: str) -> str:
desc = f"**{description}**\n"
desc += "## Python Library\n"
- desc += "- Documentation (https://itisfoundation.github.io/osparc-simcore-python-client/#/)\n"
+ desc += "- Check the [documentation](https://itisfoundation.github.io/osparc-simcore-python-client)\n"
desc += "- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``\n"
-
return desc
diff --git a/services/web/server/requirements/Makefile b/services/web/server/requirements/Makefile
index 9e3b90f7e91..8903b3c1bc6 100644
--- a/services/web/server/requirements/Makefile
+++ b/services/web/server/requirements/Makefile
@@ -6,6 +6,6 @@ include ../../../../scripts/requirements.Makefile
# Add here any extra explicit dependency: e.g. _migration.txt: _base.txt
packages_input_reqs = $(shell grep "requirements/_base.in" _base.in | awk '{print $$2}')
-$(info in-repo deps: $(packages_input_reqs))
+
_base.txt: _base.in $(packages_input_reqs)
diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt
index da1514401d2..f251b56bebc 100644
--- a/services/web/server/requirements/_base.txt
+++ b/services/web/server/requirements/_base.txt
@@ -16,29 +16,30 @@ aioredis==1.3.1 # via -r requirements/_base.in
aiormq==3.2.2 # via aio-pika
aiosmtplib==1.1.3 # via -r requirements/_base.in
aiozipkin==0.6.0 # via -r requirements/../../../../packages/service-library/requirements/_base.in
-amqp==2.5.2 # via kombu
+amqp==2.6.0 # via kombu
async-timeout==3.0.1 # via aiohttp, aioredis
asyncpg==0.20.1 # via -r requirements/_base.in
attrs==19.3.0 # via -r requirements/../../../../packages/service-library/requirements/_base.in, aiohttp, jsonschema, openapi-core
billiard==3.6.3.0 # via celery
-celery==4.4.2 # via -r requirements/_base.in
+celery==4.4.5 # via -r requirements/_base.in
cffi==1.14.0 # via cryptography
change-case==0.5.2 # via -r requirements/_base.in
chardet==3.0.4 # via aiohttp
cryptography==2.9.2 # via -r requirements/_base.in, aiohttp-session
-expiringdict==1.2.0 # via -r requirements/_base.in
+expiringdict==1.2.1 # via -r requirements/_base.in
+future==0.18.2 # via celery
hiredis==1.0.1 # via aioredis
idna-ssl==1.1.0 # via aiohttp
idna==2.9 # via idna-ssl, yarl
-importlib-metadata==1.6.0 # via jsonschema, kombu
+importlib-metadata==1.6.1 # via jsonschema, kombu
isodate==0.6.0 # via openapi-core
jinja-app-loader==1.0.2 # via -r requirements/_base.in
jinja2==2.11.2 # via aiohttp-jinja2, aiohttp-swagger
json2html==1.3.0 # via -r requirements/_base.in
jsondiff==1.2.0 # via -r requirements/_base.in
jsonschema==3.2.0 # via -r requirements/../../../../packages/service-library/requirements/_base.in, openapi-spec-validator
-kombu==4.6.8 # via celery
-lazy-object-proxy==1.4.3 # via openapi-core
+kombu==4.6.10 # via celery
+lazy-object-proxy==1.4.3 # via -r requirements/../../../../packages/service-library/requirements/_base.in, openapi-core
markupsafe==1.1.1 # via jinja2
multidict==4.7.6 # via aiohttp, yarl
openapi-core==0.12.0 # via -r requirements/../../../../packages/service-library/requirements/_base.in
@@ -60,8 +61,7 @@ strict-rfc3339==0.7 # via openapi-core
tenacity==6.2.0 # via -r requirements/../../../../packages/service-library/requirements/_base.in
trafaret==2.0.2 # via -r requirements/../../../../packages/service-library/requirements/_base.in
typing-extensions==3.7.4.2 # via aiohttp
-typing==3.7.4.1 # via expiringdict
-ujson==2.0.3 # via -r requirements/../../../../packages/service-library/requirements/_base.in, aiohttp-swagger
+ujson==3.0.0 # via -r requirements/../../../../packages/service-library/requirements/_base.in, aiohttp-swagger
vine==1.3.0 # via amqp, celery
werkzeug==1.0.1 # via -r requirements/../../../../packages/service-library/requirements/_base.in
yarl==1.4.2 # via -r requirements/../../../../packages/postgres-database/requirements/_base.in, aio-pika, aiohttp, aiormq
diff --git a/services/web/server/requirements/_test.in b/services/web/server/requirements/_test.in
index eefd14bf731..f56a752e56e 100644
--- a/services/web/server/requirements/_test.in
+++ b/services/web/server/requirements/_test.in
@@ -30,7 +30,7 @@ docker
redis
# tools
-pylint==2.5.0 # 2.5.3 fails to run in parallel
+pylint==2.5.0 # 2.5.3 fails to run in parallel. SEE https://github.com/PyCQA/pylint/releases for updates
coveralls
codecov
ptvsd
diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt
index 006ae8d7cb1..b37c8c46a0e 100644
--- a/services/web/server/requirements/_test.txt
+++ b/services/web/server/requirements/_test.txt
@@ -16,29 +16,30 @@ aioredis==1.3.1 # via -r requirements/_base.txt
aiormq==3.2.2 # via -r requirements/_base.txt, aio-pika
aiosmtplib==1.1.3 # via -r requirements/_base.txt
aiozipkin==0.6.0 # via -r requirements/_base.txt
-amqp==2.5.2 # via -r requirements/_base.txt, kombu
-astroid==2.4.1 # via pylint
+amqp==2.6.0 # via -r requirements/_base.txt, kombu
+astroid==2.4.2 # via pylint
async-timeout==3.0.1 # via -r requirements/_base.txt, aiohttp, aioredis
asyncpg==0.20.1 # via -r requirements/_base.txt
attrs==19.3.0 # via -r requirements/_base.txt, aiohttp, jsonschema, openapi-core, pytest, pytest-docker
billiard==3.6.3.0 # via -r requirements/_base.txt, celery
-celery==4.4.2 # via -r requirements/_base.txt
-certifi==2020.4.5.1 # via requests
+celery==4.4.5 # via -r requirements/_base.txt
+certifi==2020.6.20 # via requests
cffi==1.14.0 # via -r requirements/_base.txt, cryptography
change-case==0.5.2 # via -r requirements/_base.txt
chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests
-codecov==2.1.3 # via -r requirements/_test.in
+codecov==2.1.7 # via -r requirements/_test.in
coverage==5.1 # via -r requirements/_test.in, codecov, coveralls, pytest-cov
coveralls==2.0.0 # via -r requirements/_test.in
cryptography==2.9.2 # via -r requirements/_base.txt, aiohttp-session
-docker==4.2.0 # via -r requirements/_test.in
+docker==4.2.1 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
-expiringdict==1.2.0 # via -r requirements/_base.txt
-faker==4.1.0 # via -r requirements/_test.in
+expiringdict==1.2.1 # via -r requirements/_base.txt
+faker==4.1.1 # via -r requirements/_test.in
+future==0.18.2 # via -r requirements/_base.txt, celery
hiredis==1.0.1 # via -r requirements/_base.txt, aioredis
idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp
idna==2.9 # via -r requirements/_base.txt, idna-ssl, requests, yarl
-importlib-metadata==1.6.0 # via -r requirements/_base.txt, jsonschema, kombu, pluggy, pytest
+importlib-metadata==1.6.1 # via -r requirements/_base.txt, jsonschema, kombu, pluggy, pytest
isodate==0.6.0 # via -r requirements/_base.txt, openapi-core
isort==4.3.21 # via pylint
jinja-app-loader==1.0.2 # via -r requirements/_base.txt
@@ -46,12 +47,12 @@ jinja2==2.11.2 # via -r requirements/_base.txt, aiohttp-jinja2, aioht
json2html==1.3.0 # via -r requirements/_base.txt
jsondiff==1.2.0 # via -r requirements/_base.txt
jsonschema==3.2.0 # via -r requirements/_base.txt, -r requirements/_test.in, openapi-spec-validator
-kombu==4.6.8 # via -r requirements/_base.txt, celery
+kombu==4.6.10 # via -r requirements/_base.txt, celery
lazy-object-proxy==1.4.3 # via -r requirements/_base.txt, astroid, openapi-core
markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2
mccabe==0.6.1 # via pylint
mock==4.0.2 # via -r requirements/_test.in
-more-itertools==8.3.0 # via pytest
+more-itertools==8.4.0 # via pytest
multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl
openapi-core==0.12.0 # via -r requirements/_base.txt
openapi-spec-validator==0.2.8 # via -r requirements/_base.txt, -r requirements/_test.in, openapi-core
@@ -62,16 +63,16 @@ pluggy==0.13.1 # via pytest
prometheus-client==0.8.0 # via -r requirements/_base.txt
psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy
ptvsd==4.3.2 # via -r requirements/_test.in
-py==1.8.1 # via pytest
+py==1.8.2 # via pytest
pycparser==2.20 # via -r requirements/_base.txt, cffi
pylint==2.5.0 # via -r requirements/_test.in
pyparsing==2.4.7 # via packaging
pyrsistent==0.16.0 # via -r requirements/_base.txt, jsonschema
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.9.0 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
pytest-docker==0.7.2 # via -r requirements/_test.in
-pytest-instafail==0.4.1.post0 # via -r requirements/_test.in
-pytest-mock==3.1.0 # via -r requirements/_test.in
+pytest-instafail==0.4.2 # via -r requirements/_test.in
+pytest-mock==3.1.1 # via -r requirements/_test.in
pytest-runner==5.2 # via -r requirements/_test.in
pytest-sugar==0.9.3 # via -r requirements/_test.in
pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar
@@ -80,8 +81,8 @@ python-engineio==3.13.0 # via -r requirements/_base.txt, python-socketio
python-socketio==4.6.0 # via -r requirements/_base.txt
pytz==2020.1 # via -r requirements/_base.txt, celery
pyyaml==5.3.1 # via -r requirements/_base.txt, aiohttp-swagger, openapi-spec-validator
-redis==3.5.2 # via -r requirements/_test.in
-requests==2.23.0 # via codecov, coveralls, docker
+redis==3.5.3 # via -r requirements/_test.in
+requests==2.24.0 # via codecov, coveralls, docker
semantic-version==2.8.5 # via -r requirements/_base.txt
six==1.15.0 # via -r requirements/_base.txt, astroid, cryptography, docker, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, pyrsistent, python-dateutil, python-engineio, python-socketio, tenacity, websocket-client
sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, aiopg
@@ -93,11 +94,10 @@ toml==0.10.1 # via pylint
trafaret==2.0.2 # via -r requirements/_base.txt
typed-ast==1.4.1 # via astroid
typing-extensions==3.7.4.2 # via -r requirements/_base.txt, aiohttp
-typing==3.7.4.1 # via -r requirements/_base.txt, expiringdict
-ujson==2.0.3 # via -r requirements/_base.txt, aiohttp-swagger
+ujson==3.0.0 # via -r requirements/_base.txt, aiohttp-swagger
urllib3==1.25.9 # via requests
vine==1.3.0 # via -r requirements/_base.txt, amqp, celery
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
websocket-client==0.57.0 # via docker
websockets==8.1 # via -r requirements/_test.in
werkzeug==1.0.1 # via -r requirements/_base.txt
diff --git a/services/web/server/src/simcore_service_webserver/__version__.py b/services/web/server/src/simcore_service_webserver/__version__.py
index 8f4c858af49..9267ec38c28 100644
--- a/services/web/server/src/simcore_service_webserver/__version__.py
+++ b/services/web/server/src/simcore_service_webserver/__version__.py
@@ -2,7 +2,6 @@
"""
import pkg_resources
-
from semantic_version import Version
__version__: str = pkg_resources.get_distribution("simcore_service_webserver").version
diff --git a/services/web/server/src/simcore_service_webserver/activity/__init__.py b/services/web/server/src/simcore_service_webserver/activity/__init__.py
index 8c7520cb601..b212dc44027 100644
--- a/services/web/server/src/simcore_service_webserver/activity/__init__.py
+++ b/services/web/server/src/simcore_service_webserver/activity/__init__.py
@@ -2,6 +2,7 @@
import logging
from aiohttp import web
+
from servicelib.application_keys import APP_CONFIG_KEY
from servicelib.application_setup import ModuleCategory, app_module_setup
from servicelib.rest_routing import (
diff --git a/services/web/server/src/simcore_service_webserver/application_config.py b/services/web/server/src/simcore_service_webserver/application_config.py
index 1c3698eb905..4ea666d6f72 100644
--- a/services/web/server/src/simcore_service_webserver/application_config.py
+++ b/services/web/server/src/simcore_service_webserver/application_config.py
@@ -9,20 +9,21 @@
The app configuration is created before the application instance exists.
-
-TODO: add more strict checks with re
-TODO: add support for versioning.
- - check shema fits version
- - parse/format version in schema
"""
+# TODO: add more strict checks with re
+# TODO: add support for versioning.
+#  - check schema fits version
+# - parse/format version in schema
+
import logging
from pathlib import Path
from typing import Dict
import trafaret as T
+from trafaret_config.simple import read_and_validate
+
from servicelib import application_keys # pylint:disable=unused-import
from servicelib.config_schema_utils import addon_section, minimal_addon_schema
-from trafaret_config.simple import read_and_validate
from . import (
catalog_config,
@@ -101,9 +102,9 @@ def create_schema() -> T.Dict:
section_names = [k.name for k in schema.keys]
- assert len(section_names) == len(set(section_names)), (
- "Found repeated section names in %s" % section_names
- ) # nosec
+ # fmt: off
+ assert len(section_names) == len(set(section_names)), f"Found repeated section names in {section_names}" # nosec
+ # fmt: on
return schema
@@ -113,4 +114,4 @@ def load_default_config(environs=None) -> Dict:
return read_and_validate(filepath, trafaret=app_schema, vars=environs)
-app_schema = create_schema() # TODO: rename as schema
+app_schema = create_schema()
diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py
index 492b0632828..49facafcece 100644
--- a/services/web/server/src/simcore_service_webserver/cli.py
+++ b/services/web/server/src/simcore_service_webserver/cli.py
@@ -18,8 +18,8 @@
from argparse import ArgumentParser
from typing import Dict, List, Optional
-from aiohttp.log import access_logger
from aiodebug import log_slow_callbacks
+from aiohttp.log import access_logger
from .application import run_service
from .application_config import CLI_DEFAULT_CONFIGFILE, app_schema
diff --git a/services/web/server/src/simcore_service_webserver/cli_config.py b/services/web/server/src/simcore_service_webserver/cli_config.py
index 08e2f8cee03..d5c3276678a 100644
--- a/services/web/server/src/simcore_service_webserver/cli_config.py
+++ b/services/web/server/src/simcore_service_webserver/cli_config.py
@@ -1,6 +1,6 @@
import argparse
-import os
import logging
+import os
import trafaret_config
import trafaret_config.commandline as commandline
diff --git a/services/web/server/src/simcore_service_webserver/db.py b/services/web/server/src/simcore_service_webserver/db.py
index c9e5bd289ba..a78e2126444 100644
--- a/services/web/server/src/simcore_service_webserver/db.py
+++ b/services/web/server/src/simcore_service_webserver/db.py
@@ -5,6 +5,8 @@
import logging
from aiohttp import web
+from tenacity import Retrying
+
from servicelib.aiopg_utils import (
DataSourceName,
PostgresRetryPolicyUponInitialization,
@@ -15,7 +17,6 @@
)
from servicelib.application_keys import APP_CONFIG_KEY, APP_DB_ENGINE_KEY
from servicelib.application_setup import ModuleCategory, app_module_setup
-from tenacity import Retrying
from .db_config import CONFIG_SECTION_NAME
from .db_models import metadata
diff --git a/services/web/server/src/simcore_service_webserver/db_models.py b/services/web/server/src/simcore_service_webserver/db_models.py
index 76972e49aea..18564c97303 100644
--- a/services/web/server/src/simcore_service_webserver/db_models.py
+++ b/services/web/server/src/simcore_service_webserver/db_models.py
@@ -4,17 +4,17 @@
from simcore_postgres_database.models.base import metadata
from simcore_postgres_database.webserver_models import (
ConfirmationAction,
+ GroupType,
UserRole,
UserStatus,
+ api_keys,
confirmations,
- tokens,
- users,
groups,
- GroupType,
- user_to_groups,
- tags,
study_tags,
- api_keys,
+ tags,
+ tokens,
+ user_to_groups,
+ users,
)
# TODO: roles table that maps every role with allowed tasks e.g. read/write,...??
diff --git a/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py b/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py
index e678bf44f9d..5852d8bc881 100644
--- a/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py
+++ b/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py
@@ -10,9 +10,10 @@
from prometheus_client import CONTENT_TYPE_LATEST, Counter, Gauge, Histogram
from prometheus_client.registry import CollectorRegistry
-from .diagnostics_core import DelayWindowProbe, kLATENCY_PROBE
from servicelib.monitor_services import add_instrumentation
+from .diagnostics_core import DelayWindowProbe, kLATENCY_PROBE
+
log = logging.getLogger(__name__)
kSTART_TIME = f"{__name__}.start_time"
@@ -44,9 +45,9 @@ async def _middleware_handler(request: web.Request, handler):
resp = await handler(request)
log_exception = None
- assert isinstance(
- resp, web.StreamResponse
- ), "Forgot envelope middleware?" # nsec
+ # fmt: off
+ assert isinstance(resp, web.StreamResponse), "Forgot envelope middleware?" # nosec
+ # fmt: on
except web.HTTPServerError as exc:
# Transforms exception into response object and log exception
diff --git a/services/web/server/src/simcore_service_webserver/director/config.py b/services/web/server/src/simcore_service_webserver/director/config.py
index 29f23a2f603..79033b5c422 100644
--- a/services/web/server/src/simcore_service_webserver/director/config.py
+++ b/services/web/server/src/simcore_service_webserver/director/config.py
@@ -7,9 +7,10 @@
import trafaret as T
from aiohttp import ClientSession, web
-from servicelib.application_keys import APP_CONFIG_KEY, APP_CLIENT_SESSION_KEY
from yarl import URL
+from servicelib.application_keys import APP_CLIENT_SESSION_KEY, APP_CONFIG_KEY
+
APP_DIRECTOR_API_KEY = __name__ + ".director_api"
CONFIG_SECTION_NAME = "director"
diff --git a/services/web/server/src/simcore_service_webserver/email_config.py b/services/web/server/src/simcore_service_webserver/email_config.py
index 6550b7c613a..93c6201e221 100644
--- a/services/web/server/src/simcore_service_webserver/email_config.py
+++ b/services/web/server/src/simcore_service_webserver/email_config.py
@@ -5,7 +5,6 @@
"""
import trafaret as T
-
CONFIG_SECTION_NAME = "smtp"
diff --git a/services/web/server/src/simcore_service_webserver/login/__init__.py b/services/web/server/src/simcore_service_webserver/login/__init__.py
index 12ee926619c..ba36f4bb41a 100644
--- a/services/web/server/src/simcore_service_webserver/login/__init__.py
+++ b/services/web/server/src/simcore_service_webserver/login/__init__.py
@@ -9,6 +9,7 @@
import asyncpg
from aiohttp import web
+
from servicelib.aiopg_utils import DSN
from servicelib.application_keys import APP_CONFIG_KEY
from servicelib.application_setup import ModuleCategory, app_module_setup
diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py
index 275957047b4..924c935f30d 100644
--- a/services/web/server/src/simcore_service_webserver/login/decorators.py
+++ b/services/web/server/src/simcore_service_webserver/login/decorators.py
@@ -1,6 +1,7 @@
from functools import wraps
from aiohttp_security.api import check_authorized
+
from servicelib.request_keys import RQT_USERID_KEY
from servicelib.requests_utils import get_request
diff --git a/services/web/server/src/simcore_service_webserver/login/handlers.py b/services/web/server/src/simcore_service_webserver/login/handlers.py
index ff1c5c1bacd..86e43439926 100644
--- a/services/web/server/src/simcore_service_webserver/login/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/login/handlers.py
@@ -1,9 +1,10 @@
import logging
from aiohttp import web
+from yarl import URL
+
from servicelib import observer
from servicelib.rest_utils import extract_and_validate
-from yarl import URL
from ..db_models import ConfirmationAction, UserRole, UserStatus
from ..security_api import check_password, encrypt_password, forget, remember
diff --git a/services/web/server/src/simcore_service_webserver/login/routes.py b/services/web/server/src/simcore_service_webserver/login/routes.py
index 8a8554217e6..56df605b7d9 100644
--- a/services/web/server/src/simcore_service_webserver/login/routes.py
+++ b/services/web/server/src/simcore_service_webserver/login/routes.py
@@ -12,8 +12,8 @@
from servicelib import openapi
from servicelib.rest_routing import iter_path_operations, map_handlers_with_operations
-from . import handlers as login_handlers
from . import api_keys_handlers
+from . import handlers as login_handlers
log = logging.getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py
index cd092905e45..8a04cbbae99 100644
--- a/services/web/server/src/simcore_service_webserver/login/settings.py
+++ b/services/web/server/src/simcore_service_webserver/login/settings.py
@@ -1,6 +1,5 @@
from aiohttp import web
-
APP_LOGIN_CONFIG = __name__ + ".config"
CFG_LOGIN_STORAGE = "STORAGE" # Needs to match login.cfg!!!
diff --git a/services/web/server/src/simcore_service_webserver/login/sql.py b/services/web/server/src/simcore_service_webserver/login/sql.py
index 5876bfa6dd4..b371973f298 100644
--- a/services/web/server/src/simcore_service_webserver/login/sql.py
+++ b/services/web/server/src/simcore_service_webserver/login/sql.py
@@ -1,5 +1,6 @@
from logging import getLogger
+# FIXME: Possible SQL injection vector through string-based query construction.
log = getLogger(__name__)
LOG_TPL = "%s <--%s"
@@ -21,7 +22,7 @@ def find_one_sql(table, filter_, fields=None):
keys, values = _split_dict(filter_)
fields = ", ".join(fields) if fields else "*"
where = _pairs(keys)
- sql = "SELECT {} FROM {} WHERE {}".format(fields, table, where)
+ sql = "SELECT {} FROM {} WHERE {}".format(fields, table, where) # nosec
return sql, values
@@ -43,7 +44,7 @@ def insert_sql(table, data, returning="id"):
('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1])
"""
keys, values = _split_dict(data)
- sql = "INSERT INTO {} ({}) VALUES ({}){}".format(
+ sql = "INSERT INTO {} ({}) VALUES ({}){}".format( # nosec
table,
", ".join(keys),
", ".join(_placeholders(data)),
@@ -67,7 +68,7 @@ def update_sql(table, filter_, updates):
up_keys, up_vals = _split_dict(updates)
changes = _pairs(up_keys, sep=", ")
where = _pairs(where_keys, start=len(up_keys) + 1)
- sql = "UPDATE {} SET {} WHERE {}".format(table, changes, where)
+ sql = "UPDATE {} SET {} WHERE {}".format(table, changes, where) # nosec
return sql, up_vals + where_vals
@@ -84,7 +85,7 @@ def delete_sql(table, filter_):
"""
keys, values = _split_dict(filter_)
where = _pairs(keys)
- sql = "DELETE FROM {} WHERE {}".format(table, where)
+ sql = "DELETE FROM {} WHERE {}".format(table, where) # nosec
return sql, values
diff --git a/services/web/server/src/simcore_service_webserver/login/storage.py b/services/web/server/src/simcore_service_webserver/login/storage.py
index ca908ae205b..540e86550b4 100644
--- a/services/web/server/src/simcore_service_webserver/login/storage.py
+++ b/services/web/server/src/simcore_service_webserver/login/storage.py
@@ -1,13 +1,12 @@
-from logging import getLogger
-from datetime import datetime
import enum
+from datetime import datetime
+from logging import getLogger
+
import asyncpg
-from .utils import get_random_string
+from ..db_models import ConfirmationAction, UserRole, UserStatus
from . import sql
-
-from ..db_models import UserRole, UserStatus, ConfirmationAction
-
+from .utils import get_random_string
log = getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/login/utils.py b/services/web/server/src/simcore_service_webserver/login/utils.py
index 6a3a3db971e..c1ff8854fb6 100644
--- a/services/web/server/src/simcore_service_webserver/login/utils.py
+++ b/services/web/server/src/simcore_service_webserver/login/utils.py
@@ -7,15 +7,15 @@
from logging import getLogger
from os.path import join
from pprint import pformat
-from typing import Mapping, Optional, Tuple, List
-
-import attr
+from typing import List, Mapping, Optional, Tuple
import aiosmtplib
+import attr
import passlib.hash
from aiohttp import web
from aiohttp_jinja2 import render_string
from passlib import pwd
+
from servicelib.rest_models import LogMessageType
from ..resources import resources
diff --git a/services/web/server/src/simcore_service_webserver/projects/__init__.py b/services/web/server/src/simcore_service_webserver/projects/__init__.py
index 66cd4423026..f1ed58e39e3 100644
--- a/services/web/server/src/simcore_service_webserver/projects/__init__.py
+++ b/services/web/server/src/simcore_service_webserver/projects/__init__.py
@@ -4,11 +4,11 @@
It contains metadata about the study (e.g. name, description, owner, etc) and a workbench section that describes the study pipeline
"""
import asyncio
+import json
import logging
from pprint import pformat
import jsonschema
-import json
from aiohttp import ClientSession, web
from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
diff --git a/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py b/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py
index 32409184359..2ee923e0ba6 100644
--- a/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/nodes_handlers.py
@@ -6,10 +6,11 @@
:raises NotImplementedError
"""
-from aiohttp import web
import logging
-from ..login.decorators import login_required
+from aiohttp import web
+
+from ..login.decorators import login_required
log = logging.getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_access.py b/services/web/server/src/simcore_service_webserver/projects/projects_access.py
index 0a71e902d8a..c09ec4a0000 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_access.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_access.py
@@ -1,7 +1,7 @@
import jsondiff
from aiohttp import web
-from ..security_api import get_access_model, UserRole
+from ..security_api import UserRole, get_access_model
async def can_update_node_inputs(context):
diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py
index 14e01df6d0c..c7d3adfe2ce 100644
--- a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py
+++ b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py
@@ -13,24 +13,24 @@
from servicelib.observer import emit
from servicelib.utils import logged_gather
-
-from .config import APP_GARBAGE_COLLECTOR_KEY, get_garbage_collector_interval
-from .registry import RedisResourceRegistry, get_registry
-from simcore_service_webserver.projects.projects_api import delete_project_from_db
-from simcore_service_webserver.users_api import is_user_guest, delete_user
-from simcore_service_webserver.projects.projects_exceptions import ProjectNotFoundError
-from simcore_service_webserver.projects.projects_api import (
- get_workbench_node_ids_from_project_uuid,
- is_node_id_present_in_any_project_workbench,
-)
from simcore_service_webserver.director.director_api import (
get_running_interactive_services,
stop_service,
)
from simcore_service_webserver.director.director_exceptions import (
- ServiceNotFoundError,
DirectorException,
+ ServiceNotFoundError,
+)
+from simcore_service_webserver.projects.projects_api import (
+ delete_project_from_db,
+ get_workbench_node_ids_from_project_uuid,
+ is_node_id_present_in_any_project_workbench,
)
+from simcore_service_webserver.projects.projects_exceptions import ProjectNotFoundError
+from simcore_service_webserver.users_api import delete_user, is_user_guest
+
+from .config import APP_GARBAGE_COLLECTOR_KEY, get_garbage_collector_interval
+from .registry import RedisResourceRegistry, get_registry
logger = logging.getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/redis.py b/services/web/server/src/simcore_service_webserver/resource_manager/redis.py
index fef9636e658..005b0347984 100644
--- a/services/web/server/src/simcore_service_webserver/resource_manager/redis.py
+++ b/services/web/server/src/simcore_service_webserver/resource_manager/redis.py
@@ -2,8 +2,9 @@
import aioredis
from aiohttp import web
+from tenacity import Retrying, before_log, stop_after_attempt, wait_random
+
from servicelib.application_keys import APP_CONFIG_KEY
-from tenacity import Retrying, stop_after_attempt, wait_random, before_log
from .config import APP_CLIENT_REDIS_CLIENT_KEY, CONFIG_SECTION_NAME
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py
index 87f6fe328ff..a817f210215 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/aiohttp_client_extension.py
@@ -13,13 +13,13 @@
import traceback
import warnings
from types import SimpleNamespace, TracebackType
+from typing import List # noqa
from typing import (
Any,
Coroutine,
Generator,
Generic,
Iterable,
- List, # noqa
Mapping,
Optional,
Set,
@@ -77,12 +77,8 @@
strip_auth_from_url,
)
from aiohttp.http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
-from aiohttp.http_websocket import (
- WSHandshakeError,
- WSMessage, # noqa
- ws_ext_gen,
- ws_ext_parse,
-)
+from aiohttp.http_websocket import WSMessage # noqa
+from aiohttp.http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse
from aiohttp.streams import FlowControlDataQueue
from aiohttp.tracing import Trace, TraceConfig
from aiohttp.typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
diff --git a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/jupyter.py b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/jupyter.py
index 59fd867a7e0..9e9bcdc5bf2 100644
--- a/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/jupyter.py
+++ b/services/web/server/src/simcore_service_webserver/reverse_proxy/handlers/jupyter.py
@@ -8,9 +8,9 @@
import aiohttp
from aiohttp import web
-from .aiohttp_client_extension import client_request
from yarl import URL
+from .aiohttp_client_extension import client_request
APP_SOCKETS_KEY = "simcore_service_webserver.reverse_proxy.settings.sockets"
# FIXME: Image tag should determine the handler instead of the opposite!!!
diff --git a/services/web/server/src/simcore_service_webserver/security_roles.py b/services/web/server/src/simcore_service_webserver/security_roles.py
index 368fafde3ab..903056cd6b9 100644
--- a/services/web/server/src/simcore_service_webserver/security_roles.py
+++ b/services/web/server/src/simcore_service_webserver/security_roles.py
@@ -6,7 +6,6 @@
from simcore_postgres_database.models.users import UserRole
-
# A role defines a set of operations that the user *can* perform
# - Every operation is named as a resource and an action
# - Resource is named hierarchically
diff --git a/services/web/server/src/simcore_service_webserver/socketio/config.py b/services/web/server/src/simcore_service_webserver/socketio/config.py
index cc4e3d5526b..0b88d8db69b 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/config.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/config.py
@@ -7,9 +7,9 @@
import trafaret as T
from aiohttp import web
+from socketio import AsyncServer
from servicelib.application_keys import APP_CONFIG_KEY
-from socketio import AsyncServer
CONFIG_SECTION_NAME = "socketio"
APP_CLIENT_SOCKET_SERVER_KEY = __name__ + ".socketio_socketio"
diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers.py b/services/web/server/src/simcore_service_webserver/socketio/handlers.py
index daa271f8773..51681a1b992 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/handlers.py
@@ -8,20 +8,20 @@
import asyncio
import logging
-from typing import Dict, List, Optional, Any
+from typing import Any, Dict, List, Optional
from aiohttp import web
+from socketio.exceptions import ConnectionRefusedError as SocketIOConnectionError
from servicelib.observer import observe
from servicelib.utils import fire_and_forget_task, logged_gather
-from socketio.exceptions import ConnectionRefusedError as SocketIOConnectionError
from ..login.decorators import RQT_USERID_KEY, login_required
-from ..resource_manager.websocket_manager import managed_resource
from ..resource_manager.config import get_service_deletion_timeout
+from ..resource_manager.websocket_manager import managed_resource
from .config import get_socket_server
-from .handlers_utils import register_socketio_handler
from .events import post_messages
+from .handlers_utils import register_socketio_handler
ANONYMOUS_USER_ID = -1
_SOCKET_IO_AIOHTTP_REQUEST_KEY = "aiohttp.request"
diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py b/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py
index eb56ef26129..4f10ef951e6 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/handlers_utils.py
@@ -6,7 +6,6 @@
from .config import APP_CLIENT_SOCKET_DECORATED_HANDLERS_KEY, get_socket_server
-
socketio_handlers_registry = []
diff --git a/services/web/server/src/simcore_service_webserver/storage.py b/services/web/server/src/simcore_service_webserver/storage.py
index 734283adb61..b7a71934a1c 100644
--- a/services/web/server/src/simcore_service_webserver/storage.py
+++ b/services/web/server/src/simcore_service_webserver/storage.py
@@ -5,13 +5,13 @@
import logging
from aiohttp import web
+
from servicelib.application_keys import APP_OPENAPI_SPECS_KEY
+from servicelib.application_setup import ModuleCategory, app_module_setup
from . import storage_routes
from .storage_config import get_config
-from servicelib.application_setup import app_module_setup, ModuleCategory
-
log = logging.getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/storage_api.py b/services/web/server/src/simcore_service_webserver/storage_api.py
index cac23f492cb..d846dd524ab 100644
--- a/services/web/server/src/simcore_service_webserver/storage_api.py
+++ b/services/web/server/src/simcore_service_webserver/storage_api.py
@@ -5,9 +5,10 @@
from pprint import pformat
from aiohttp import web
-from servicelib.rest_responses import unwrap_envelope
from yarl import URL
+from servicelib.rest_responses import unwrap_envelope
+
from .storage_config import get_client_session, get_config
log = logging.getLogger(__name__)
diff --git a/services/web/server/src/simcore_service_webserver/storage_config.py b/services/web/server/src/simcore_service_webserver/storage_config.py
index 905b0cbd113..35c48099ce3 100644
--- a/services/web/server/src/simcore_service_webserver/storage_config.py
+++ b/services/web/server/src/simcore_service_webserver/storage_config.py
@@ -7,6 +7,7 @@
import trafaret as T
from aiohttp import ClientSession, web
+
from servicelib.application_keys import APP_CLIENT_SESSION_KEY, APP_CONFIG_KEY
CONFIG_SECTION_NAME = "storage"
diff --git a/services/web/server/src/simcore_service_webserver/storage_handlers.py b/services/web/server/src/simcore_service_webserver/storage_handlers.py
index 700829442db..f1ca3d12051 100644
--- a/services/web/server/src/simcore_service_webserver/storage_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/storage_handlers.py
@@ -4,9 +4,10 @@
"""
from aiohttp import web
+from yarl import URL
+
from servicelib.request_keys import RQT_USERID_KEY
from servicelib.rest_utils import extract_and_validate
-from yarl import URL
from .login.decorators import login_required
from .security_api import check_permission
diff --git a/services/web/server/src/simcore_service_webserver/storage_routes.py b/services/web/server/src/simcore_service_webserver/storage_routes.py
index a1f31ae18d8..663ccd5dbe8 100644
--- a/services/web/server/src/simcore_service_webserver/storage_routes.py
+++ b/services/web/server/src/simcore_service_webserver/storage_routes.py
@@ -7,6 +7,7 @@
from typing import List
from aiohttp import web
+
from servicelib import openapi
from . import storage_handlers
diff --git a/services/web/server/src/simcore_service_webserver/tag_handlers.py b/services/web/server/src/simcore_service_webserver/tag_handlers.py
index 14f9a835bd0..991f77202e8 100644
--- a/services/web/server/src/simcore_service_webserver/tag_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/tag_handlers.py
@@ -1,8 +1,9 @@
import sqlalchemy as sa
from aiohttp import web
-from servicelib.application_keys import APP_DB_ENGINE_KEY
from sqlalchemy import and_
+from servicelib.application_keys import APP_DB_ENGINE_KEY
+
from .db_models import tags
from .login.decorators import RQT_USERID_KEY, login_required
from .security_api import check_permission
diff --git a/services/web/server/src/simcore_service_webserver/tracing/__init__.py b/services/web/server/src/simcore_service_webserver/tracing/__init__.py
index ffb1a0621dc..717f626777c 100644
--- a/services/web/server/src/simcore_service_webserver/tracing/__init__.py
+++ b/services/web/server/src/simcore_service_webserver/tracing/__init__.py
@@ -4,9 +4,7 @@
from servicelib.application_keys import APP_CONFIG_KEY
from servicelib.application_setup import ModuleCategory, app_module_setup
-from servicelib.tracing import setup_tracing
-from servicelib.tracing import schema
-
+from servicelib.tracing import schema, setup_tracing
CONFIG_SECTION_NAME = "tracing"
diff --git a/services/web/server/src/simcore_service_webserver/users_api.py b/services/web/server/src/simcore_service_webserver/users_api.py
index 451a9e2aae6..ea35c123b37 100644
--- a/services/web/server/src/simcore_service_webserver/users_api.py
+++ b/services/web/server/src/simcore_service_webserver/users_api.py
@@ -94,6 +94,7 @@ async def update_user_profile(
assert resp.rowcount == 1 # nosec
+
async def is_user_guest(app: web.Application, user_id: int) -> bool:
"""Returns True if the user exists and is a GUEST"""
db = get_storage(app)
diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py
index 0adccde9cb9..70d54fa9d22 100644
--- a/services/web/server/tests/unit/conftest.py
+++ b/services/web/server/tests/unit/conftest.py
@@ -11,10 +11,10 @@
import json
import logging
import sys
+from asyncio import Future
from pathlib import Path
from typing import Dict
from uuid import uuid4
-from asyncio import Future
import pytest
diff --git a/services/web/server/tests/unit/test_catalog_setup.py b/services/web/server/tests/unit/test_catalog_setup.py
index 6b1aa9f3046..3e219c93c0d 100644
--- a/services/web/server/tests/unit/test_catalog_setup.py
+++ b/services/web/server/tests/unit/test_catalog_setup.py
@@ -7,7 +7,6 @@
import pytest
from yarl import URL
-
from servicelib.application import create_safe_application
from servicelib.client_session import APP_CLIENT_SESSION_KEY
from simcore_service_webserver.__version__ import api_version_prefix
diff --git a/services/web/server/tests/unit/test_healthcheck.py b/services/web/server/tests/unit/test_healthcheck.py
index d4bc839fb77..4a13aa8c51e 100644
--- a/services/web/server/tests/unit/test_healthcheck.py
+++ b/services/web/server/tests/unit/test_healthcheck.py
@@ -10,6 +10,7 @@
import pytest
from aiohttp import web
from tenacity import before_log, retry, stop_after_attempt, wait_fixed
+from yarl import URL
from pytest_simcore.helpers.utils_assert import assert_status
from servicelib.application import create_safe_application
@@ -25,7 +26,6 @@
)
from simcore_service_webserver.rest import setup_rest
from simcore_service_webserver.security import setup_security
-from yarl import URL
logger = logging.getLogger(__name__)
diff --git a/services/web/server/tests/unit/test_package.py b/services/web/server/tests/unit/test_package.py
index 53366274c55..d8c244b01c7 100644
--- a/services/web/server/tests/unit/test_package.py
+++ b/services/web/server/tests/unit/test_package.py
@@ -2,13 +2,13 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
-import pytest
-from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
import os
import re
-
from pathlib import Path
+import pytest
+
+from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
from simcore_service_webserver.cli import main
diff --git a/services/web/server/tests/unit/test_template_projects.py b/services/web/server/tests/unit/test_template_projects.py
index 0ca947e87cb..e5edf23c420 100644
--- a/services/web/server/tests/unit/test_template_projects.py
+++ b/services/web/server/tests/unit/test_template_projects.py
@@ -9,6 +9,8 @@
import aiohttp
import pytest
from jsonschema import SchemaError, ValidationError
+from yarl import URL
+
from servicelib.jsonschema_specs import create_jsonschema_specs
from servicelib.jsonschema_validation import validate_instance
from simcore_service_webserver.projects.projects_fakes import Fake
@@ -17,7 +19,6 @@
variable_pattern,
)
from simcore_service_webserver.resources import resources
-from yarl import URL
@pytest.fixture
diff --git a/tests/swarm-deploy/requirements/requirements.txt b/tests/swarm-deploy/requirements/requirements.txt
index 7f356bd52b0..e34055bce2b 100644
--- a/tests/swarm-deploy/requirements/requirements.txt
+++ b/tests/swarm-deploy/requirements/requirements.txt
@@ -4,40 +4,40 @@
#
# pip-compile --output-file=requirements/requirements.txt requirements/requirements.in
#
-aio-pika==6.6.0 # via -r requirements/requirements.in
+aio-pika==6.6.1 # via -r requirements/requirements.in
aiohttp==3.6.2 # via pytest-aiohttp
aiormq==3.2.2 # via aio-pika
async-timeout==3.0.1 # via aiohttp
attrs==19.3.0 # via aiohttp, pytest
-certifi==2020.4.5.1 # via requests
+certifi==2020.6.20 # via requests
chardet==3.0.4 # via aiohttp, requests
coverage==5.1 # via -r requirements/requirements.in, pytest-cov
docker==4.2.1 # via -r requirements/requirements.in
idna-ssl==1.1.0 # via aiohttp
idna==2.9 # via requests, yarl
-importlib-metadata==1.6.0 # via pluggy, pytest
-more-itertools==8.3.0 # via pytest
+importlib-metadata==1.6.1 # via pluggy, pytest
+more-itertools==8.4.0 # via pytest
multidict==4.7.6 # via aiohttp, yarl
packaging==20.4 # via pytest, pytest-sugar
pamqp==2.3.0 # via aiormq
pluggy==0.13.1 # via pytest
-py==1.8.1 # via pytest
+py==1.8.2 # via pytest
pyparsing==2.4.7 # via packaging
pytest-aiohttp==0.3.0 # via -r requirements/requirements.in
-pytest-cov==2.9.0 # via -r requirements/requirements.in
-pytest-instafail==0.4.1.post0 # via -r requirements/requirements.in
-pytest-mock==3.1.0 # via -r requirements/requirements.in
+pytest-cov==2.10.0 # via -r requirements/requirements.in
+pytest-instafail==0.4.2 # via -r requirements/requirements.in
+pytest-mock==3.1.1 # via -r requirements/requirements.in
pytest-runner==5.2 # via -r requirements/requirements.in
pytest-sugar==0.9.3 # via -r requirements/requirements.in
pytest==5.4.3 # via -r requirements/requirements.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar
pyyaml==5.3.1 # via -r requirements/requirements.in
-requests==2.23.0 # via docker
+requests==2.24.0 # via docker
six==1.15.0 # via docker, packaging, tenacity, websocket-client
tenacity==6.2.0 # via -r requirements/requirements.in
termcolor==1.1.0 # via pytest-sugar
typing-extensions==3.7.4.2 # via aiohttp
urllib3==1.25.9 # via requests
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
websocket-client==0.57.0 # via docker
yarl==1.4.2 # via aio-pika, aiohttp, aiormq
zipp==3.1.0 # via importlib-metadata
From fb9870f260d9cfe7abed3db92b4d5014a127fe40 Mon Sep 17 00:00:00 2001
From: Andrei Neagu
Date: Tue, 23 Jun 2020 12:23:13 +0200
Subject: [PATCH 10/43] Adds support for GPU scheduling of computational
services (#1553)
* added sidecar_gpu debug profile to launch.json
* added a new API to the director
it is now possible to get service extras
containing additional information which is hidden from the user
* sidecar now supports GPU services
- it can now schedule a container requesting GPU resources (VRAM)
- sidecar has 2 start modes: CPU and GPU
- for development a GPU sidecar was added
* updating comp_task now adds resource requirements
- uses the director API to determine if node requires GPU
- the requires_gpu field is used by the sidecar
* added call to get_service_extras
* fixes some pylint warnings
* this should redirect errors to stdout
* disabled warning
* added some logging to detect why CI fails
* added more debugging for CI
* refactored the way cgroups is parsed
* fixed pylint and message
* removed sidecar_gpu container used for development
* sidecar_gpu removed from the wrong list
* node_extras can be None and it is now accounted for
* correctly removes the sidecar_gpu now
* sidecar_gpu is not present in all deployments
removing it from both lists as it is only required in development
* added tests for the gpu and non gpu modes
fixed an issue where the process would crash
* fixed pylint
* refactored tests
- names are more readable
- removed some typos
* this is a dictionary
* added comment to remember how to run in dev
* updated service description and openapi spec
* moved generate_service_extras
* moved env var resolution to an appropriate place
* added annotations and fixed typos
* added annotations and less broad exception
* cleanedup implementation
* added more specific exception
* corrected wrong import path
* the sidecar_gpu is now properly started locally
tests were reverted
* fixed pylint caught error
* changing import schema
refactored tests to reflect changes
* removed unused import
* moved sidecar_gpu to docker-compose.devel.yml
* keeping track of these excludes
* replaced with cleaner implementation
* cleanedup and fixed boot modes for sidecar
it will now properly start as expected on all deployments
* fixing import and moved comment
* fixed an issue causing issues starting in dev mode
* api server ninja'd 3006 port moving to 3007
Co-authored-by: Andrei Neagu
---
.vscode-template/launch.json | 15 +-
api/specs/common/schemas/services.yaml | 27 ++-
api/specs/director/openapi.yaml | 39 ++++
.../api/v0/openapi.yaml | 177 ++++++++++++++++++
.../src/simcore_service_director/producer.py | 31 +++
.../simcore_service_director/rest/handlers.py | 22 +++
services/director/tests/test_docker_utils.py | 2 +
services/docker-compose.devel.yml | 57 +++++-
services/sidecar/requirements/_test.in | 1 +
services/sidecar/requirements/_test.txt | 26 +--
.../src/simcore_service_sidecar/celery.py | 37 +---
.../celery_configurator.py | 136 ++++++++++++++
.../src/simcore_service_sidecar/config.py | 4 +
.../src/simcore_service_sidecar/core.py | 19 ++
.../src/simcore_service_sidecar/exceptions.py | 17 +-
.../src/simcore_service_sidecar/utils.py | 78 ++++++++
.../tests/unit/test_celery_configurator.py | 151 +++++++++++++++
.../computation_api.py | 48 ++++-
.../director/director_api.py | 23 +++
19 files changed, 853 insertions(+), 57 deletions(-)
create mode 100644 services/sidecar/src/simcore_service_sidecar/celery_configurator.py
create mode 100644 services/sidecar/tests/unit/test_celery_configurator.py
diff --git a/.vscode-template/launch.json b/.vscode-template/launch.json
index 28d4310274e..d8fc96ae2ec 100644
--- a/.vscode-template/launch.json
+++ b/.vscode-template/launch.json
@@ -43,6 +43,19 @@
}
]
},
+ {
+ "name": "Python: Remote Attach sidecar_gpu",
+ "type": "python",
+ "request": "attach",
+ "port": 3007,
+ "host": "127.0.0.1",
+ "pathMappings": [
+ {
+ "localRoot": "${workspaceFolder}",
+ "remoteRoot": "/devel"
+ }
+ ]
+ },
{
"name": "Python: Remote Attach storage",
"type": "python",
@@ -86,4 +99,4 @@
"port": 9229
}
]
-}
\ No newline at end of file
+}
diff --git a/api/specs/common/schemas/services.yaml b/api/specs/common/schemas/services.yaml
index 29ce84ff5e0..12a54157f71 100644
--- a/api/specs/common/schemas/services.yaml
+++ b/api/specs/common/schemas/services.yaml
@@ -8,8 +8,31 @@ components:
data:
type: array
items:
- $ref: './node-meta-v0.0.1-converted.yaml'
+ $ref: "./node-meta-v0.0.1-converted.yaml"
+ error:
+ nullable: true
+ default: null
+
+ ServiceExtras:
+ type: object
+ required:
+ - node_requirements
+ properties:
+ node_requirements:
+ type: array
+ items:
+ type: string
+ enum:
+ - CPU
+ - GPU
+
+ ServiceExtrasEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ $ref: "#/components/schemas/ServiceExtras"
error:
nullable: true
default: null
-
\ No newline at end of file
diff --git a/api/specs/director/openapi.yaml b/api/specs/director/openapi.yaml
index a41a9991cf8..74dba637194 100644
--- a/api/specs/director/openapi.yaml
+++ b/api/specs/director/openapi.yaml
@@ -129,6 +129,42 @@ paths:
schema:
$ref: '#/components/schemas/ErrorEnveloped'
+ /service_extras/{service_key}/{service_version}:
+ get:
+ tags:
+ - users
+ summary: Returns the service's details which should be hidden from the user defined as extras.
+ description: Currently returns the node_requirements an array of resoruces needed for scheduling.
+ operationId: services_extras_get
+ parameters:
+ - $ref: '#/components/parameters/ServiceKeyPath'
+ - $ref: '#/components/parameters/ServiceVersionPath'
+ responses:
+ "200":
+ description: Success, returns an object containing details hidden from the user
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ServiceExtrasEnveloped'
+ "401":
+ description: Unauthorized access
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnveloped'
+ "404":
+ description: Service not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnveloped'
+ default:
+ description: Unexpected error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnveloped'
+
/running_interactive_services:
get:
tags:
@@ -403,5 +439,8 @@ components:
ServicesEnveloped:
$ref: '../common/schemas/services.yaml#/components/schemas/ServicesEnveloped'
+ ServiceExtrasEnveloped:
+ $ref: '../common/schemas/services.yaml#/components/schemas/ServiceExtrasEnveloped'
+
HealthCheckEnveloped:
$ref: '../common/schemas/health_check.yaml#/components/schemas/HealthCheckEnveloped'
diff --git a/services/director/src/simcore_service_director/api/v0/openapi.yaml b/services/director/src/simcore_service_director/api/v0/openapi.yaml
index 55e20af3fc9..883b49df9da 100644
--- a/services/director/src/simcore_service_director/api/v0/openapi.yaml
+++ b/services/director/src/simcore_service_director/api/v0/openapi.yaml
@@ -900,6 +900,163 @@ paths:
description: Error code
type: integer
example: 404
+ '/service_extras/{service_key}/{service_version}':
+ get:
+ tags:
+ - users
+ summary: Returns the service's details which should be hidden from the user defined as extras.
+ description: Currently returns the node_requirements an array of resoruces needed for scheduling.
+ operationId: services_extras_get
+ parameters:
+ - in: path
+ name: service_key
+ description: The key (url) of the service
+ required: true
+ schema:
+ type: string
+ description: distinctive name for the node based on the docker registry path
+ pattern: '^(simcore)/(services)/(comp|dynamic)(/[^\s/]+)+$'
+ example:
+ - simcore/services/comp/itis/sleeper
+ - simcore/services/dynamic/3dviewer
+ - in: path
+ name: service_version
+ description: The tag/version of the service
+ required: true
+ schema:
+ type: string
+ description: semantic version number
+ pattern: '^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$'
+ example:
+ - 1.0.0
+ - 0.0.1
+ responses:
+ '200':
+ description: 'Success, returns an object containing details hidden from the user'
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ required:
+ - node_requirements
+ properties:
+ node_requirements:
+ type: array
+ items:
+ type: string
+ enum:
+ - CPU
+ - GPU
+ error:
+ nullable: true
+ default: null
+ '401':
+ description: Unauthorized access
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ required:
+ - status
+ - message
+ properties:
+ message:
+ description: Error message
+ type: string
+ example: Unexpected error
+ errors:
+ type: array
+ items:
+ properties:
+ code:
+ type: string
+ description: Server Exception
+ example: ServiceUUIDNotFoundError
+ status:
+ description: Error code
+ type: integer
+ example: 404
+ '404':
+ description: Service not found
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ required:
+ - status
+ - message
+ properties:
+ message:
+ description: Error message
+ type: string
+ example: Unexpected error
+ errors:
+ type: array
+ items:
+ properties:
+ code:
+ type: string
+ description: Server Exception
+ example: ServiceUUIDNotFoundError
+ status:
+ description: Error code
+ type: integer
+ example: 404
+ default:
+ description: Unexpected error
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ required:
+ - status
+ - message
+ properties:
+ message:
+ description: Error message
+ type: string
+ example: Unexpected error
+ errors:
+ type: array
+ items:
+ properties:
+ code:
+ type: string
+ description: Server Exception
+ example: ServiceUUIDNotFoundError
+ status:
+ description: Error code
+ type: integer
+ example: 404
/running_interactive_services:
get:
tags:
@@ -2230,6 +2387,26 @@ components:
error:
nullable: true
default: null
+ ServiceExtrasEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ required:
+ - node_requirements
+ properties:
+ node_requirements:
+ type: array
+ items:
+ type: string
+ enum:
+ - CPU
+ - GPU
+ error:
+ nullable: true
+ default: null
HealthCheckEnveloped:
type: object
required:
diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py
index 70247f8b743..13ac078f229 100644
--- a/services/director/src/simcore_service_director/producer.py
+++ b/services/director/src/simcore_service_director/producer.py
@@ -946,3 +946,34 @@ async def stop_service(app: web.Application, node_uuid: str) -> None:
"DYNAMIC",
"SUCCESS",
)
+
+
+async def generate_service_extras(
+ app: web.Application, image_key: str, image_tag: str
+) -> Dict:
+ result = {}
+ labels = await registry_proxy.get_image_labels(app, image_key, image_tag)
+ log.debug("Compiling service extras from labels %s", labels)
+
+ # check physical node requirements
+ # all nodes require "CPU"
+ result["node_requirements"] = ["CPU"]
+ # check if the service requires GPU support
+
+ def validate_vram(entry_to_validate):
+ for element in (
+ entry_to_validate.get("value", {})
+ .get("Reservations", {})
+ .get("GenericResources", [])
+ ):
+ if element.get("DiscreteResourceSpec", {}).get("Kind") == "VRAM":
+ return True
+ return False
+
+ if SERVICE_RUNTIME_SETTINGS in labels:
+ service_settings = json.loads(labels[SERVICE_RUNTIME_SETTINGS])
+ for entry in service_settings:
+ if entry.get("name") == "Resources" and validate_vram(entry):
+ result["node_requirements"].append("GPU")
+
+ return result
diff --git a/services/director/src/simcore_service_director/rest/handlers.py b/services/director/src/simcore_service_director/rest/handlers.py
index b830d3a91a2..25c5105104e 100644
--- a/services/director/src/simcore_service_director/rest/handlers.py
+++ b/services/director/src/simcore_service_director/rest/handlers.py
@@ -83,6 +83,28 @@ async def services_by_key_version_get(
raise web_exceptions.HTTPInternalServerError(reason=str(err))
+async def services_extras_get(
+ request: web.Request, service_key: str, service_version: str
+) -> web.Response:
+ log.debug(
+ "Client does services_extras_get request %s with service_key %s, service_version %s",
+ request,
+ service_key,
+ service_version,
+ )
+ try:
+ service_extras = await producer.generate_service_extras(
+ request.app, service_key, service_version
+ )
+ return web.json_response(data=dict(data=service_extras))
+ except exceptions.ServiceNotAvailableError as err:
+ raise web_exceptions.HTTPNotFound(reason=str(err))
+ except exceptions.RegistryConnectionError as err:
+ raise web_exceptions.HTTPUnauthorized(reason=str(err))
+ except Exception as err:
+ raise web_exceptions.HTTPInternalServerError(reason=str(err))
+
+
async def running_interactive_services_list_get(
request: web.Request, user_id: str, project_id: str
) -> web.Response:
diff --git a/services/director/tests/test_docker_utils.py b/services/director/tests/test_docker_utils.py
index 48a122c4d52..2e50d9affb9 100644
--- a/services/director/tests/test_docker_utils.py
+++ b/services/director/tests/test_docker_utils.py
@@ -39,6 +39,8 @@ async def test_docker_client(loop):
],
)
async def test_swarm_method_with_no_swarm(loop, fct):
+ # if this fails on your development machine run
+ # `docker swarm leave --force` to leave the swarm
with pytest.raises(DockerError):
await fct()
diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml
index 47daede4cfe..7d5492cdb78 100644
--- a/services/docker-compose.devel.yml
+++ b/services/docker-compose.devel.yml
@@ -48,8 +48,9 @@ services:
- ./storage/client-sdk:/devel/services/storage/client-sdk
- ../packages:/devel/packages
environment:
+ # force to start as cpu mode otherwise it will boot in gpu mode in development
+ - START_AS_MODE_CPU=1
- SC_BOOT_MODE=debug-ptvsd
- - SIDECAR_LOGLEVEL=DEBUG
ports:
- "3002:3000"
deploy:
@@ -57,6 +58,60 @@ services:
endpoint_mode: vip
replicas: 1
+ # adding a separate worker to handling GPU mode for development
+ # in production the sidecar autodetects its hardware and start either in CPU or GPU mode
+ sidecar_gpu:
+ image: ${DOCKER_REGISTRY:-itisfoundation}/sidecar:${DOCKER_IMAGE_TAG:-latest}
+ init: true
+ deploy:
+ mode: replicated
+ replicas: 1
+ # NOTE: Allows 3007 to be exposed for ptvsd
+ endpoint_mode: vip
+ resources:
+ reservations:
+ cpus: "0.1"
+ memory: "100M"
+ volumes:
+ - input:/home/scu/input
+ - output:/home/scu/output
+ - log:/home/scu/log
+ - /var/run/docker.sock:/var/run/docker.sock
+ - ./sidecar:/devel/services/sidecar
+ - ./storage/client-sdk:/devel/services/storage/client-sdk
+ - ../packages:/devel/packages
+ ports:
+ - "3007:3000"
+ environment:
+ - SC_BOOT_MODE=debug-ptvsd
+ - SIDECAR_LOGLEVEL=DEBUG
+ - START_AS_MODE_GPU=1
+ - RABBIT_HOST=${RABBIT_HOST}
+ - RABBIT_PORT=${RABBIT_PORT}
+ - RABBIT_USER=${RABBIT_USER}
+ - RABBIT_PASSWORD=${RABBIT_PASSWORD}
+ - RABBIT_CHANNELS=${RABBIT_CHANNELS}
+ - POSTGRES_ENDPOINT=${POSTGRES_ENDPOINT}
+ - POSTGRES_USER=${POSTGRES_USER}
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+ - POSTGRES_DB=${POSTGRES_DB}
+ - POSTGRES_HOST=${POSTGRES_HOST}
+ - POSTGRES_PORT=${POSTGRES_PORT}
+ - S3_ENDPOINT=${S3_ENDPOINT}
+ - S3_ACCESS_KEY=${S3_ACCESS_KEY}
+ - S3_SECRET_KEY=${S3_SECRET_KEY}
+ - S3_BUCKET_NAME=${S3_BUCKET_NAME}
+ - STORAGE_ENDPOINT=${STORAGE_ENDPOINT}
+ - REGISTRY_URL=${REGISTRY_URL}
+ - REGISTRY_USER=${REGISTRY_USER}
+ - REGISTRY_PW=${REGISTRY_PW}
+ - SWARM_STACK_NAME=${SWARM_STACK_NAME:-simcore}
+ depends_on:
+ - rabbit
+ - postgres
+ networks:
+ - computational_services_subnet
+
storage:
volumes:
- ./storage:/devel/services/storage
diff --git a/services/sidecar/requirements/_test.in b/services/sidecar/requirements/_test.in
index 8c54392101f..be5db65a909 100644
--- a/services/sidecar/requirements/_test.in
+++ b/services/sidecar/requirements/_test.in
@@ -13,6 +13,7 @@ pytest-cov
pytest-instafail
pytest-mock
pytest-sugar
+pytest-lazy-fixture
# fixtures
aiopg
diff --git a/services/sidecar/requirements/_test.txt b/services/sidecar/requirements/_test.txt
index 28090520cb0..a9f449c9fe4 100644
--- a/services/sidecar/requirements/_test.txt
+++ b/services/sidecar/requirements/_test.txt
@@ -11,19 +11,19 @@ aiohttp==3.6.2 # via -r requirements/_base.txt, aiodocker, pytest-aio
aiopg==1.0.0 # via -r requirements/_base.txt, -r requirements/_test.in
aiormq==3.2.1 # via -r requirements/_base.txt, aio-pika
amqp==2.5.2 # via -r requirements/_base.txt, kombu
-astroid==2.3.3 # via pylint
+astroid==2.4.2 # via pylint
async-timeout==3.0.1 # via -r requirements/_base.txt, aiohttp
attrs==19.3.0 # via -r requirements/_base.txt, aiohttp, pytest
billiard==3.6.3.0 # via -r requirements/_base.txt, celery
celery==4.4.2 # via -r requirements/_base.txt
-certifi==2019.11.28 # via requests
+certifi==2020.4.5.2 # via requests
chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests
click==7.1.1 # via -r requirements/_base.txt
coverage==4.5.1 # via -r requirements/_test.in, coveralls, pytest-cov
-coveralls==1.11.1 # via -r requirements/_test.in
+coveralls==2.0.0 # via -r requirements/_test.in
dataclasses==0.7 # via -r requirements/_base.txt, pydantic
decorator==4.4.2 # via -r requirements/_base.txt, networkx
-docker==4.2.0 # via -r requirements/_test.in
+docker==4.2.1 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp
idna==2.9 # via -r requirements/_base.txt, idna-ssl, requests, yarl
@@ -32,7 +32,7 @@ isort==4.3.21 # via pylint
kombu==4.6.8 # via -r requirements/_base.txt, celery
lazy-object-proxy==1.4.3 # via astroid
mccabe==0.6.1 # via pylint
-more-itertools==8.2.0 # via pytest
+more-itertools==8.3.0 # via pytest
multidict==4.7.5 # via -r requirements/_base.txt, aiohttp, yarl
networkx==2.4 # via -r requirements/_base.txt
packaging==20.3 # via -r requirements/_base.txt, pytest, pytest-sugar
@@ -42,26 +42,28 @@ psycopg2-binary==2.8.4 # via -r requirements/_base.txt, aiopg
ptvsd==4.3.2 # via -r requirements/_test.in
py==1.8.1 # via pytest
pydantic==1.4 # via -r requirements/_base.txt
-pylint==2.4.4 # via -r requirements/_test.in
+pylint==2.5.3 # via -r requirements/_test.in
pyparsing==2.4.6 # via -r requirements/_base.txt, packaging
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.8.1 # via -r requirements/_test.in
+pytest-cov==2.9.0 # via -r requirements/_test.in
pytest-instafail==0.4.1.post0 # via -r requirements/_test.in
-pytest-mock==2.0.0 # via -r requirements/_test.in
-pytest-sugar==0.9.2 # via -r requirements/_test.in
-pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar
+pytest-lazy-fixture==0.6.3 # via -r requirements/_test.in
+pytest-mock==3.1.1 # via -r requirements/_test.in
+pytest-sugar==0.9.3 # via -r requirements/_test.in
+pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-lazy-fixture, pytest-mock, pytest-sugar
pytz==2019.3 # via -r requirements/_base.txt, celery
requests==2.23.0 # via coveralls, docker
six==1.14.0 # via -r requirements/_base.txt, astroid, docker, packaging, tenacity, websocket-client
sqlalchemy==1.3.15 # via -r requirements/_base.txt
tenacity==6.1.0 # via -r requirements/_base.txt
termcolor==1.1.0 # via pytest-sugar
+toml==0.10.1 # via pylint
typed-ast==1.4.1 # via astroid
typing-extensions==3.7.4.1 # via -r requirements/_base.txt, aiohttp
urllib3==1.25.8 # via -r requirements/_base.txt, requests
vine==1.3.0 # via -r requirements/_base.txt, amqp, celery
-wcwidth==0.1.9 # via pytest
+wcwidth==0.2.4 # via pytest
websocket-client==0.57.0 # via docker
-wrapt==1.11.2 # via astroid
+wrapt==1.12.1 # via astroid
yarl==1.4.2 # via -r requirements/_base.txt, aio-pika, aiohttp, aiormq
zipp==3.1.0 # via -r requirements/_base.txt, importlib-metadata
diff --git a/services/sidecar/src/simcore_service_sidecar/celery.py b/services/sidecar/src/simcore_service_sidecar/celery.py
index 2a7a641a98f..715f31c8357 100644
--- a/services/sidecar/src/simcore_service_sidecar/celery.py
+++ b/services/sidecar/src/simcore_service_sidecar/celery.py
@@ -1,41 +1,8 @@
-from celery import Celery, states
-
-from simcore_sdk.config.rabbit import Config as RabbitConfig
-
-from .celery_log_setup import get_task_logger
-from .cli import run_sidecar
from .remote_debug import setup_remote_debugging
-from .utils import wrap_async_call
-
-log = get_task_logger(__name__)
-log.info("Inititalizing celery app ...")
-
-rabbit_config = RabbitConfig()
+from .celery_configurator import get_rabbitmq_config_and_celery_app
setup_remote_debugging()
-# TODO: make it a singleton?
-app = Celery(
- rabbit_config.name, broker=rabbit_config.broker_url, backend=rabbit_config.backend
-)
-
-
-@app.task(name="comp.task", bind=True)
-def pipeline(self, user_id: str, project_id: str, node_id: str = None):
- try:
- next_task_nodes = wrap_async_call(
- run_sidecar(self.request.id, user_id, project_id, node_id)
- )
- self.update_state(state=states.SUCCESS)
-
- if next_task_nodes:
- for _node_id in next_task_nodes:
- _task = app.send_task(
- "comp.task", args=(user_id, project_id, _node_id), kwargs={}
- )
- except Exception: # pylint: disable=broad-except
- self.update_state(state=states.FAILURE)
- log.exception("Uncaught exception")
-
+rabbit_config, app = get_rabbitmq_config_and_celery_app()
__all__ = ["rabbit_config", "app"]
diff --git a/services/sidecar/src/simcore_service_sidecar/celery_configurator.py b/services/sidecar/src/simcore_service_sidecar/celery_configurator.py
new file mode 100644
index 00000000000..6c66bb76bda
--- /dev/null
+++ b/services/sidecar/src/simcore_service_sidecar/celery_configurator.py
@@ -0,0 +1,136 @@
+"""
+It is not possible to tell celery to refuse a task once it is sent.
+The solution is to use 2 separate queues, and have the CPU mode
+nodes accept all "comp.task".
+
+To decide where a task should be routed to, the current worker will
+use a look ahead function to check the type of upcoming task and
+schedule it accordingly.
+"""
+
+from typing import Tuple
+from celery import Celery, states
+from simcore_sdk.config.rabbit import Config as RabbitConfig
+from . import config
+from .cli import run_sidecar
+from .utils import wrap_async_call, is_gpu_node
+from .celery_log_setup import get_task_logger
+from .utils import assemble_celery_app
+from .core import does_task_require_gpu
+
+log = get_task_logger(__name__)
+
+
+# used by internal queues in this module
+_rabbit_config = RabbitConfig()
+_celery_app_cpu = assemble_celery_app("celery", _rabbit_config)
+_celery_app_gpu = assemble_celery_app("celery_gpu_mode", _rabbit_config)
+
+
+def dispatch_comp_task(user_id: str, project_id: str, node_id: str) -> None:
+ """Uses the director's API to determineate where the service needs
+ to be dispacted and sends it to the appropriate queue"""
+ # TODO: use _node_id to check if this service needs a GPU or NOT, ask director
+ # then schedule to the correct queue
+ # Add logging at #TODO: #1 and here to make sure the task with the same uuid is scheduled on the correct worker!
+ if node_id is None:
+ log.error("No node_id provided for project_id %s, skipping", project_id)
+ return
+
+ # query comp_tasks for the thing you need and see if it is false
+ try:
+ task_needs_gpu = wrap_async_call(does_task_require_gpu(node_id))
+ except Exception: # pylint: disable=broad-except
+ import traceback
+
+ log.error(
+ "%s\nThe above exception ocurred because it could not be "
+ "determined if task requires GPU for node_id %s",
+ traceback.format_exc(),
+ node_id,
+ )
+ return
+
+ if task_needs_gpu:
+ _dispatch_to_gpu_queue(user_id, project_id, node_id)
+ else:
+ _dispatch_to_cpu_queue(user_id, project_id, node_id)
+
+
+def _dispatch_to_cpu_queue(user_id: str, project_id: str, node_id: str) -> None:
+ _celery_app_cpu.send_task(
+ "comp.task.cpu", args=(user_id, project_id, node_id), kwargs={}
+ )
+
+
+def _dispatch_to_gpu_queue(user_id: str, project_id: str, node_id: str) -> None:
+ _celery_app_gpu.send_task(
+ "comp.task.gpu", args=(user_id, project_id, node_id), kwargs={}
+ )
+
+
+def cpu_gpu_shared_task(
+ celery_request, user_id: str, project_id: str, node_id: str = None
+) -> None:
+ """This is the original task which is run by either a GPU or CPU node"""
+ try:
+ log.info(
+ "Will dispatch to appropriate queue %s, %s, %s",
+ user_id,
+ project_id,
+ node_id,
+ )
+ next_task_nodes = wrap_async_call(
+ run_sidecar(celery_request.request.id, user_id, project_id, node_id)
+ )
+ celery_request.update_state(state=states.SUCCESS)
+
+ if next_task_nodes:
+ for _node_id in next_task_nodes:
+ dispatch_comp_task(user_id, project_id, _node_id)
+ except Exception: # pylint: disable=broad-except
+ celery_request.update_state(state=states.FAILURE)
+ log.exception("Uncaught exception")
+
+
+def configure_cpu_mode() -> Tuple[RabbitConfig, Celery]:
+ """Will configure and return a celery app targetting CPU mode nodes."""
+ log.info("Initializing celery app in CPU MODE ...")
+ app = _celery_app_cpu
+
+ # pylint: disable=unused-variable,unused-argument
+ @app.task(name="comp.task", bind=True, ignore_result=True)
+ def entrypoint(self, user_id: str, project_id: str, node_id: str = None) -> None:
+ cpu_gpu_shared_task(self, user_id, project_id, node_id)
+
+ @app.task(name="comp.task.cpu", bind=True)
+ def pipeline(self, user_id: str, project_id: str, node_id: str = None) -> None:
+ cpu_gpu_shared_task(self, user_id, project_id, node_id)
+
+ return (_rabbit_config, app)
+
+
+def configure_gpu_mode() -> Tuple[RabbitConfig, Celery]:
+ """Will configure and return a celery app targetting GPU mode nodes."""
+ log.info("Initializing celery app in GPU MODE ...")
+ app = _celery_app_gpu
+
+ # pylint: disable=unused-variable
+ @app.task(name="comp.task.gpu", bind=True)
+ def pipeline(self, user_id: str, project_id: str, node_id: str = None) -> None:
+ cpu_gpu_shared_task(self, user_id, project_id, node_id)
+
+ return (_rabbit_config, app)
+
+
+def get_rabbitmq_config_and_celery_app() -> Tuple[RabbitConfig, Celery]:
+ """Returns a CPU or GPU configured celery app"""
+ node_has_gpu_support = is_gpu_node()
+
+ if config.FORCE_START_CPU_MODE:
+ return configure_cpu_mode()
+
+ if config.FORCE_START_GPU_MODE or node_has_gpu_support:
+ return configure_gpu_mode()
+
+ return configure_cpu_mode()
diff --git a/services/sidecar/src/simcore_service_sidecar/config.py b/services/sidecar/src/simcore_service_sidecar/config.py
index e178fc31feb..46a8f7bca87 100644
--- a/services/sidecar/src/simcore_service_sidecar/config.py
+++ b/services/sidecar/src/simcore_service_sidecar/config.py
@@ -42,3 +42,7 @@
logging.getLogger("sqlalchemy.pool").setLevel(SIDECAR_LOGLEVEL)
RABBIT_CONFIG = RabbitConfig()
+
+# sidecar celery starting mode overwrite
+FORCE_START_CPU_MODE = os.environ.get("START_AS_MODE_CPU")
+FORCE_START_GPU_MODE = os.environ.get("START_AS_MODE_GPU")
diff --git a/services/sidecar/src/simcore_service_sidecar/core.py b/services/sidecar/src/simcore_service_sidecar/core.py
index 8b05e04bc81..c53c9459383 100644
--- a/services/sidecar/src/simcore_service_sidecar/core.py
+++ b/services/sidecar/src/simcore_service_sidecar/core.py
@@ -21,12 +21,31 @@
from .executor import Executor
from .rabbitmq import RabbitMQ
from .utils import execution_graph, find_entry_point, is_node_ready
+from .db import DBContextManager
log = get_task_logger(__name__)
log.setLevel(config.SIDECAR_LOGLEVEL)
node_port_log.setLevel(config.SIDECAR_LOGLEVEL)
+async def does_task_require_gpu(node_id: str) -> bool:
+ """Checks if the comp_task's image field if it requires to use the GPU"""
+ async with DBContextManager() as db_engine:
+ async with db_engine.acquire() as db_connection:
+ result = await db_connection.execute(
+ query=comp_tasks.select().where(comp_tasks.c.node_id == node_id)
+ )
+ task = await result.fetchone()
+
+ if not task:
+ log.warning("Task for node_id %s was not found", node_id)
+ raise exceptions.TaskNotFound("Could not find a relative task")
+
+    # Image has the following format
+ # {"name": "simcore/services/comp/itis/sleeper", "tag": "1.0.0", "requires_gpu": false}
+ return task["image"]["requires_gpu"]
+
+
async def _try_get_task_from_db(
db_connection: aiopg.sa.SAConnection,
graph: nx.DiGraph,
diff --git a/services/sidecar/src/simcore_service_sidecar/exceptions.py b/services/sidecar/src/simcore_service_sidecar/exceptions.py
index 48bd3d40bf3..1c7dc3e77c3 100644
--- a/services/sidecar/src/simcore_service_sidecar/exceptions.py
+++ b/services/sidecar/src/simcore_service_sidecar/exceptions.py
@@ -6,7 +6,7 @@ class SidecarException(Exception):
def __init__(self, msg: Optional[str] = None):
if msg is None:
- msg = "Unexpected error occured in director subpackage"
+ msg = "Unexpected error occurred in director subpackage"
super(SidecarException, self).__init__(msg)
@@ -15,3 +15,18 @@ class DatabaseError(SidecarException):
def __init__(self, msg: str):
super(DatabaseError, self).__init__(msg)
+
+
+class TaskNotFound(SidecarException):
+ """Task was not found """
+
+ def __init__(self, msg: str):
+ super().__init__(msg)
+
+
+class MoreThenOneItemDetected(Exception):
+ """Raised during the docker's container_id validation"""
+ def __init__(self, msg: Optional[str] = None):
+ if msg is None:
+ msg = "Unexpected error occurred in director subpackage"
+ super().__init__(msg)
diff --git a/services/sidecar/src/simcore_service_sidecar/utils.py b/services/sidecar/src/simcore_service_sidecar/utils.py
index e60ce11c2c8..f6bb313f73f 100644
--- a/services/sidecar/src/simcore_service_sidecar/utils.py
+++ b/services/sidecar/src/simcore_service_sidecar/utils.py
@@ -1,11 +1,18 @@
import asyncio
import logging
+import aiodocker
+import re
from typing import List
import aiopg
import networkx as nx
from simcore_postgres_database.sidecar_models import SUCCESS, comp_pipeline, comp_tasks
from sqlalchemy import and_
+from simcore_sdk.config.rabbit import Config as RabbitConfig
+from celery import Celery
+from .exceptions import MoreThenOneItemDetected
+
+logger = logging.getLogger(__name__)
def wrap_async_call(fct: asyncio.coroutine):
@@ -63,3 +70,74 @@ def execution_graph(pipeline: comp_pipeline) -> nx.DiGraph:
continue
G.add_edges_from([(node, n) for n in nodes])
return G
+
+
+def is_gpu_node() -> bool:
+ """Returns True if this node has support to GPU,
+ meaning that the `VRAM` label was added to it."""
+
+ def get_container_id_from_cgroup(cat_cgroup_content) -> str:
+ """Parses the result of cat cat /proc/self/cgroup and returns a container_id or
+ raises an error in case only one unique id was not found."""
+ possible_candidates = {x for x in cat_cgroup_content.split() if len(x) >= 64}
+ result_set = {x.split("/")[-1] for x in possible_candidates}
+ if len(result_set) != 1:
+ # pylint: disable=raising-format-tuple
+ raise MoreThenOneItemDetected(
+ "There should only be one entry in this set of possible container_ids"
+ ", have a look at %s" % possible_candidates
+ )
+ return_value = result_set.pop()
+ # check if length is 64 and all char match this regex [A-Fa-f0-9]
+ if len(return_value) != 64 and re.findall("[A-Fa-f0-9]{64}", return_value):
+ # pylint: disable=raising-format-tuple
+ raise ValueError(
+ "Found container ID is not a valid sha256 string %s", return_value
+ )
+ return return_value
+
+ async def async_is_gpu_node() -> bool:
+ cmd = "cat /proc/self/cgroup"
+ proc = await asyncio.create_subprocess_shell(
+ cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE,
+ )
+
+ stdout, _ = await proc.communicate()
+ container_id = get_container_id_from_cgroup(stdout.decode("utf-8").strip())
+
+ docker = aiodocker.Docker()
+
+ container = await docker.containers.get(container_id)
+ container_info = await container.show()
+ node_id = container_info["Config"]["Labels"]["com.docker.swarm.node.id"]
+ node_info = await docker.nodes.inspect(node_id=node_id)
+
+ generic_resources = (
+ node_info.get("Description", {})
+ .get("Resources", {})
+ .get("GenericResources", [])
+ )
+
+ has_gpu_support = False
+ for entry in generic_resources:
+ if entry.get("DiscreteResourceSpec", {}).get("Kind") == "VRAM":
+ has_gpu_support = True
+ break
+
+ await docker.close()
+
+ logger.info("Node GPU support: %s", has_gpu_support)
+ return has_gpu_support
+
+ return wrap_async_call(async_is_gpu_node())
+
+
+def assemble_celery_app(task_default_queue: str, rabbit_config: RabbitConfig) -> Celery:
+ """Returns an instance of Celery using a different RabbitMQ queue"""
+ app = Celery(
+ rabbit_config.name,
+ broker=rabbit_config.broker_url,
+ backend=rabbit_config.backend,
+ )
+ app.conf.task_default_queue = task_default_queue
+ return app
diff --git a/services/sidecar/tests/unit/test_celery_configurator.py b/services/sidecar/tests/unit/test_celery_configurator.py
new file mode 100644
index 00000000000..fe68735faa4
--- /dev/null
+++ b/services/sidecar/tests/unit/test_celery_configurator.py
@@ -0,0 +1,151 @@
+# pylint: disable=unused-argument,redefined-outer-name,no-member
+import pytest
+import asyncio
+
+from simcore_service_sidecar.celery_configurator import (
+ get_rabbitmq_config_and_celery_app,
+)
+from simcore_service_sidecar.utils import is_gpu_node
+from simcore_service_sidecar import config
+
+from celery import Celery
+from simcore_sdk.config.rabbit import Config as RabbitConfig
+
+
+def _toggle_gpu_mock(mocker, has_gpu: bool) -> None:
+    # mock output of cat /proc/self/cgroup
+ CAT_DATA = b"""
+ 12:hugetlb:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 11:freezer:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 10:blkio:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 9:devices:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 8:net_cls,net_prio:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 7:cpuset:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 6:perf_event:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 5:memory:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 4:rdma:/
+ 3:cpu,cpuacct:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 2:pids:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 1:name=systemd:/docker/2c52ab5a825dea0b074741fb1521c972866af7997a761eb312405b50ad289263
+ 0::/system.slice/containerd.service
+ """
+
+ future = asyncio.Future()
+ future.set_result((CAT_DATA, None))
+ comunicate = mocker.patch("asyncio.subprocess.Process.communicate")
+ comunicate.return_value = future
+
+ class MockContainer:
+ async def show(self):
+ data = {"Config": {"Labels": {"com.docker.swarm.node.id": "node_id"}}}
+ return data
+
+ future = asyncio.Future()
+ future.set_result(MockContainer())
+ containers_get = mocker.patch("aiodocker.containers.DockerContainers.get")
+ containers_get.return_value = future
+
+ def gpu_support_key():
+ """if GPU support is enabled this Kind key must be present"""
+ return "Kind" if has_gpu else "_"
+
+ payload = {
+ "Description": {
+ "Resources": {
+ "GenericResources": [
+ {"DiscreteResourceSpec": {gpu_support_key(): "VRAM"}}
+ ]
+ }
+ }
+ }
+
+ future = asyncio.Future()
+ future.set_result(payload)
+ containers_get = mocker.patch("aiodocker.nodes.DockerSwarmNodes.inspect")
+ containers_get.return_value = future
+
+
+@pytest.fixture()
+def mock_node_no_gpu(mocker) -> None:
+ _toggle_gpu_mock(mocker, False)
+
+
+@pytest.fixture()
+def mock_node_with_gpu(mocker) -> None:
+ _toggle_gpu_mock(mocker, True)
+
+
+@pytest.fixture(params=[True, False])
+def mock_node_has_gpu(request, mocker) -> None:
+ _toggle_gpu_mock(mocker, request.param)
+
+
+@pytest.fixture
+def force_cpu_mode(monkeypatch):
+ monkeypatch.setattr(config, "FORCE_START_CPU_MODE", "1", raising=True)
+
+
+@pytest.fixture
+def force_gpu_mode(monkeypatch):
+ monkeypatch.setattr(config, "FORCE_START_GPU_MODE", "1", raising=True)
+
+
+@pytest.mark.parametrize("gpu_support", [(pytest.lazy_fixture("mock_node_has_gpu")),])
+def test_force_start_cpu_mode(mocker, force_cpu_mode, gpu_support) -> None:
+ mocked_configure_cpu_mode = mocker.patch(
+ "simcore_service_sidecar.celery_configurator.configure_cpu_mode"
+ )
+
+ mocked_configure_cpu_mode.return_value = (None, None)
+
+ get_rabbitmq_config_and_celery_app()
+
+ mocked_configure_cpu_mode.assert_called()
+
+
+@pytest.mark.parametrize("gpu_support", [(pytest.lazy_fixture("mock_node_has_gpu")),])
+def test_force_start_gpu_mode(mocker, force_gpu_mode, gpu_support) -> None:
+ mocked_configure_gpu_mode = mocker.patch(
+ "simcore_service_sidecar.celery_configurator.configure_gpu_mode"
+ )
+ mocked_configure_gpu_mode.return_value = (None, None)
+
+ get_rabbitmq_config_and_celery_app()
+
+ mocked_configure_gpu_mode.assert_called()
+
+
+def test_auto_detects_gpu(mocker, mock_node_with_gpu) -> None:
+ mocked_configure_gpu_mode = mocker.patch(
+ "simcore_service_sidecar.celery_configurator.configure_gpu_mode"
+ )
+ mocked_configure_gpu_mode.return_value = (None, None)
+
+ get_rabbitmq_config_and_celery_app()
+
+ mocked_configure_gpu_mode.assert_called()
+
+
+@pytest.mark.parametrize(
+ "gpu_support,expected_value",
+ [
+ (pytest.lazy_fixture("mock_node_no_gpu"), False),
+ (pytest.lazy_fixture("mock_node_with_gpu"), True),
+ ],
+)
+def test_proper_has_gpu_mocking(expected_value, gpu_support) -> None:
+ assert is_gpu_node() is expected_value
+
+
+@pytest.mark.parametrize("gpu_support", [(pytest.lazy_fixture("mock_node_has_gpu")),])
+def test_force_start_cpu_ext_dep_mocking(force_cpu_mode, gpu_support) -> None:
+ rabbit_cfg, celery_app = get_rabbitmq_config_and_celery_app()
+ assert isinstance(rabbit_cfg, RabbitConfig)
+ assert isinstance(celery_app, Celery)
+
+
+@pytest.mark.parametrize("gpu_support", [(pytest.lazy_fixture("mock_node_has_gpu")),])
+def test_force_start_gpu_ext_dep_mocking(force_gpu_mode, gpu_support) -> None:
+ rabbit_cfg, celery_app = get_rabbitmq_config_and_celery_app()
+ assert isinstance(rabbit_cfg, RabbitConfig)
+ assert isinstance(celery_app, Celery)
diff --git a/services/web/server/src/simcore_service_webserver/computation_api.py b/services/web/server/src/simcore_service_webserver/computation_api.py
index 37795f4a0fb..53b1b0f0408 100644
--- a/services/web/server/src/simcore_service_webserver/computation_api.py
+++ b/services/web/server/src/simcore_service_webserver/computation_api.py
@@ -20,6 +20,9 @@
from simcore_postgres_database.models.comp_tasks import NodeClass
from simcore_postgres_database.webserver_models import comp_pipeline, comp_tasks
+# TODO: move this to computation_models
+from simcore_service_webserver.computation_models import to_node_class
+
from .director import director_api
log = logging.getLogger(__file__)
@@ -65,13 +68,37 @@ async def _get_node_details(
app, node_key, node_version
)
if not node_details:
- log.error("Error could not find service %s:%s", node_key, node_version)
+ log.error(
+ "Error (while getting node details) could not find service %s:%s",
+ node_key,
+ node_version,
+ )
raise web_exceptions.HTTPNotFound(
reason=f"details of service {node_key}:{node_version} could not be found"
)
return node_details
+async def _get_node_extras(
+ node_key: str, node_version: str, app: web.Application
+) -> Dict:
+ """Returns the service_extras if possible otherwise None"""
+ if to_node_class(node_key) == NodeClass.FRONTEND:
+ return None
+
+ node_extras = await director_api.get_services_extras(app, node_key, node_version)
+ if not node_extras:
+ log.error(
+ "Error (while getting node extras) could not find service %s:%s",
+ node_key,
+ node_version,
+ )
+ raise web_exceptions.HTTPNotFound(
+ reason=f"details of service {node_key}:{node_version} could not be found"
+ )
+ return node_extras
+
+
async def _build_adjacency_list(
node_uuid: str,
node_schema: Dict,
@@ -118,9 +145,6 @@ async def _build_adjacency_list(
async def _parse_project_data(pipeline_data: Dict, app: web.Application):
- # TODO: move this to computation_models
- from .computation_models import to_node_class
-
dag_adjacency_list = dict()
tasks = dict()
@@ -148,6 +172,8 @@ async def _parse_project_data(pipeline_data: Dict, app: web.Application):
)
node_details = await _get_node_details(node_key, node_version, app)
+ node_extras = await _get_node_extras(node_key, node_version, app)
+
log.debug(
"node %s:%s has schema:\n %s", node_key, node_version, pformat(node_details)
)
@@ -167,11 +193,23 @@ async def _parse_project_data(pipeline_data: Dict, app: web.Application):
"inputs": node_details["inputs"],
"outputs": node_details["outputs"],
}
+
+            # _get_node_extras returns None in some situations, the check below is required
+ requires_gpu = (
+ "GPU" in node_extras.get("node_requirements", [])
+ if node_extras is not None
+ else False
+ )
+
task = {
"schema": node_schema,
"inputs": node_inputs,
"outputs": node_outputs,
- "image": {"name": node_key, "tag": node_version},
+ "image": {
+ "name": node_key,
+ "tag": node_version,
+ "requires_gpu": requires_gpu,
+ },
"node_class": to_node_class(node_key),
}
diff --git a/services/web/server/src/simcore_service_webserver/director/director_api.py b/services/web/server/src/simcore_service_webserver/director/director_api.py
index 816af500e6a..351bcb38ca5 100644
--- a/services/web/server/src/simcore_service_webserver/director/director_api.py
+++ b/services/web/server/src/simcore_service_webserver/director/director_api.py
@@ -122,3 +122,26 @@ async def get_service_by_key_version(
if not services:
return
return services[0]
+
+
+async def get_services_extras(
+ app: web.Application, service_key: str, service_version: str
+) -> Optional[Dict]:
+ session, api_endpoint = _get_director_client(app)
+
+ url = (
+ api_endpoint
+ / "service_extras"
+ / urllib.parse.quote(service_key, safe="")
+ / service_version
+ )
+ async with session.get(url) as resp:
+ if resp.status != 200:
+ log.warning("Status not 200 %s", resp)
+ return
+ payload = await resp.json()
+ service_extras = payload["data"]
+ if not service_extras:
+ log.warning("Service extras is missing %s", resp)
+ return
+ return service_extras
From a329aeade349a279003c6a73b83e62fcc6bef71c Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Wed, 24 Jun 2020 12:52:01 +0200
Subject: [PATCH 11/43] Maintenance/cleanup api server (#1578)
During the design of this first version, there were many variants of the design that remained in the code. This PR cleans up all the deprecated code before extending it with new features. Some relevant changes:
- Pruned and upgraded requirements
- Removed trials sandbox folder
- Removed unused models, db repos and routes
- Left only basic authentication
- Added client wrapper for webserver sessions (see osparc-simcore/services/api-server/src/simcore_service_api_server/services/webserver.py)
- Reverted logger from loguru to default python logger
- documented dev: services/api-server/README.md
---
README.md | 3 +-
services/api-server/Makefile | 6 +-
services/api-server/README.md | 22 +-
services/api-server/requirements/_base.in | 23 +-
services/api-server/requirements/_base.txt | 22 +-
services/api-server/requirements/_test.in | 1 +
services/api-server/requirements/_test.txt | 31 ++-
.../api-server/sandbox/_test_client_sdk.py | 209 ------------------
services/api-server/sandbox/_test_schemas.py | 21 --
services/api-server/sandbox/api-key-auth.py | 38 ----
services/api-server/sandbox/get_app_state.py | 24 --
.../api-server/sandbox/model_conversions.py | 91 --------
.../api-server/sandbox/pydantic-settings.py | 51 -----
services/api-server/sandbox/simple_app.py | 45 ----
.../api/dependencies/auth_api_key.py | 46 ----
.../api/dependencies/auth_basic.py | 48 ----
.../api/dependencies/auth_oath2.py | 100 ---------
.../api/dependencies/authentication.py | 49 +++-
.../api/dependencies/database.py | 2 +-
.../api/dependencies/webserver.py | 26 ++-
.../simcore_service_api_server/api/root.py | 9 +-
.../api/routes/authentication/__init__.py | 0
.../api/routes/authentication/api_key.py | 15 --
.../api/routes/authentication/oauth2.py | 77 -------
.../api/routes/studies.py | 50 -----
.../api/routes/users.py | 56 ++---
.../core/application.py | 8 +-
.../simcore_service_api_server/core/events.py | 18 +-
.../core/openapi.py | 6 +-
.../simcore_service_api_server/db/events.py | 10 +-
.../db/repositories/__init__.py | 1 +
.../db/repositories/{base.py => _base.py} | 0
.../db/repositories/api_keys.py | 12 +-
.../db/repositories/users.py | 89 +-------
.../models/domain/users.py | 32 ---
.../models/schemas/api_keys.py | 12 -
.../models/schemas/tokens.py | 25 ---
.../models/schemas/users.py | 12 -
.../services/jwt.py | 66 ------
.../services/remote_debug.py | 9 +-
.../services/security.py | 25 ---
.../services/serialization.py | 4 -
.../services/webserver.py | 92 +++++++-
services/api-server/tests/unit/_helpers.py | 2 +-
services/api-server/tests/unit/conftest.py | 5 +-
.../api-server/tests/unit/test_api_meta.py | 1 -
services/api-server/tests/unit/test_jwt.py | 35 ---
.../api-server/tests/unit/test_security.py | 14 --
.../api-server/tests/unit/test_settings.py | 4 +-
49 files changed, 285 insertions(+), 1262 deletions(-)
delete mode 100644 services/api-server/sandbox/_test_client_sdk.py
delete mode 100644 services/api-server/sandbox/_test_schemas.py
delete mode 100644 services/api-server/sandbox/api-key-auth.py
delete mode 100644 services/api-server/sandbox/get_app_state.py
delete mode 100644 services/api-server/sandbox/model_conversions.py
delete mode 100644 services/api-server/sandbox/pydantic-settings.py
delete mode 100644 services/api-server/sandbox/simple_app.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/routes/authentication/__init__.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py
delete mode 100644 services/api-server/src/simcore_service_api_server/api/routes/studies.py
rename services/api-server/src/simcore_service_api_server/db/repositories/{base.py => _base.py} (100%)
delete mode 100644 services/api-server/src/simcore_service_api_server/models/domain/users.py
delete mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py
delete mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/tokens.py
delete mode 100644 services/api-server/src/simcore_service_api_server/models/schemas/users.py
delete mode 100644 services/api-server/src/simcore_service_api_server/services/jwt.py
delete mode 100644 services/api-server/src/simcore_service_api_server/services/security.py
delete mode 100644 services/api-server/tests/unit/test_jwt.py
delete mode 100644 services/api-server/tests/unit/test_security.py
diff --git a/README.md b/README.md
index 580f6c4e3d5..b8186e962c9 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
[![Code style: black]](https://github.com/psf/black)
[![Requires.io]](https://requires.io/github/ITISFoundation/osparc-simcore/requirements/?branch=master "State of third party python dependencies")
[![travis-ci]](https://travis-ci.org/ITISFoundation/osparc-simcore "State of CI: build, test and pushing images")
-
+[![Github-CI Push/PR]](https://github.com/ITISFoundation/osparc-simcore/actions?query=workflow%3A%22Github-CI+Push%2FPR%22+branch%3Amaster)
[![coveralls.io]](https://coveralls.io/github/ITISFoundation/osparc-simcore?branch=master)
[![codecov.io]](https://codecov.io/gh/ITISFoundation/osparc-simcore)
[![github.io]](https://itisfoundation.github.io/)
@@ -26,6 +26,7 @@
[coveralls.io]:https://coveralls.io/repos/github/ITISFoundation/osparc-simcore/badge.svg?branch=master
[codecov.io]:https://codecov.io/gh/ITISFoundation/osparc-simcore/branch/master/graph/badge.svg
[license]:https://img.shields.io/github/license/ITISFoundation/osparc-simcore
+[Github-CI Push/PR]:https://github.com/ITISFoundation/osparc-simcore/workflows/Github-CI%20Push/PR/badge.svg
diff --git a/services/api-server/Makefile b/services/api-server/Makefile
index f14a4727c80..d85c03a936f 100644
--- a/services/api-server/Makefile
+++ b/services/api-server/Makefile
@@ -46,19 +46,21 @@ docker-compose.yml:
run-devel: .env docker-compose.yml down ## runs app on host with pg fixture for development [for development]
# Starting db (under $<)
docker-compose up --detach
+ # Creating db-tables: user=key, password=secret
+ @$(MAKE) db-tables
# start app (under $<)
uvicorn simcore_service_api_server.__main__:the_app \
--reload --reload-dir $(SRC_DIR) \
--port=8000 --host=0.0.0.0
.PHONY: db-tables
-db-tables: .env-devel ## upgrades and create tables [for development]
+db-tables: .env ## upgrades and create tables [for development]
# Upgrading and creating tables
export $(shell grep -v '^#' $< | xargs -d '\n'); \
python3 tests/utils/init-pg.py
.PHONY: db-migration
-db-migration: .env-devel ## runs discover and upgrade on running pg-db [for development]
+db-migration: .env ## runs discover and upgrade on running pg-db [for development]
# Creating tables
export $(shell grep -v '^#' $< | xargs -d '\n'); \
sc-pg discover && sc-pg upgrade
diff --git a/services/api-server/README.md b/services/api-server/README.md
index 4386afbe3a3..ce70592724c 100644
--- a/services/api-server/README.md
+++ b/services/api-server/README.md
@@ -1,7 +1,6 @@
# api-server
[![image-size]](https://microbadger.com/images/itisfoundation/api-server. "More on itisfoundation/api-server.:staging-latest image")
-
[![image-badge]](https://microbadger.com/images/itisfoundation/api-server "More on Public API Server image in registry")
[![image-version]](https://microbadger.com/images/itisfoundation/api-server "More on Public API Server image in registry")
[![image-commit]](https://microbadger.com/images/itisfoundation/api-server "More on Public API Server image in registry")
@@ -16,6 +15,27 @@ Platform's public API server
+## Development
+
+Setup environment
+```cmd
+make devenv
+source .venv/bin/activate
+cd services/api-service
+make install-dev
+```
+Then
+```cmd
+make run-devel
+```
+
+will start the api-server in development-mode together with a postgres db initialized with test data. Open the following sites and use the test credentials ``user=key, password=secret`` to manually test the API:
+
+- http://127.0.0.1:8000/docs: redoc documentation
+- http://127.0.0.1:8000/dev/docs: swagger type of documentation
+
+
+
## References
diff --git a/services/api-server/requirements/_base.in b/services/api-server/requirements/_base.in
index 89ed918052c..c7d7f1aae2b 100644
--- a/services/api-server/requirements/_base.in
+++ b/services/api-server/requirements/_base.in
@@ -5,18 +5,21 @@
-r ../../../packages/postgres-database/requirements/_base.in
+# fastapi and extensions
fastapi[all]
-aiopg[sa]
-tenacity
-passlib[bcrypt]
-loguru
+async-exit-stack # not needed when python>=3.7
+async-generator # not needed when python>=3.7
+
+# data models
pydantic[dotenv]
-cryptography
-httpx
-# TODO: check alternative https://github.com/latchset/jwcrypto/
-pyjwt>=1.7.1 # Vulnerable SEE https://auth0.com/blog/critical-vulnerabilities-in-json-web-token-libraries/?_ga=2.21160507.1609921856.1592236287-1918774871.1591379535
+# database
+aiopg[sa]
+# web client
+httpx
-async-exit-stack # not needed when python>=3.7
-async-generator # not needed when python>=3.7
+#
+attrs
+tenacity
+cryptography
diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt
index 8bed8f166a8..9b443040e7a 100644
--- a/services/api-server/requirements/_base.txt
+++ b/services/api-server/requirements/_base.txt
@@ -4,30 +4,29 @@
#
# pip-compile --output-file=requirements/_base.txt requirements/_base.in
#
-aiocontextvars==0.2.2 # via loguru
aiofiles==0.5.0 # via fastapi
aiopg[sa]==1.0.0 # via -r requirements/_base.in
aniso8601==7.0.0 # via graphene
async-exit-stack==1.0.1 # via -r requirements/_base.in, fastapi
async-generator==1.10 # via -r requirements/_base.in, fastapi
-bcrypt==3.1.7 # via passlib
-certifi==2020.4.5.2 # via httpx, requests
-cffi==1.14.0 # via bcrypt, cryptography
+attrs==19.3.0 # via -r requirements/_base.in
+certifi==2020.6.20 # via httpx, requests
+cffi==1.14.0 # via cryptography
chardet==3.0.4 # via httpx, requests
click==7.1.2 # via uvicorn
-contextvars==2.4 # via aiocontextvars, sniffio
+contextvars==2.4 # via sniffio
cryptography==2.9.2 # via -r requirements/_base.in
dataclasses==0.7 # via pydantic
dnspython==1.16.0 # via email-validator
email-validator==1.1.1 # via fastapi
-fastapi[all]==0.57.0 # via -r requirements/_base.in
+fastapi[all]==0.58.0 # via -r requirements/_base.in
graphene==2.1.8 # via fastapi
graphql-core==2.3.2 # via graphene, graphql-relay
graphql-relay==2.0.1 # via graphene
h11==0.9.0 # via httpcore, uvicorn
h2==3.2.0 # via httpcore
hpack==3.0.0 # via h2
-hstspreload==2020.6.9 # via httpx
+hstspreload==2020.6.23 # via httpx
httpcore==0.9.1 # via httpx
httptools==0.1.1 # via uvicorn
httpx==0.13.3 # via -r requirements/_base.in
@@ -36,23 +35,20 @@ idna==2.9 # via email-validator, httpx, requests, yarl
immutables==0.14 # via contextvars
itsdangerous==1.1.0 # via fastapi
jinja2==2.11.2 # via fastapi
-loguru==0.5.1 # via -r requirements/_base.in
markupsafe==1.1.1 # via jinja2
multidict==4.7.6 # via yarl
-orjson==3.1.0 # via fastapi
-passlib[bcrypt]==1.7.2 # via -r requirements/_base.in
+orjson==3.1.2 # via fastapi
promise==2.3 # via graphql-core, graphql-relay
psycopg2-binary==2.8.5 # via aiopg, sqlalchemy
pycparser==2.20 # via cffi
pydantic[dotenv]==1.5.1 # via -r requirements/_base.in, fastapi
-pyjwt==1.7.1 # via -r requirements/_base.in
python-dotenv==0.13.0 # via pydantic
python-multipart==0.0.5 # via fastapi
pyyaml==5.3.1 # via fastapi
-requests==2.23.0 # via fastapi
+requests==2.24.0 # via fastapi
rfc3986==1.4.0 # via httpx
rx==1.6.1 # via graphql-core
-six==1.15.0 # via bcrypt, cryptography, graphene, graphql-core, graphql-relay, python-multipart, tenacity
+six==1.15.0 # via cryptography, graphene, graphql-core, graphql-relay, python-multipart, tenacity
sniffio==1.1.0 # via httpcore, httpx
sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiopg
starlette==0.13.4 # via fastapi
diff --git a/services/api-server/requirements/_test.in b/services/api-server/requirements/_test.in
index 9e44fc7dc40..18fbc1d038a 100644
--- a/services/api-server/requirements/_test.in
+++ b/services/api-server/requirements/_test.in
@@ -19,6 +19,7 @@ asgi_lifespan
# fixtures
faker
+passlib[bcrypt]
# db migration
alembic
diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt
index cacef8fad5c..e66f5e54f44 100644
--- a/services/api-server/requirements/_test.txt
+++ b/services/api-server/requirements/_test.txt
@@ -4,7 +4,6 @@
#
# pip-compile --output-file=requirements/_test.txt requirements/_test.in
#
-aiocontextvars==0.2.2 # via -r requirements/_base.txt, loguru
aiofiles==0.5.0 # via -r requirements/_base.txt, fastapi
aiohttp==3.6.2 # via pytest-aiohttp
aiopg[sa]==1.0.0 # via -r requirements/_base.txt
@@ -15,16 +14,16 @@ astroid==2.4.2 # via pylint
async-exit-stack==1.0.1 # via -r requirements/_base.txt, asgi-lifespan, fastapi
async-generator==1.10 # via -r requirements/_base.txt, fastapi
async-timeout==3.0.1 # via aiohttp
-attrs==19.3.0 # via aiohttp, pytest, pytest-docker
-bcrypt==3.1.7 # via -r requirements/_base.txt, passlib
-certifi==2020.4.5.2 # via -r requirements/_base.txt, httpx, requests
+attrs==19.3.0 # via -r requirements/_base.txt, aiohttp, pytest, pytest-docker
+bcrypt==3.1.7 # via passlib
+certifi==2020.6.20 # via -r requirements/_base.txt, httpx, requests
cffi==1.14.0 # via -r requirements/_base.txt, bcrypt, cryptography
change-case==0.5.2 # via -r requirements/_test.in
chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, httpx, requests
click==7.1.2 # via -r requirements/_base.txt, uvicorn
-codecov==2.1.6 # via -r requirements/_test.in
-contextvars==2.4 # via -r requirements/_base.txt, aiocontextvars, sniffio
-coverage==4.5.4 # via codecov, coveralls, pytest-cov
+codecov==2.1.7 # via -r requirements/_test.in
+contextvars==2.4 # via -r requirements/_base.txt, sniffio
+coverage==5.1 # via codecov, coveralls, pytest-cov
coveralls==2.0.0 # via -r requirements/_test.in
cryptography==2.9.2 # via -r requirements/_base.txt
dataclasses==0.7 # via -r requirements/_base.txt, pydantic
@@ -32,15 +31,15 @@ dnspython==1.16.0 # via -r requirements/_base.txt, email-validator
docker==4.2.1 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
email-validator==1.1.1 # via -r requirements/_base.txt, fastapi
-faker==4.1.0 # via -r requirements/_test.in
-fastapi[all]==0.57.0 # via -r requirements/_base.txt
+faker==4.1.1 # via -r requirements/_test.in
+fastapi[all]==0.58.0 # via -r requirements/_base.txt
graphene==2.1.8 # via -r requirements/_base.txt, fastapi
graphql-core==2.3.2 # via -r requirements/_base.txt, graphene, graphql-relay
graphql-relay==2.0.1 # via -r requirements/_base.txt, graphene
h11==0.9.0 # via -r requirements/_base.txt, httpcore, uvicorn
h2==3.2.0 # via -r requirements/_base.txt, httpcore
hpack==3.0.0 # via -r requirements/_base.txt, h2
-hstspreload==2020.6.9 # via -r requirements/_base.txt, httpx
+hstspreload==2020.6.23 # via -r requirements/_base.txt, httpx
httpcore==0.9.1 # via -r requirements/_base.txt, httpx
httptools==0.1.1 # via -r requirements/_base.txt, uvicorn
httpx==0.13.3 # via -r requirements/_base.txt
@@ -53,23 +52,21 @@ isort==4.3.21 # via pylint
itsdangerous==1.1.0 # via -r requirements/_base.txt, fastapi
jinja2==2.11.2 # via -r requirements/_base.txt, -r requirements/_test.in, fastapi
lazy-object-proxy==1.4.3 # via astroid
-loguru==0.5.1 # via -r requirements/_base.txt
mako==1.1.3 # via alembic
markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2, mako
mccabe==0.6.1 # via pylint
more-itertools==8.4.0 # via pytest
multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl
-orjson==3.1.0 # via -r requirements/_base.txt, fastapi
+orjson==3.1.2 # via -r requirements/_base.txt, fastapi
packaging==20.4 # via pytest
-passlib[bcrypt]==1.7.2 # via -r requirements/_base.txt
+passlib[bcrypt]==1.7.2 # via -r requirements/_test.in
pluggy==0.13.1 # via pytest
promise==2.3 # via -r requirements/_base.txt, graphql-core, graphql-relay
psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy
ptvsd==4.3.2 # via -r requirements/_test.in
-py==1.8.1 # via pytest
+py==1.8.2 # via pytest
pycparser==2.20 # via -r requirements/_base.txt, cffi
pydantic[dotenv]==1.5.1 # via -r requirements/_base.txt, fastapi
-pyjwt==1.7.1 # via -r requirements/_base.txt
pylint==2.5.3 # via -r requirements/_test.in
pyparsing==2.4.7 # via packaging
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
@@ -83,7 +80,7 @@ python-dotenv==0.13.0 # via -r requirements/_base.txt, pydantic
python-editor==1.0.4 # via alembic
python-multipart==0.0.5 # via -r requirements/_base.txt, fastapi
pyyaml==5.3.1 # via -r requirements/_base.txt, fastapi
-requests==2.23.0 # via -r requirements/_base.txt, codecov, coveralls, docker, fastapi
+requests==2.24.0 # via -r requirements/_base.txt, codecov, coveralls, docker, fastapi
rfc3986==1.4.0 # via -r requirements/_base.txt, httpx
rx==1.6.1 # via -r requirements/_base.txt, graphql-core
six==1.15.0 # via -r requirements/_base.txt, astroid, bcrypt, cryptography, docker, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity, websocket-client
@@ -99,7 +96,7 @@ ujson==3.0.0 # via -r requirements/_base.txt, fastapi
urllib3==1.25.9 # via -r requirements/_base.txt, requests
uvicorn==0.11.5 # via -r requirements/_base.txt, fastapi
uvloop==0.14.0 # via -r requirements/_base.txt, uvicorn
-wcwidth==0.2.4 # via pytest
+wcwidth==0.2.5 # via pytest
websocket-client==0.57.0 # via docker
websockets==8.1 # via -r requirements/_base.txt, uvicorn
wrapt==1.12.1 # via astroid
diff --git a/services/api-server/sandbox/_test_client_sdk.py b/services/api-server/sandbox/_test_client_sdk.py
deleted file mode 100644
index f2a17a19081..00000000000
--- a/services/api-server/sandbox/_test_client_sdk.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# pylint: skip-file
-# fmt: off
-
-# simcore_api_sdk.abc.py
-import abc as _abc
-import json
-from pprint import pprint
-from typing import Any, Dict, List, Optional
-
-import aiohttp
-# DEV ---------------------------------------------------------------------
-import attr
-import pytest
-# simcore_api_sdk/v0/me_api.py
-from attr import NOTHING
-from starlette.testclient import TestClient
-from yarl import URL
-
-from simcore_service_api_server import application, endpoints_check
-from simcore_service_api_server.__version__ import api_vtag
-from simcore_service_api_server.settings import AppSettings
-
-
-@pytest.fixture
-def client(monkeypatch) -> TestClient:
- monkeypatch.setenv("POSTGRES_USER", "test")
- monkeypatch.setenv("POSTGRES_PASSWORD", "test")
- monkeypatch.setenv("POSTGRES_DB", "test")
- monkeypatch.setenv("LOGLEVEL", "debug")
- monkeypatch.setenv("SC_BOOT_MODE", "production")
-
- # app
- test_settings = AppSettings()
- app = application.create(settings=test_settings)
-
- # routes
- app.include_router(endpoints_check.router, tags=["check"])
-
- # test client:
- # Context manager to trigger events: https://fastapi.tiangolo.com/advanced/testing-events/
- with TestClient(app) as cli:
- yield cli
-
-
-@attr.s(auto_attribs=True)
-class ApiResponse:
- status: int
- headers: Dict
- body: Dict
-
-
-@attr.s(auto_attribs=True)
-class ApiConfig:
- session: aiohttp.ClientSession
- api_key: str = attr.ib(repr=False)
- api_secret: str = attr.ib(repr=False)
- base_url: URL = URL(f"https://api.osparc.io/{api_vtag}/")
-
- # TODO: add validation here
-
-
-class API(_abc.ABC):
- def __init__(self, cfg: ApiConfig, *, parent=None):
- self._cfg = cfg
-
- async def _make_request(
- self,
- method: str,
- url: str,
- *,
- url_params: Optional[Dict] = None,
- body_params: Optional[Dict] = None,
- headers: Optional[Dict] = None,
- body: bytes = b"",
- **requester_params: Any,
- ) -> ApiResponse:
- filled_url = self._cfg.base_url # format_url(url, url_params)
-
- # TODO: it is always json !!
- if body_params is not None:
- body = json.dumps(body_params)
-
- resp: aiohttp.ClientResponse = await self._cfg.session.request(
- method, filled_url, body, headers, **requester_params
- )
-
- response = ApiResponse(
- status=resp.status, headers=resp.headers, body=await resp.json()
- )
- return response
-
-
-# simcore_api_sdk/v0/__init__.py
-# from ._openapi import ApiSession
-
-
-class MeAPI(API):
- async def get(self):
- pass
-
- async def update(self, *, name: str = NOTHING, full_name: str = NOTHING):
- """
- Only writable fields can be updated
- """
-
-
-@attr.s(auto_attribs=True)
-class StudiesAPI(API):
- _next_page_token: int = NOTHING
-
- async def list(
- self,
- *,
- page_size: int = NOTHING,
- keep_page_token: bool = False,
- order_by: str = NOTHING,
- filter_fields: str = NOTHING,
- ):
- pass
-
- async def get(self, uid: str):
- pass
-
- async def create(self):
- pass
-
- async def update(self, uid: str, *, from_other=None, **study_fields):
- # TODO: how update fields like a.b.c.??
- pass
-
- async def remove(self, uid: str) -> None:
- # wait ??
- pass
-
-
-# simcore_api_sdk/v0/_openapi.py
-class ApiSession:
- def __init__(
- self, api_key: str, api_secret: str, base_url: URL = NOTHING,
- ):
- # TODO: setup auth here
- self.session = aiohttp.ClientSession(auth=None)
-
- cfg = ApiConfig(self.session, api_key, api_secret, base_url)
- self._cfg = cfg
-
- # API
- self.me = MeAPI(cfg)
- self.studies = StudiesAPI(cfg)
-
- async def __aenter__(self):
- return self
-
- async def __aexit__(self, exc_type, exc, tb):
- await self.session.close()
-
-
-# ----------------------------------------------------
-@pytest.mark.skip(reason="Under dev")
-async def test_client_sdk():
- # TODO: design SDK for these calls
- # TODO: these examples should run test tests and automaticaly added to redoc
-
- # from simcore_api_sdk.v0 import ApiSession
-
- async with ApiSession(api_key="1234", api_secret="secret") as api:
-
- # GET /me is a special resource that is unique
- me: Profile = await api.me.get()
- pprint(me)
-
- # can update SOME entries
- await api.me.update(name="pcrespov", full_name="Pedro Crespo")
-
- # corresponds to the studies I have access ??
-
- ## https://cloud.google.com/apis/design/standard_methods
-
- # GET /studies
- studies: List[Dict] = await api.studies.list()
-
- # Implements Pagination: https://cloud.google.com/apis/design/design_patterns#list_pagination
- first_studies = await api.studies.list(page_size=3, keep_page_token=True)
- assert api.studies._next_page_token != NOTHING
-
- next_5_studies = await api.studies.list(page_size=5)
-
- # Results ordering: https://cloud.google.com/apis/design/design_patterns#sorting_order
- sorted_studies: List[Dict] = await api.studies.list(order_by="foo desc,bar")
-
- # List filter field: https://cloud.google.com/apis/design/naming_convention#list_filter_field
- studies: List[Dict] = await api.studies.list(filter_fields="foo.zoo, bar")
- assert studies[0]
-
- # GET /studies/{prj_id}
- prj: Dict = await api.studies.get("1234")
-
- # POST /studies
- new_prj: Study = await api.studies.create()
-
- # PUT or PATCH /studies/{prj_id}
- # this is a patch
- await api.studies.update(prj.id, description="Bar")
-
- # this is a put: using copy_from
- await api.studies.update(prj.id, copy_from=new_prj)
-
- # DELETE /studies/{prj_id}
- await api.studies.remove(prj.id)
diff --git a/services/api-server/sandbox/_test_schemas.py b/services/api-server/sandbox/_test_schemas.py
deleted file mode 100644
index 7482c3aba47..00000000000
--- a/services/api-server/sandbox/_test_schemas.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from typing import Optional
-
-import sqlalchemy as sa
-from aiopg.sa.engine import Engine
-from aiopg.sa.result import ResultProxy, RowProxy
-
-import simcore_api_server.model.pg_tables as tbl
-from simcore_api_server.schemas import UserInDB
-
-
-async def test_row_proxy_into_model(engine: Engine):
- # test how RowProxy converts into into UserInDB
-
- with engine.acquire() as conn:
- stmt = sa.select([tbl.users,]).where(tbl.users.c.id == 1)
-
- res: ResultProxy = await conn.execute(stmt)
- row: Optional[RowProxy] = await res.fetchone()
-
- user = UserInDB.from_orm(row)
- assert user
diff --git a/services/api-server/sandbox/api-key-auth.py b/services/api-server/sandbox/api-key-auth.py
deleted file mode 100644
index 824fdb5f8e4..00000000000
--- a/services/api-server/sandbox/api-key-auth.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# pylint: skip-file
-# fmt: off
-
-import uvicorn
-from fastapi import Depends, FastAPI, HTTPException, Security
-from fastapi.security.api_key import APIKeyHeader
-from starlette.status import HTTP_403_FORBIDDEN
-
-API_KEY = "1234567asdfgh"
-API_KEY_NAME = "access_token" # this is acces
-
-
-def get_active_user(
- api_key: str = Security(APIKeyHeader(name=API_KEY_NAME, scheme_name="ApiKeyAuth"))
-) -> str:
- # the api_key is a jwt created upon login
- #
- # - decode jwt
- # - authenticate user
-
- if api_key != API_KEY:
- raise HTTPException(
- status_code=HTTP_403_FORBIDDEN, detail="Invalid credentials"
- )
-
- return user_id
-
-
-app = FastAPI()
-
-
-@app.get("/foo")
-def foo(user_id: str = Depends(get_active_user)):
- return f"hi {user_id}"
-
-
-if __name__ == "__main__":
- uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/services/api-server/sandbox/get_app_state.py b/services/api-server/sandbox/get_app_state.py
deleted file mode 100644
index 342971bd1dd..00000000000
--- a/services/api-server/sandbox/get_app_state.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import uvicorn
-from fastapi import Depends, FastAPI
-from fastapi.applications import State
-from fastapi.requests import Request
-
-app = FastAPI(title="get_app_state")
-
-# Dependences WITH arguents
-def _get_app(request: Request) -> FastAPI:
- return request.app
-
-
-def _get_app_state(request: Request) -> State:
- return request.app.state
-
-
-@app.get("/app")
-async def get_server_ip(my_app: FastAPI = Depends(_get_app)):
- assert my_app == app
- return my_app.title
-
-
-if __name__ == "__main__":
- uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/services/api-server/sandbox/model_conversions.py b/services/api-server/sandbox/model_conversions.py
deleted file mode 100644
index 0c2c4dfc86c..00000000000
--- a/services/api-server/sandbox/model_conversions.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from pprint import pprint
-from typing import List
-
-import attr
-from pydantic import BaseModel, ValidationError, constr
-from sqlalchemy import Column, Integer, String
-from sqlalchemy.dialects.postgresql import ARRAY
-from sqlalchemy.ext.declarative import declarative_base
-
-# https://pydantic-docs.helpmanual.io/usage/models/#orm-mode-aka-arbitrary-class-instances
-
-Base = declarative_base()
-
-
-class CompanyOrm(Base):
- __tablename__ = "companies"
- id = Column(Integer, primary_key=True, nullable=False)
- public_key = Column(String(20), index=True, nullable=False, unique=True)
- name = Column(String(63), unique=True)
- domains = Column(ARRAY(String(255)))
-
-
-class Bar(BaseModel):
- apple = "x"
- banana = "y"
-
-
-class CompanyModel(BaseModel):
- id: int
- public_key: constr(max_length=20)
- name: constr(max_length=63)
- # NO DOMAINS!
- other_value: int = 33
-
- foo: Bar = Bar()
-
- class Config:
- orm_mode = True
-
-
-@attr.s(auto_attribs=True)
-class Company:
- id: int
- name: str
- public_key: str = 55
-
-
-if __name__ == "__main__":
-
- co_orm = CompanyOrm(
- id=123,
- public_key="foobar",
- name="Testing",
- domains=["example.com", "foobar.com"],
- )
- pprint(co_orm)
-
- print("-" * 30)
-
- co_model = CompanyModel.from_orm(co_orm)
-
- print(co_model.__fields_set__)
- assert "other_value" not in co_model.__fields_set__
- assert "foo" not in co_model.__fields_set__
-
- print("-" * 30)
- assert "other_value" in co_model.__fields__
-
- pprint(co_model)
- pprint(co_model.dict())
- # co_model.json()
-
- print("-" * 30)
- pprint(co_model.schema())
- # co_model.schema_json() ->
-
- print("-" * 30)
- print(co_model.__config__)
-
- # CAN convert from attr type! ORM is everything with attributes?
- obj = Company(22, "pedro", "foo")
-
- import pdb
-
- pdb.set_trace()
- co_model.from_orm(obj)
-
- try:
- co_model.parse_obj(obj)
- except ValidationError as ee:
- print("obj has to be a dict!")
diff --git a/services/api-server/sandbox/pydantic-settings.py b/services/api-server/sandbox/pydantic-settings.py
deleted file mode 100644
index 77bfdbeb8ca..00000000000
--- a/services/api-server/sandbox/pydantic-settings.py
+++ /dev/null
@@ -1,51 +0,0 @@
-## https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support
-import os
-from pathlib import Path
-
-from pydantic import BaseSettings, SecretStr
-
-env_path = Path(".env-ignore")
-
-env_path.write_text(
- """
-# ignore comment
-ENVIRONMENT="production"
-REDIS_ADDRESS=localhost:6379
-MEANING_OF_LIFE=4000000
-MY_VAR='Hello world'
-POSTGRES_USER=test
-POSTGRES_PASSWORD=test
-POSTGRES_DB=test
-"""
-)
-
-
-os.environ["MEANING_OF_LIFE"] = "42"
-
-
-class PostgresSettings(BaseSettings):
- user: str
- password: SecretStr
- db: str
-
- class Config:
- env_file = env_path
- env_prefix = "POSTGRES_"
-
-
-class Settings(BaseSettings):
- environment: str
- meaning_of_life: int = 33
-
- pg = PostgresSettings()
-
- class Config:
- env_file = env_path
-
-
-settings = Settings()
-
-print(settings.json())
-assert settings.meaning_of_life == 42
-assert settings.environment == "production"
-assert settings.pg.password.get_secret_value() == "test"
diff --git a/services/api-server/sandbox/simple_app.py b/services/api-server/sandbox/simple_app.py
deleted file mode 100644
index 7fccb34353c..00000000000
--- a/services/api-server/sandbox/simple_app.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# pylint: skip-file
-# fmt: off
-
-import json
-from pathlib import Path
-from typing import Dict, List, Optional, Tuple
-
-import uvicorn
-from fastapi import Depends, FastAPI
-from fastapi.requests import Request
-from pydantic import BaseModel, Field
-
-app = FastAPI(title="My app")
-
-
-def _get_app(request: Request) -> FastAPI:
- return request.app
-
-
-def get_my_user_id(app: FastAPI):
- return 3
-
-
-class ItemFOO(BaseModel):
- name: str
- description: str = None
- price: float
- tax: Optional[float] = Field(None, description="description tax")
-
-
-@app.post("/studies/{study_id}")
-async def get_studies(q: int, study_id: int, body: List[ItemFOO]) -> ItemFOO:
-
- return body
-
-
-def dump_oas():
- Path("openapi-ignore.json").write_text(json.dumps(app.openapi(), indent=2))
-
-
-app.add_event_handler("startup", dump_oas)
-
-if __name__ == "__main__":
-
- uvicorn.run("simple_app:app", reload=True, port=8002)
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py
deleted file mode 100644
index 9bcb8e6a962..00000000000
--- a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_api_key.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from typing import Optional
-
-from fastapi import Depends, HTTPException, Security, status
-from fastapi.security.api_key import APIKeyHeader
-
-from ...db.repositories.users import UsersRepository
-from ...models.schemas.tokens import TokenData
-from ...services.jwt import get_access_token_data
-from .database import get_repository
-
-# Declaration of security scheme:
-# - Adds components.securitySchemes['APiKey'] to openapi.yaml
-# - callable with request as argument -> extracts token from Authentication header
-#
-
-
-API_KEY_NAME = "APIKey"
-api_key_scheme = APIKeyHeader(name=API_KEY_NAME)
-
-
-async def get_current_user_id(
- access_token: str = Security(api_key_scheme),
- users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
-) -> int:
- def _create_credentials_exception(msg: str):
-
- return HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=msg,
- headers={"WWW-Authenticate": API_KEY_NAME},
- )
-
- # decodes and validates jwt-based access token
- token_data: Optional[TokenData] = get_access_token_data(access_token)
- if token_data is None:
- raise _create_credentials_exception("Could not validate credentials")
-
- # identify user
- identified = await users_repo.any_user_with_id(token_data.user_id)
- if not identified:
- raise _create_credentials_exception("Could not validate credentials")
-
- return token_data.user_id
-
-
-get_active_user_id = get_current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py
deleted file mode 100644
index 809298acf33..00000000000
--- a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_basic.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from fastapi import Depends, HTTPException, Security, status
-from fastapi.security import HTTPBasic, HTTPBasicCredentials
-
-from ...db.repositories.api_keys import ApiKeysRepository
-from ...db.repositories.users import UsersRepository
-from .database import get_repository
-
-# SEE https://swagger.io/docs/specification/authentication/basic-authentication/
-basic_scheme = HTTPBasic()
-
-
-def _create_exception():
- _unauthorized_headers = {
- "WWW-Authenticate": f'Basic realm="{basic_scheme.realm}"'
- if basic_scheme.realm
- else "Basic"
- }
- return HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail="Invalid API credentials",
- headers=_unauthorized_headers,
- )
-
-
-async def get_current_user_id(
- credentials: HTTPBasicCredentials = Security(basic_scheme),
- apikeys_repo: ApiKeysRepository = Depends(get_repository(ApiKeysRepository)),
-) -> int:
- user_id = await apikeys_repo.get_user_id(
- api_key=credentials.username, api_secret=credentials.password
- )
- if not user_id:
- raise _create_exception()
- return user_id
-
-
-async def get_active_user_email(
- user_id: int = Depends(get_current_user_id),
- users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
-) -> str:
- email = await users_repo.get_email_from_user_id(user_id)
- if not email:
- raise _create_exception()
- return email
-
-
-# alias
-get_active_user_id = get_current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py b/services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py
deleted file mode 100644
index 806ae19c519..00000000000
--- a/services/api-server/src/simcore_service_api_server/api/dependencies/auth_oath2.py
+++ /dev/null
@@ -1,100 +0,0 @@
-""" This submodule includes responsibilities from authorization server
-
- +--------+ +---------------+
- | |--(A)- Authorization Request ->| Resource |
- | | | Owner | Authorization request
- | |<-(B)-- Authorization Grant ---| |
- | | +---------------+
- | |
- | | +---------------+
- | |--(C)-- Authorization Grant -->| Authorization |
- | Client | | Server | Token request
- | |<-(D)----- Access Token -------| |
- | | +---------------+
- | |
- | | +---------------+
- | |--(E)----- Access Token ------>| Resource |
- | | | Server |
- | |<-(F)--- Protected Resource ---| |
- +--------+ +---------------+
-
- Figure 1: Abstract Protocol Flow
-
-SEE
- - https://oauth.net/2/
- - https://tools.ietf.org/html/rfc6749
-"""
-# TODO: this module shall delegate the auth functionality to a separate service
-
-from typing import Optional
-
-from fastapi import Depends, HTTPException, Security, status
-from fastapi.security import OAuth2PasswordBearer, SecurityScopes
-from loguru import logger
-
-from ...__version__ import api_vtag
-from ...db.repositories.users import UsersRepository
-from ...models.schemas.tokens import TokenData
-from ...services.jwt import get_access_token_data
-from .database import get_repository
-
-# Declaration of security scheme:
-# - Adds components.securitySchemes['OAuth2PasswordBearer'] to openapi.yaml
-# - callable with request as argument -> extracts token from Authentication header
-#
-# TODO: check organization of scopes in other APIs
-oauth2_scheme = OAuth2PasswordBearer(
- tokenUrl=f"{api_vtag}/token",
- scopes={"read": "Read-only access", "write": "Write access"},
-)
-
-
-async def get_current_user_id(
- security_scopes: SecurityScopes,
- access_token: str = Depends(oauth2_scheme),
- users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
-) -> int:
- """
- access_token: extracted access_token from request header
- security_scopes: iterable with all REQUIRED scopes to run operation
- """
-
- def _create_credentials_exception(msg: str):
- authenticate_value = "Bearer"
- if security_scopes.scopes:
- authenticate_value += f' scope="{security_scopes.scope_str}"'
-
- return HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=msg,
- headers={"WWW-Authenticate": authenticate_value},
- )
-
- # decodes and validates jwt-based access token
- token_data: Optional[TokenData] = get_access_token_data(access_token)
- if token_data is None:
- raise _create_credentials_exception("Could not validate credentials")
-
- # identify user
- identified = await users_repo.any_user_with_id(token_data.user_id)
- if not identified:
- raise _create_credentials_exception("Could not validate credentials")
-
- # Checks whether user has ALL required scopes for this call
- for required_scope in security_scopes.scopes:
- if required_scope not in token_data.scopes:
- logger.debug(
- "Access denied. Client is missing required scope '{}' ", required_scope
- )
- raise _create_credentials_exception(
- "Missing required scope for this operation"
- )
-
- return token_data.user_id
-
-
-async def get_active_user_id(
- current_user_id: int = Security(get_current_user_id, scopes=["read"])
-) -> int:
- # FIXME: Adds read scope. rename properly and activate scopes
- return current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py b/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py
index 84cbbf6d0c6..809298acf33 100644
--- a/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/authentication.py
@@ -1,3 +1,48 @@
-from .auth_basic import get_active_user_email, get_active_user_id
+from fastapi import Depends, HTTPException, Security, status
+from fastapi.security import HTTPBasic, HTTPBasicCredentials
-__all__ = ["get_active_user_id", "get_active_user_email"]
+from ...db.repositories.api_keys import ApiKeysRepository
+from ...db.repositories.users import UsersRepository
+from .database import get_repository
+
+# SEE https://swagger.io/docs/specification/authentication/basic-authentication/
+basic_scheme = HTTPBasic()
+
+
+def _create_exception():
+ _unauthorized_headers = {
+ "WWW-Authenticate": f'Basic realm="{basic_scheme.realm}"'
+ if basic_scheme.realm
+ else "Basic"
+ }
+ return HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Invalid API credentials",
+ headers=_unauthorized_headers,
+ )
+
+
+async def get_current_user_id(
+ credentials: HTTPBasicCredentials = Security(basic_scheme),
+ apikeys_repo: ApiKeysRepository = Depends(get_repository(ApiKeysRepository)),
+) -> int:
+ user_id = await apikeys_repo.get_user_id(
+ api_key=credentials.username, api_secret=credentials.password
+ )
+ if not user_id:
+ raise _create_exception()
+ return user_id
+
+
+async def get_active_user_email(
+ user_id: int = Depends(get_current_user_id),
+ users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
+) -> str:
+ email = await users_repo.get_email_from_user_id(user_id)
+ if not email:
+ raise _create_exception()
+ return email
+
+
+# alias
+get_active_user_id = get_current_user_id
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/database.py b/services/api-server/src/simcore_service_api_server/api/dependencies/database.py
index 96903570029..ef94b71e26c 100644
--- a/services/api-server/src/simcore_service_api_server/api/dependencies/database.py
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/database.py
@@ -4,7 +4,7 @@
from fastapi import Depends
from fastapi.requests import Request
-from ...db.repositories.base import BaseRepository
+from ...db.repositories import BaseRepository
def _get_db_engine(request: Request) -> Engine:
diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
index dd387f12c9a..e36d8390fc9 100644
--- a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
+++ b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
@@ -3,16 +3,20 @@
from typing import Dict, Optional
from cryptography.fernet import Fernet
-from fastapi import Depends, HTTPException, status
+from fastapi import Depends, FastAPI, HTTPException, status
from fastapi.requests import Request
-from httpx import AsyncClient
from ...core.settings import AppSettings, WebServerSettings
+from ...services.webserver import AuthSession
from .authentication import get_active_user_email
UNAVAILBLE_MSG = "backend service is disabled or unreachable"
+def _get_app(request: Request) -> FastAPI:
+ return request.app
+
+
def _get_settings(request: Request) -> WebServerSettings:
app_settings: AppSettings = request.app.state.settings
return app_settings.webserver
@@ -22,13 +26,6 @@ def _get_encrypt(request: Request) -> Optional[Fernet]:
return getattr(request.app.state, "webserver_fernet", None)
-def get_webserver_client(request: Request) -> AsyncClient:
- client = getattr(request.app.state, "webserver_client", None)
- if not client:
- raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE, detail=UNAVAILBLE_MSG)
- return client
-
-
def get_session_cookie(
identity: str = Depends(get_active_user_email),
settings: WebServerSettings = Depends(_get_settings),
@@ -53,3 +50,14 @@ def get_session_cookie(
encrypted_cookie_data = fernet.encrypt(cookie_data).decode("utf-8")
return {cookie_name: encrypted_cookie_data}
+
+
+def get_webserver_session(
+ app: FastAPI = Depends(_get_app),
+ session_cookies: Dict = Depends(get_session_cookie),
+) -> AuthSession:
+ """
+ Lifetime of AuthSession wrapper is one request because it needs different session cookies
+ Lifetime of embedded client is attached to the app lifetime
+ """
+ return AuthSession.create(app, session_cookies)
diff --git a/services/api-server/src/simcore_service_api_server/api/root.py b/services/api-server/src/simcore_service_api_server/api/root.py
index cab791b02c1..b3a6454d7ae 100644
--- a/services/api-server/src/simcore_service_api_server/api/root.py
+++ b/services/api-server/src/simcore_service_api_server/api/root.py
@@ -4,12 +4,7 @@
router = APIRouter()
router.include_router(health.router)
-router.include_router(meta.router, tags=["meta"], prefix="/meta")
-
-# TODO: keeps for oauth or apikey schemes
-# router.include_router(authentication.router, tags=["authentication"], prefix="/users")
+# API
+router.include_router(meta.router, tags=["meta"], prefix="/meta")
router.include_router(users.router, tags=["users"], prefix="/me")
-
-## TODO: disables studies for the moment
-# router.include_router(studies.router, tags=["studies"], prefix="/studies")
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/authentication/__init__.py b/services/api-server/src/simcore_service_api_server/api/routes/authentication/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py b/services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py
deleted file mode 100644
index 385887dbbbe..00000000000
--- a/services/api-server/src/simcore_service_api_server/api/routes/authentication/api_key.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# FIXME: Until tests
-# pylint: skip-file
-#
-
-from fastapi import APIRouter
-
-router = APIRouter()
-
-
-@router.post("/login", response_model=UserInResponse, name="auth:login")
-async def login(
- user_login: UserInLogin = Body(..., embed=True, alias="user"),
- users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
-) -> UserInResponse:
- pass
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py b/services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py
deleted file mode 100644
index 1502be49133..00000000000
--- a/services/api-server/src/simcore_service_api_server/api/routes/authentication/oauth2.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from io import StringIO
-from typing import Optional
-
-from fastapi import APIRouter, Depends, HTTPException
-from fastapi.security import OAuth2PasswordRequestForm
-from loguru import logger
-
-from ....db.repositories.users import UsersRepository
-from ....models.schemas.tokens import Token, TokenData
-from ....services.jwt import create_access_token
-from ....services.serialization import json_dumps
-from ...dependencies.database import get_repository
-
-router = APIRouter()
-
-
-def _compose_msg(*, fd=None, rd=None) -> str:
- assert not (fd ^ rd), "Mutally exclusive" # nosec
-
- stream = StringIO()
-
- if fd:
- print("Form Request", "-" * 20, file=stream)
- for (
- attr
- ) in "grant_type username password scopes client_id client_secret".split():
- print("-", attr, ":", getattr(fd, attr), file=stream)
- print("-" * 20, file=stream)
- elif rd:
- print("{:-^30}".format("/token response"), file=stream)
- print(json_dumps(rd), file=stream)
- print("-" * 30, file=stream)
-
- return stream.getvalue()
-
-
-# NOTE: this path has to be the same as simcore_service_api_server.auth.oauth2_scheme
-@router.post("/token", response_model=Token)
-async def login_for_access_token(
- form_data: OAuth2PasswordRequestForm = Depends(),
- users_repo: UsersRepository = Depends(get_repository(UsersRepository)),
-):
- """
- Returns an access-token provided a valid authorization grant
- """
-
- #
- # - This entrypoint is part of the Authorization Server
- # - Implements access point to obtain access-tokens
- #
- # | | +---------------+
- # | |--(C)-- Authorization Grant -->| Authorization |
- # | Client | | Server | Token request
- # | |<-(D)----- Access Token -------| |
- # | | +---------------+
- #
-
- logger.debug(_compose_msg(fd=form_data))
-
- user_id: Optional[int] = await users_repo.get_user_id(
- user=form_data.username, password=form_data.password
- )
-
- # TODO: check is NOT banned
-
- if not user_id:
- raise HTTPException(status_code=400, detail="Incorrect username or password")
-
- # FIXME: expiration disabled since for the moment we do NOT have any renewal mechanims in place!!!
- access_token = create_access_token(TokenData(user_id), expires_in_mins=None)
-
- # NOTE: this reponse is defined in Oath2
- resp_data = {"access_token": access_token, "token_type": "bearer"}
-
- logger.debug(_compose_msg(rd=resp_data))
-
- return resp_data
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies.py b/services/api-server/src/simcore_service_api_server/api/routes/studies.py
deleted file mode 100644
index 953e096a331..00000000000
--- a/services/api-server/src/simcore_service_api_server/api/routes/studies.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from fastapi import APIRouter, Security
-
-from ..dependencies.authentication import get_active_user_id
-
-router = APIRouter()
-
-
-@router.get("")
-async def list_studies(user_id: int = Security(get_active_user_id, scopes=["read"])):
- # TODO: Replace code by calls to web-server api
- return [{"project_id": "Foo", "owner": user_id}]
-
-
-@router.get("/{study_id}")
-async def get_study(
- study_id: str, user_id: int = Security(get_active_user_id, scopes=["read"]),
-):
- # TODO: Replace code by calls to web-server api
- return [{"project_id": study_id, "owner": user_id}]
-
-
-@router.post("")
-async def create_study(user_id: int = Security(get_active_user_id, scopes=["write"])):
- # TODO: Replace code by calls to web-server api
- return {"project_id": "Foo", "owner": user_id}
-
-
-@router.put("/{study_id}")
-async def replace_study(
- study_id: str, user_id: int = Security(get_active_user_id, scopes=["write"]),
-):
- # TODO: Replace code by calls to web-server api
- return {"project_id": study_id, "owner": user_id}
-
-
-@router.patch("/{study_id}")
-async def update_study(
- study_id: str, user_id: int = Security(get_active_user_id, scopes=["write"]),
-):
- # TODO: Replace code by calls to web-server api
- return {"project_id": study_id, "owner": user_id}
-
-
-@router.delete("/{study_id}")
-async def delete_study(
- study_id: str, user_id: int = Security(get_active_user_id, scopes=["write"]),
-):
- # TODO: Replace code by calls to web-server api
- _data = {"project_id": study_id, "owner": user_id}
- return None
diff --git a/services/api-server/src/simcore_service_api_server/api/routes/users.py b/services/api-server/src/simcore_service_api_server/api/routes/users.py
index b1076647bf1..096c39e2b9e 100644
--- a/services/api-server/src/simcore_service_api_server/api/routes/users.py
+++ b/services/api-server/src/simcore_service_api_server/api/routes/users.py
@@ -1,64 +1,48 @@
-from typing import Dict
+import logging
from fastapi import APIRouter, Depends, HTTPException, Security
-from httpx import AsyncClient, Response, StatusCode
-from loguru import logger
-
-# SEE: https://www.python-httpx.org/async/
-# TODO: path mapping and operation
-# TODO: if fails, raise for status and translates to service unavailable if fails
-#
from pydantic import ValidationError
from starlette import status
from ...models.schemas.profiles import Profile, ProfileUpdate
-from ..dependencies.webserver import get_session_cookie, get_webserver_client
+from ..dependencies.webserver import AuthSession, get_webserver_session
+
+logger = logging.getLogger(__name__)
+
router = APIRouter()
+# SEE: https://www.python-httpx.org/async/
+# TODO: path mapping and operation
@router.get("", response_model=Profile)
async def get_my_profile(
- client: AsyncClient = Depends(get_webserver_client),
- session_cookies: Dict = Depends(get_session_cookie),
+ client: AuthSession = Depends(get_webserver_session),
) -> Profile:
- resp = await client.get("/v0/me", cookies=session_cookies)
-
- if resp.status_code == status.HTTP_200_OK:
- data = resp.json()["data"]
- try:
- # FIXME: temporary patch until web-API is reviewed
- data["role"] = data["role"].upper()
- profile = Profile.parse_obj(data)
- return profile
- except ValidationError:
- logger.exception("webserver response invalid")
- raise
-
- elif StatusCode.is_server_error(resp.status_code):
- logger.error("webserver failed :{}", resp.reason_phrase)
+ data = await client.get("/me")
+
+ # FIXME: temporary patch until web-API is reviewed
+ data["role"] = data["role"].upper()
+ try:
+ profile = Profile.parse_obj(data)
+ except ValidationError:
+ logger.exception("webserver invalid response")
raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)
- raise HTTPException(resp.status_code, resp.reason_phrase)
+ return profile
@router.put("", response_model=Profile)
async def update_my_profile(
profile_update: ProfileUpdate,
- client: AsyncClient = Depends(get_webserver_client),
- session_cookies: Dict = Security(get_session_cookie, scopes=["write"]),
+ client: AuthSession = Security(get_webserver_session, scopes=["write"]),
) -> Profile:
# FIXME: replace by patch
# TODO: improve. from patch -> put, we need to ensure it has a default in place
profile_update.first_name = profile_update.first_name or ""
profile_update.last_name = profile_update.last_name or ""
- resp: Response = await client.put(
- "/v0/me", json=profile_update.dict(), cookies=session_cookies
- )
- if StatusCode.is_error(resp.status_code):
- logger.error("webserver failed: {}", resp.reason_phrase)
- raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)
+ await client.put("/me", body=profile_update.dict())
- profile = await get_my_profile(client, session_cookies)
+ profile = await get_my_profile(client)
return profile
diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py
index 5f452974dd4..2cff3fbd259 100644
--- a/services/api-server/src/simcore_service_api_server/core/application.py
+++ b/services/api-server/src/simcore_service_api_server/core/application.py
@@ -1,9 +1,8 @@
-import sys
+import logging
from typing import Optional
from fastapi import FastAPI
from fastapi.exceptions import RequestValidationError
-from loguru import logger
from starlette.exceptions import HTTPException
from ..__version__ import api_version, api_vtag
@@ -16,12 +15,15 @@
from .redoc import create_redoc_handler
from .settings import AppSettings
+logger = logging.getLogger(__name__)
+
def init_app(settings: Optional[AppSettings] = None) -> FastAPI:
if settings is None:
settings = AppSettings.create_default()
- logger.add(sys.stderr, level=settings.loglevel)
+ logging.basicConfig(level=settings.loglevel)
+ logging.root.setLevel(settings.loglevel)
app = FastAPI(
debug=settings.debug,
diff --git a/services/api-server/src/simcore_service_api_server/core/events.py b/services/api-server/src/simcore_service_api_server/core/events.py
index 1a1fd646857..4ca73d97686 100644
--- a/services/api-server/src/simcore_service_api_server/core/events.py
+++ b/services/api-server/src/simcore_service_api_server/core/events.py
@@ -1,13 +1,15 @@
+import logging
from typing import Callable
from fastapi import FastAPI
-from loguru import logger
from ..db.events import close_db_connection, connect_to_db
from ..services.remote_debug import setup_remote_debugging
from ..services.webserver import close_webserver, setup_webserver
from .settings import BootModeEnum
+logger = logging.getLogger(__name__)
+
def create_start_app_handler(app: FastAPI) -> Callable:
async def start_app() -> None:
@@ -29,12 +31,14 @@ async def start_app() -> None:
def create_stop_app_handler(app: FastAPI) -> Callable:
- @logger.catch
async def stop_app() -> None:
- logger.info("Application stopping")
- if app.state.settings.postgres.enabled:
- await close_db_connection(app)
- if app.state.settings.webserver.enabled:
- await close_webserver(app)
+ try:
+ logger.info("Application stopping")
+ if app.state.settings.postgres.enabled:
+ await close_db_connection(app)
+ if app.state.settings.webserver.enabled:
+ await close_webserver(app)
+ except Exception: # pylint: disable=broad-except
+ logger.exception("Stopping application")
return stop_app
diff --git a/services/api-server/src/simcore_service_api_server/core/openapi.py b/services/api-server/src/simcore_service_api_server/core/openapi.py
index f1c3df7b1a7..b0b3286f372 100644
--- a/services/api-server/src/simcore_service_api_server/core/openapi.py
+++ b/services/api-server/src/simcore_service_api_server/core/openapi.py
@@ -1,4 +1,5 @@
import json
+import logging
import types
from pathlib import Path
from typing import Dict
@@ -7,10 +8,11 @@
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi
from fastapi.routing import APIRoute
-from loguru import logger
from .redoc import add_vendor_extensions, compose_long_description
+logger = logging.getLogger(__name__)
+
def override_openapi_method(app: FastAPI):
# TODO: test openapi(*) member does not change interface
@@ -55,7 +57,7 @@ def use_route_names_as_operation_ids(app: FastAPI) -> None:
def dump_openapi(app: FastAPI, filepath: Path):
- logger.info("Dumping openapi specs as {}", filepath)
+ logger.info("Dumping openapi specs as %s", filepath)
with open(filepath, "wt") as fh:
if filepath.suffix == ".json":
json.dump(app.openapi(), fh, indent=2)
diff --git a/services/api-server/src/simcore_service_api_server/db/events.py b/services/api-server/src/simcore_service_api_server/db/events.py
index 8523967ddb4..eebdcb7c9d8 100644
--- a/services/api-server/src/simcore_service_api_server/db/events.py
+++ b/services/api-server/src/simcore_service_api_server/db/events.py
@@ -3,11 +3,13 @@
from aiopg.sa import Engine, create_engine
from fastapi import FastAPI
-from loguru import logger
from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
from ..core.settings import PostgresSettings
+logger = logging.getLogger(__name__)
+
+
ENGINE_ATTRS = "closed driver dsn freesize maxsize minsize name size timeout".split()
@@ -30,7 +32,7 @@ def _compose_info_on_engine(app: FastAPI) -> str:
@retry(**pg_retry_policy)
async def connect_to_db(app: FastAPI) -> None:
- logger.debug("Connenting db ...")
+ logger.debug("Connecting db ...")
cfg: PostgresSettings = app.state.settings.postgres
engine: Engine = await create_engine(
@@ -39,7 +41,7 @@ async def connect_to_db(app: FastAPI) -> None:
minsize=cfg.minsize,
maxsize=cfg.maxsize,
)
- logger.debug("Connected to {}", engine.dsn)
+ logger.debug("Connected to %s", engine.dsn)
app.state.engine = engine
logger.debug(_compose_info_on_engine(app))
@@ -51,4 +53,4 @@ async def close_db_connection(app: FastAPI) -> None:
engine: Engine = app.state.engine
engine.close()
await engine.wait_closed()
- logger.debug("Disconnected from {}", engine.dsn)
+ logger.debug("Disconnected from %s", engine.dsn)
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/__init__.py b/services/api-server/src/simcore_service_api_server/db/repositories/__init__.py
index e69de29bb2d..a5eeffe1ff5 100644
--- a/services/api-server/src/simcore_service_api_server/db/repositories/__init__.py
+++ b/services/api-server/src/simcore_service_api_server/db/repositories/__init__.py
@@ -0,0 +1 @@
+from ._base import BaseRepository
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/base.py b/services/api-server/src/simcore_service_api_server/db/repositories/_base.py
similarity index 100%
rename from services/api-server/src/simcore_service_api_server/db/repositories/base.py
rename to services/api-server/src/simcore_service_api_server/db/repositories/_base.py
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py b/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py
index 7602e872daf..e24ebc30d32 100644
--- a/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py
+++ b/services/api-server/src/simcore_service_api_server/db/repositories/api_keys.py
@@ -1,15 +1,17 @@
+import logging
from typing import Optional
import sqlalchemy as sa
-from loguru import logger
from psycopg2 import DatabaseError
from .. import tables as tbl
-from .base import BaseRepository
+from ._base import BaseRepository
-# from ...models.domain.users import User, UserInDB
+logger = logging.getLogger(__name__)
-# For psycopg2 errors SEE https://www.psycopg.org/docs/errors.html#sqlstate-exception-classes
+
+# TODO: see if can use services/api-server/src/simcore_service_api_server/models/domain/api_keys.py
+# NOTE: For psycopg2 errors SEE https://www.psycopg.org/docs/errors.html#sqlstate-exception-classes
class ApiKeysRepository(BaseRepository):
@@ -25,7 +27,7 @@ async def get_user_id(self, api_key: str, api_secret: str) -> Optional[int]:
user_id: Optional[int] = await self.connection.scalar(stmt)
except DatabaseError as err:
- logger.debug(f"Failed to get user id: {err}")
+ logger.debug("Failed to get user id: %s", err)
user_id = None
return user_id
diff --git a/services/api-server/src/simcore_service_api_server/db/repositories/users.py b/services/api-server/src/simcore_service_api_server/db/repositories/users.py
index f10ab46e081..c60400544ce 100644
--- a/services/api-server/src/simcore_service_api_server/db/repositories/users.py
+++ b/services/api-server/src/simcore_service_api_server/db/repositories/users.py
@@ -1,12 +1,9 @@
-import hashlib
-from typing import List, Optional
+from typing import Optional
import sqlalchemy as sa
-from aiopg.sa.result import RowProxy
-from ...models.schemas.profiles import Profile
-from ..tables import GroupType, api_keys, groups, user_to_groups, users
-from .base import BaseRepository
+from ..tables import api_keys, users
+from ._base import BaseRepository
class UsersRepository(BaseRepository):
@@ -18,7 +15,6 @@ async def get_user_id(self, api_key: str, api_secret: str) -> Optional[int]:
return user_id
async def any_user_with_id(self, user_id: int) -> bool:
- # FIXME: shall identify api_key or api_secret instead
stmt = sa.select([api_keys.c.user_id,]).where(api_keys.c.user_id == user_id)
return (await self.connection.scalar(stmt)) is not None
@@ -26,82 +22,3 @@ async def get_email_from_user_id(self, user_id: int) -> Optional[str]:
stmt = sa.select([users.c.email,]).where(users.c.id == user_id)
email: Optional[str] = await self.connection.scalar(stmt)
return email
-
- # TEMPORARY ----
- async def get_profile_from_userid(self, user_id: int) -> Optional[Profile]:
- stmt = (
- sa.select(
- [
- users.c.email,
- users.c.role,
- users.c.name,
- users.c.primary_gid,
- groups.c.gid,
- groups.c.name,
- groups.c.description,
- groups.c.type,
- ],
- use_labels=True,
- )
- .select_from(
- users.join(
- user_to_groups.join(groups, user_to_groups.c.gid == groups.c.gid),
- users.c.id == user_to_groups.c.uid,
- )
- )
- .where(users.c.id == user_id)
- .order_by(sa.asc(groups.c.name))
- )
-
- # all user_group combinations but only the group changes
- result = await self.connection.execute(stmt)
- user_groups: List[RowProxy] = await result.fetchall()
-
- if not user_groups:
- return None
-
- # get the primary group and the all group
- user_primary_group = all_group = {}
- other_groups = []
- for user_group in user_groups:
- if user_group["users_primary_gid"] == user_group["groups_gid"]:
- user_primary_group = user_group
- elif user_group["groups_type"] == GroupType.EVERYONE:
- all_group = user_group
- else:
- other_groups.append(user_group)
-
- parts = user_primary_group["users_name"].split(".") + [""]
- return Profile.parse_obj(
- {
- "login": user_primary_group["users_email"],
- "first_name": parts[0],
- "last_name": parts[1],
- "role": user_primary_group["users_role"].name.capitalize(),
- "gravatar_id": gravatar_hash(user_primary_group["users_email"]),
- "groups": {
- "me": {
- "gid": user_primary_group["groups_gid"],
- "label": user_primary_group["groups_name"],
- "description": user_primary_group["groups_description"],
- },
- "organizations": [
- {
- "gid": group["groups_gid"],
- "label": group["groups_name"],
- "description": group["groups_description"],
- }
- for group in other_groups
- ],
- "all": {
- "gid": all_group["groups_gid"],
- "label": all_group["groups_name"],
- "description": all_group["groups_description"],
- },
- },
- }
- )
-
-
-def gravatar_hash(email: str) -> str:
- return hashlib.md5(email.lower().encode("utf-8")).hexdigest() # nosec
diff --git a/services/api-server/src/simcore_service_api_server/models/domain/users.py b/services/api-server/src/simcore_service_api_server/models/domain/users.py
deleted file mode 100644
index 5b35d3fc388..00000000000
--- a/services/api-server/src/simcore_service_api_server/models/domain/users.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from pydantic import BaseModel, EmailStr, Field
-
-from simcore_postgres_database.models.users import UserRole, UserStatus
-
-from .groups import Groups
-
-
-class UserBase(BaseModel):
- first_name: str
- last_name: str
-
-
-class User(UserBase):
- login: EmailStr
- role: str
- groups: Groups
- gravatar_id: str
-
-
-class UserInDB(BaseModel):
- id_: int = Field(0, alias="id")
- name: str
- email: str
- password_hash: str
- primary_gid: int
- status: UserStatus
- role: UserRole
-
- # TODO: connect name <-> first_name, last_name
-
- class Config:
- orm_mode = True
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py b/services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py
deleted file mode 100644
index 8201495e219..00000000000
--- a/services/api-server/src/simcore_service_api_server/models/schemas/api_keys.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from pydantic import BaseModel
-
-from ..domain.api_keys import ApiKey
-
-
-class ApiKeyInLogin(ApiKey):
- pass
-
-
-class ApiKeyInResponse(BaseModel):
- display_name: str
- token: str
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/tokens.py b/services/api-server/src/simcore_service_api_server/models/schemas/tokens.py
deleted file mode 100644
index 89fb7692f24..00000000000
--- a/services/api-server/src/simcore_service_api_server/models/schemas/tokens.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from datetime import datetime
-from typing import List
-
-from pydantic import BaseModel
-
-
-class JWTMeta(BaseModel):
- exp: datetime
- sub: str
-
-
-class JWTUser(BaseModel):
- username: str
-
-
-class Token(BaseModel):
- access_token: str
- token_type: str
-
-
-class TokenData(BaseModel):
- """ application data encoded in the JWT """
-
- user_id: int
- scopes: List[str] = []
diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/users.py b/services/api-server/src/simcore_service_api_server/models/schemas/users.py
deleted file mode 100644
index 32e51ae900f..00000000000
--- a/services/api-server/src/simcore_service_api_server/models/schemas/users.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from pydantic import BaseModel
-
-from ..domain.users import User
-
-
-class UserInResponse(User):
- pass
-
-
-class UserInUpdate(BaseModel):
- first_name: str
- last_name: str
diff --git a/services/api-server/src/simcore_service_api_server/services/jwt.py b/services/api-server/src/simcore_service_api_server/services/jwt.py
deleted file mode 100644
index c3aed9445ce..00000000000
--- a/services/api-server/src/simcore_service_api_server/services/jwt.py
+++ /dev/null
@@ -1,66 +0,0 @@
-""" Utility functions related with security
-
-"""
-import os
-from datetime import datetime, timedelta
-from typing import Dict, Optional
-
-import jwt
-from jwt import PyJWTError
-from loguru import logger
-from pydantic import ValidationError
-
-from ..models.schemas.tokens import TokenData
-
-# JSON WEB TOKENS (JWT) --------------------------------------------------------------
-
-__SIGNING_KEY__ = os.environ.get("SECRET_KEY")
-__ALGORITHM__ = "HS256"
-ACCESS_TOKEN_EXPIRE_MINUTES = 30
-
-
-def create_access_token(
- data: TokenData, *, expires_in_mins: Optional[int] = ACCESS_TOKEN_EXPIRE_MINUTES
-) -> str:
- """
- To disable expiration, set 'expires_in_mins' to None
- """
- # JWT specs define "Claim Names" for the encoded payload
- # SEE https://tools.ietf.org/html/rfc7519#section-4
- payload = {
- "sub": data.user_id,
- "scopes": data.scopes or [],
- }
-
- if expires_in_mins is not None:
- exp = datetime.utcnow() + timedelta(minutes=expires_in_mins)
- payload["exp"] = exp
-
- encoded_jwt = jwt.encode(payload, __SIGNING_KEY__, algorithm=__ALGORITHM__)
- return encoded_jwt
-
-
-def get_access_token_data(encoded_jwt: str) -> Optional[TokenData]:
- """
- Decodes and validates JWT and returns TokenData
- Returns None, if invalid token
- """
- try:
- # decode JWT [header.payload.signature] and get payload:
- payload: Dict = jwt.decode(
- encoded_jwt, __SIGNING_KEY__, algorithms=[__ALGORITHM__]
- )
-
- token_data = TokenData(
- user_id=payload.get("sub"), token_scopes=payload.get("scopes", [])
- )
-
- except PyJWTError:
- logger.debug("Invalid token", exc_info=True)
- return None
-
- except ValidationError:
- logger.warning("Token data corrupted? Check payload -> TokenData conversion")
- return None
-
- return token_data
diff --git a/services/api-server/src/simcore_service_api_server/services/remote_debug.py b/services/api-server/src/simcore_service_api_server/services/remote_debug.py
index 67b9b279d44..a9568a73b12 100644
--- a/services/api-server/src/simcore_service_api_server/services/remote_debug.py
+++ b/services/api-server/src/simcore_service_api_server/services/remote_debug.py
@@ -2,9 +2,10 @@
"""
+import logging
import os
-from loguru import logger
+logger = logging.getLogger(__name__)
REMOTE_DEBUG_PORT = 3000
@@ -30,9 +31,11 @@ def setup_remote_debugging(force_enabled=False, *, boot_mode=None):
"Cannot enable remote debugging. Please install ptvsd first"
)
- logger.info(f"Remote debugging enabled: listening port {REMOTE_DEBUG_PORT}")
+ logger.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT)
else:
- logger.debug(f"Booting without remote debugging since SC_BOOT_MODE={boot_mode}")
+ logger.debug(
+ "Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode
+ )
__all__ = ["setup_remote_debugging"]
diff --git a/services/api-server/src/simcore_service_api_server/services/security.py b/services/api-server/src/simcore_service_api_server/services/security.py
deleted file mode 100644
index 2dea4396d82..00000000000
--- a/services/api-server/src/simcore_service_api_server/services/security.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import subprocess # nosec
-from subprocess import CalledProcessError, CompletedProcess # nosec
-
-from passlib.context import CryptContext
-
-__pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
-
-
-def verify_password(plain_password: str, hashed_password: str) -> bool:
- return __pwd_context.verify(plain_password, hashed_password)
-
-
-def get_password_hash(password: str) -> str:
- return __pwd_context.hash(password)
-
-
-def create_secret_key() -> str:
- # NOTICE that this key is reset when server is restarted!
- try:
- proc: CompletedProcess = subprocess.run( # nosec
- "openssl rand -hex 32", check=True, shell=True
- )
- except (CalledProcessError, FileNotFoundError) as why:
- raise ValueError("Cannot create secret key") from why
- return str(proc.stdout).strip()
diff --git a/services/api-server/src/simcore_service_api_server/services/serialization.py b/services/api-server/src/simcore_service_api_server/services/serialization.py
index b4266fbf491..4848851bd08 100644
--- a/services/api-server/src/simcore_service_api_server/services/serialization.py
+++ b/services/api-server/src/simcore_service_api_server/services/serialization.py
@@ -3,10 +3,6 @@
from typing import Dict
-def to_bool(s: str) -> bool:
- return s.lower() in ["true", "1", "yes"]
-
-
def _jsoncoverter(obj):
if isinstance(obj, datetime):
return obj.__str__()
diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py
index 74631f5a33a..6d53909fdc8 100644
--- a/services/api-server/src/simcore_service_api_server/services/webserver.py
+++ b/services/api-server/src/simcore_service_api_server/services/webserver.py
@@ -1,12 +1,18 @@
import base64
+import json
+import logging
+from typing import Dict, Optional
+import attr
from cryptography import fernet
-from fastapi import FastAPI
-from httpx import AsyncClient
-from loguru import logger
+from fastapi import FastAPI, HTTPException
+from httpx import AsyncClient, Response, StatusCode
+from starlette import status
from ..core.settings import WebServerSettings
+logger = logging.getLogger(__name__)
+
def _get_secret_key(settings: WebServerSettings):
secret_key_bytes = settings.session_secret_key.get_secret_value().encode("utf-8")
@@ -29,7 +35,7 @@ def setup_webserver(app: FastAPI) -> None:
app.state.webserver_fernet = fernet.Fernet(secret_key)
# init client
- logger.debug(f"Setup webserver at {settings.base_url}...")
+ logger.debug("Setup webserver at %s...", settings.base_url)
client = AsyncClient(base_url=settings.base_url)
app.state.webserver_client = client
@@ -48,5 +54,79 @@ async def close_webserver(app: FastAPI) -> None:
logger.debug("Webserver closed successfully")
-def get_webserver_client(app: FastAPI) -> AsyncClient:
- return app.state.webserver_client
+@attr.s(auto_attribs=True)
+class AuthSession:
+ """
+ - wrapper around thin-client to simplify webserver's API
+ - sets endpoint upon construction
+ - MIME type: application/json
+ - processes responses, returning data or raising formatted HTTP exception
+ - The lifetime of an AuthSession is ONE request.
+
+ SEE services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py
+ """
+
+ client: AsyncClient # Its lifetime is attached to app
+ vtag: str
+ session_cookies: Dict = None
+
+ @classmethod
+ def create(cls, app: FastAPI, session_cookies: Dict):
+ return cls(
+ client=app.state.webserver_client,
+ vtag=app.state.settings.webserver.vtag,
+ session_cookies=session_cookies,
+ )
+
+ def _url(self, path: str) -> str:
+ return f"/{self.vtag}/{path.lstrip('/')}"
+
+ @classmethod
+ def _process(cls, resp: Response) -> Optional[Dict]:
+ # enveloped answer
+ data, error = None, None
+ try:
+ body = resp.json()
+ data, error = body["data"], body["error"]
+ except (json.JSONDecodeError, KeyError):
+ logger.warning("Failed to unenvelop webserver response", exc_info=True)
+
+ if StatusCode.is_server_error(resp.status_code):
+ logger.error(
+ "webserver error %d [%s]: %s",
+ resp.status_code,
+ resp.reason_phrase,
+ error,
+ )
+ raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)
+
+ if StatusCode.is_client_error(resp.status_code):
+ msg = error or resp.reason_phrase
+ raise HTTPException(resp.status_code, detail=msg)
+
+ return data
+
+ # OPERATIONS
+ # TODO: refactor the code below
+ # TODO: policy to retry if NetworkError/timeout?
+ # TODO: add ping to healthcheck
+
+ async def get(self, path: str) -> Optional[Dict]:
+ url = self._url(path)
+ try:
+ resp = await self.client.get(url, cookies=self.session_cookies)
+ except Exception:
+ logger.exception("Failed to get %s", url)
+ raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)
+
+ return self._process(resp)
+
+ async def put(self, path: str, body: Dict) -> Optional[Dict]:
+ url = self._url(path)
+ try:
+ resp = await self.client.put(url, json=body, cookies=self.session_cookies)
+ except Exception:
+ logger.exception("Failed to put %s", url)
+ raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE)
+
+ return self._process(resp)
diff --git a/services/api-server/tests/unit/_helpers.py b/services/api-server/tests/unit/_helpers.py
index 2ba444489a6..6375d331b16 100644
--- a/services/api-server/tests/unit/_helpers.py
+++ b/services/api-server/tests/unit/_helpers.py
@@ -6,7 +6,7 @@
from aiopg.sa.result import RowProxy
import simcore_service_api_server.db.tables as orm
-from simcore_service_api_server.db.repositories.base import BaseRepository
+from simcore_service_api_server.db.repositories import BaseRepository
from simcore_service_api_server.db.repositories.users import UsersRepository
from simcore_service_api_server.models.domain.api_keys import ApiKeyInDB
diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py
index b69ea0e2666..c2bce8e28c0 100644
--- a/services/api-server/tests/unit/conftest.py
+++ b/services/api-server/tests/unit/conftest.py
@@ -35,14 +35,11 @@ def environment() -> Dict:
env = {
"WEBSERVER_HOST": "webserver",
"WEBSERVER_SESSION_SECRET_KEY": "REPLACE ME with a key of at least length 32.",
-
"POSTGRES_HOST": "localhost",
"POSTGRES_USER": "test",
"POSTGRES_PASSWORD": "test",
"POSTGRES_DB": "test",
-
"LOG_LEVEL": "debug",
-
"SC_BOOT_MODE": "production",
}
return env
@@ -162,7 +159,7 @@ def is_postgres_responsive() -> bool:
return config
-@pytest.fixture("session")
+@pytest.fixture(scope="session")
def make_engine(postgres_service: Dict) -> Callable:
dsn = postgres_service["dsn"] # session scope freezes dsn
diff --git a/services/api-server/tests/unit/test_api_meta.py b/services/api-server/tests/unit/test_api_meta.py
index d8df761a822..0e966a5fb81 100644
--- a/services/api-server/tests/unit/test_api_meta.py
+++ b/services/api-server/tests/unit/test_api_meta.py
@@ -1,7 +1,6 @@
# pylint: disable=unused-variable
# pylint: disable=unused-argument
# pylint: disable=redefined-outer-name
-# from fastapi.testclient import TestClient
from httpx import AsyncClient
diff --git a/services/api-server/tests/unit/test_jwt.py b/services/api-server/tests/unit/test_jwt.py
deleted file mode 100644
index 5f13d17d4b4..00000000000
--- a/services/api-server/tests/unit/test_jwt.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# pylint: disable=unused-variable
-# pylint: disable=unused-argument
-# pylint: disable=redefined-outer-name
-
-import importlib
-
-import pytest
-
-from simcore_service_api_server.models.schemas.tokens import TokenData
-from simcore_service_api_server.services.jwt import (
- create_access_token,
- get_access_token_data,
-)
-
-
-@pytest.fixture()
-def mock_secret_key(monkeypatch):
- monkeypatch.setenv("SECRET_KEY", "your-256-bit-secret")
-
- import simcore_service_api_server.services.jwt
-
- importlib.reload(simcore_service_api_server.services.jwt)
-
-
-def test_access_token_data(mock_secret_key):
-
- data = TokenData(user_id=33, scopes=[])
- jwt = create_access_token(data, expires_in_mins=None)
-
- # checks jwt against https://jwt.io/#debugger-io
- # assert jwt == b"ey ...
-
- received_data = get_access_token_data(jwt)
-
- assert data == received_data
diff --git a/services/api-server/tests/unit/test_security.py b/services/api-server/tests/unit/test_security.py
deleted file mode 100644
index 325b001b963..00000000000
--- a/services/api-server/tests/unit/test_security.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# pylint: disable=unused-variable
-# pylint: disable=unused-argument
-# pylint: disable=redefined-outer-name
-
-from simcore_service_api_server.services.security import (
- get_password_hash,
- verify_password,
-)
-
-
-def test_has_password():
- hashed_pass = get_password_hash("secret")
- assert hashed_pass != "secret"
- assert verify_password("secret", hashed_pass)
diff --git a/services/api-server/tests/unit/test_settings.py b/services/api-server/tests/unit/test_settings.py
index 8263d46ae03..cfe542f36c4 100644
--- a/services/api-server/tests/unit/test_settings.py
+++ b/services/api-server/tests/unit/test_settings.py
@@ -14,7 +14,9 @@
# bring .env-devel in here
def test_min_environ_for_settings(monkeypatch):
monkeypatch.setenv("WEBSERVER_HOST", "production_webserver")
- monkeypatch.setenv("WEBSERVER_SESSION_SECRET_KEY", "REPLACE ME with a key of at least length 32.")
+ monkeypatch.setenv(
+ "WEBSERVER_SESSION_SECRET_KEY", "REPLACE ME with a key of at least length 32."
+ )
monkeypatch.setenv("POSTGRES_HOST", "production_postgres")
monkeypatch.setenv("POSTGRES_USER", "test")
From 95df8daeb02a895b867d2d618ca30479905db583 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Fri, 26 Jun 2020 16:32:34 +0200
Subject: [PATCH 12/43] Cleanup catalog service (#1582)
Cleanup of catalog service before adding more functionality
* Major refactoring
- api/dependencies: dependencies injected in handlers
- api/routes: endpoint handlers and routing
- api/root.py: join all openapi-specs into a single router
- models: domain (orm and business logic models) and schema (i/o schemas for openapi) models
- db: database tables (i.e. sa schemas for tables) and repositories (crud layer between handlers and db calls)
- core: init application and settings (parses values by args, environs, .env or default, in this order)
- services: modules/plugins with logic for the app
- __main__,__version__ : main entrypoint and version
---
ci/github/unit-testing/catalog.bash | 2 +-
ci/helpers/ensure_python_pip.bash | 2 +-
ci/travis/unit-testing/catalog.bash | 2 +-
packages/postgres-database/docker/Dockerfile | 2 +-
.../src/pytest_simcore/postgres_service2.py | 148 ++++
requirements.txt | 1 +
scripts/openapi/oas_resolver/Dockerfile | 2 +-
services/api-server/Dockerfile | 9 +-
services/api-server/Makefile | 48 +-
services/api-server/openapi.json | 2 +-
services/catalog/.cookiecutterrc | 23 -
services/catalog/.env-devel | 13 +
services/catalog/.gitignore | 2 +
services/catalog/Dockerfile | 118 +--
services/catalog/Makefile | 67 +-
services/catalog/README.md | 49 +-
services/catalog/docker/boot.sh | 37 +-
services/catalog/docker/entrypoint.sh | 87 ++-
services/catalog/openapi.json | 715 ++++++++++++++++++
services/catalog/requirements/_base.in | 13 +-
services/catalog/requirements/_base.txt | 44 +-
services/catalog/requirements/_test.in | 5 +
services/catalog/requirements/_test.txt | 86 ++-
services/catalog/requirements/dev.txt | 2 +-
services/catalog/setup.cfg | 4 -
.../src/simcore_service_catalog/__main__.py | 24 +-
.../simcore_service_catalog/__version__.py | 2 +-
.../{endpoints => api}/__init__.py | 0
.../{schemas => api/dependencies}/__init__.py | 0
.../api/dependencies/database.py | 21 +
.../src/simcore_service_catalog/api/root.py | 10 +
.../{utils => api/routes}/__init__.py | 0
.../{endpoints => api/routes}/dags.py | 59 +-
.../api/routes/health.py | 8 +
.../api/routes/meta.py | 15 +
.../api/v0/openapi.yaml | 452 -----------
.../src/simcore_service_catalog/config.py | 56 --
.../simcore_service_catalog/core/__init__.py | 0
.../core/application.py | 57 ++
.../simcore_service_catalog/core/errors.py | 0
.../simcore_service_catalog/core/events.py | 38 +
.../simcore_service_catalog/core/settings.py | 84 ++
.../catalog/src/simcore_service_catalog/db.py | 60 --
.../simcore_service_catalog/db/__init__.py | 0
.../src/simcore_service_catalog/db/errors.py | 0
.../src/simcore_service_catalog/db/events.py | 56 ++
.../db/repositories/__init__.py | 1 +
.../db/repositories/_base.py | 15 +
.../db/repositories/dags.py | 60 ++
.../{orm.py => db/tables.py} | 4 +-
.../endpoints/diagnostics.py | 17 -
.../src/simcore_service_catalog/main.py | 78 --
.../models/__init__.py | 0
.../models/domain/__init__.py | 0
.../schemas_dags.py => models/domain/dag.py} | 14 +-
.../{schemas => models/domain}/project.py | 0
.../models/schemas/__init__.py | 0
.../models/schemas/dag.py | 20 +
.../models/schemas/meta.py | 27 +
.../services/__init__.py | 0
.../{utils => services}/remote_debug.py | 19 +-
.../simcore_service_catalog/store/__init__.py | 3 -
.../store/crud_dags.py | 60 --
.../simcore_service_catalog/utils/helpers.py | 2 -
services/catalog/tests/unit/conftest.py | 12 +-
services/catalog/tests/unit/test_package.py | 21 +-
services/catalog/tests/unit/test_schemas.py | 40 +-
.../catalog/tests/unit/with_dbs/conftest.py | 76 +-
.../unit/with_dbs/test_entrypoint_dags.py | 25 +-
services/director/Dockerfile | 4 +-
services/docker-compose.devel.yml | 7 +-
services/docker-compose.local.yml | 2 -
services/docker-compose.yml | 3 +-
services/sidecar/Dockerfile | 4 +-
services/storage/Dockerfile | 2 +-
services/web/Dockerfile | 4 +-
76 files changed, 1755 insertions(+), 1190 deletions(-)
create mode 100644 packages/pytest-simcore/src/pytest_simcore/postgres_service2.py
delete mode 100644 services/catalog/.cookiecutterrc
create mode 100644 services/catalog/.env-devel
create mode 100644 services/catalog/.gitignore
create mode 100644 services/catalog/openapi.json
rename services/catalog/src/simcore_service_catalog/{endpoints => api}/__init__.py (100%)
rename services/catalog/src/simcore_service_catalog/{schemas => api/dependencies}/__init__.py (100%)
create mode 100644 services/catalog/src/simcore_service_catalog/api/dependencies/database.py
create mode 100644 services/catalog/src/simcore_service_catalog/api/root.py
rename services/catalog/src/simcore_service_catalog/{utils => api/routes}/__init__.py (100%)
rename services/catalog/src/simcore_service_catalog/{endpoints => api/routes}/dags.py (68%)
create mode 100644 services/catalog/src/simcore_service_catalog/api/routes/health.py
create mode 100644 services/catalog/src/simcore_service_catalog/api/routes/meta.py
delete mode 100644 services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml
delete mode 100644 services/catalog/src/simcore_service_catalog/config.py
create mode 100644 services/catalog/src/simcore_service_catalog/core/__init__.py
create mode 100644 services/catalog/src/simcore_service_catalog/core/application.py
create mode 100644 services/catalog/src/simcore_service_catalog/core/errors.py
create mode 100644 services/catalog/src/simcore_service_catalog/core/events.py
create mode 100644 services/catalog/src/simcore_service_catalog/core/settings.py
delete mode 100644 services/catalog/src/simcore_service_catalog/db.py
create mode 100644 services/catalog/src/simcore_service_catalog/db/__init__.py
create mode 100644 services/catalog/src/simcore_service_catalog/db/errors.py
create mode 100644 services/catalog/src/simcore_service_catalog/db/events.py
create mode 100644 services/catalog/src/simcore_service_catalog/db/repositories/__init__.py
create mode 100644 services/catalog/src/simcore_service_catalog/db/repositories/_base.py
create mode 100644 services/catalog/src/simcore_service_catalog/db/repositories/dags.py
rename services/catalog/src/simcore_service_catalog/{orm.py => db/tables.py} (62%)
delete mode 100644 services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py
delete mode 100644 services/catalog/src/simcore_service_catalog/main.py
create mode 100644 services/catalog/src/simcore_service_catalog/models/__init__.py
create mode 100644 services/catalog/src/simcore_service_catalog/models/domain/__init__.py
rename services/catalog/src/simcore_service_catalog/{schemas/schemas_dags.py => models/domain/dag.py} (71%)
rename services/catalog/src/simcore_service_catalog/{schemas => models/domain}/project.py (100%)
create mode 100644 services/catalog/src/simcore_service_catalog/models/schemas/__init__.py
create mode 100644 services/catalog/src/simcore_service_catalog/models/schemas/dag.py
create mode 100644 services/catalog/src/simcore_service_catalog/models/schemas/meta.py
create mode 100644 services/catalog/src/simcore_service_catalog/services/__init__.py
rename services/catalog/src/simcore_service_catalog/{utils => services}/remote_debug.py (58%)
delete mode 100644 services/catalog/src/simcore_service_catalog/store/__init__.py
delete mode 100644 services/catalog/src/simcore_service_catalog/store/crud_dags.py
delete mode 100644 services/catalog/src/simcore_service_catalog/utils/helpers.py
diff --git a/ci/github/unit-testing/catalog.bash b/ci/github/unit-testing/catalog.bash
index 72c3530c91d..c1004dea7dd 100755
--- a/ci/github/unit-testing/catalog.bash
+++ b/ci/github/unit-testing/catalog.bash
@@ -12,7 +12,7 @@ install() {
test() {
pytest --cov=simcore_service_catalog --durations=10 --cov-append \
--color=yes --cov-report=term-missing --cov-report=xml \
- -v -m "not travis" services/catalog/tests
+ -v -m "not travis" services/catalog/tests/unit
}
# Check if the function exists (bash specific)
diff --git a/ci/helpers/ensure_python_pip.bash b/ci/helpers/ensure_python_pip.bash
index ea5c9111411..13f7f9bd5ff 100755
--- a/ci/helpers/ensure_python_pip.bash
+++ b/ci/helpers/ensure_python_pip.bash
@@ -9,7 +9,7 @@ set -euo pipefail
IFS=$'\n\t'
# Pin pip version to a compatible release https://www.python.org/dev/peps/pep-0440/#compatible-release
-PIP_VERSION=19.3.1
+PIP_VERSION=20.1.1
echo "INFO:" "$(python --version)" "@" "$(command -v python)"
diff --git a/ci/travis/unit-testing/catalog.bash b/ci/travis/unit-testing/catalog.bash
index e4d91aa3f61..0f8cf8c9ca0 100755
--- a/ci/travis/unit-testing/catalog.bash
+++ b/ci/travis/unit-testing/catalog.bash
@@ -34,7 +34,7 @@ script() {
then
pytest --cov=simcore_service_catalog --durations=10 --cov-append \
--color=yes --cov-report=term-missing --cov-report=xml \
- -v -m "not travis" services/catalog/tests
+ -v -m "not travis" services/catalog/tests/unit
else
echo "No changes detected. Skipping unit-testing of catalog."
fi
diff --git a/packages/postgres-database/docker/Dockerfile b/packages/postgres-database/docker/Dockerfile
index 9bcd9b37935..a03ad77c13e 100644
--- a/packages/postgres-database/docker/Dockerfile
+++ b/packages/postgres-database/docker/Dockerfile
@@ -23,7 +23,7 @@ RUN apt-get update &&\
RUN python -m venv ${VIRTUAL_ENV}
RUN pip --no-cache-dir install --upgrade \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
diff --git a/packages/pytest-simcore/src/pytest_simcore/postgres_service2.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service2.py
new file mode 100644
index 00000000000..fc4b593b7b5
--- /dev/null
+++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service2.py
@@ -0,0 +1,148 @@
+"""
+ sets up a docker-compose
+
+IMPORTANT: incompatible with pytest_simcore.docker_compose and pytest_simcore.postgres_service
+
+"""
+# pylint:disable=unused-variable
+# pylint:disable=unused-argument
+# pylint:disable=redefined-outer-name
+
+import os
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+from typing import Callable, Coroutine, Dict, Union
+
+import aiopg.sa
+import pytest
+import sqlalchemy as sa
+import yaml
+from dotenv import dotenv_values
+
+import simcore_postgres_database.cli as pg_cli
+from simcore_postgres_database.models.base import metadata
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
+
+@pytest.fixture(scope="session")
+def env_devel_file(project_slug_dir: Path) -> Path:
+ # takes as a base
+ env_devel_path = project_slug_dir / ".env-devel"
+ assert env_devel_path.exists()
+ return env_devel_path
+
+
+@pytest.fixture(scope="session")
+def test_environment(env_devel_file: Path) -> Dict[str, str]:
+ env = dotenv_values(env_devel_file, verbose=True, interpolate=True)
+ return env
+
+
+@pytest.fixture(scope="session")
+def test_docker_compose_file(pytestconfig) -> Path:
+ """Get an absolute path to the `docker-compose.yml` file.
+ Override this fixture in your tests if you need a custom location.
+ """
+ return Path(pytestconfig.rootdir) / "tests" / "docker-compose.yml"
+
+
+@pytest.fixture(scope="session")
+def docker_compose_file(test_environment: Dict[str, str], tmpdir_factory, test_docker_compose_file) -> Path:
+ # Overrides fixture in https://github.com/avast/pytest-docker
+
+ environ = dict(
+ os.environ
+ ) # NOTE: do not forget to add the current environ here, otherwise docker-compose fails
+ environ.update(test_environment)
+
+ # assumes prototype in cwd
+ src_path = test_docker_compose_file
+ assert Path(src_path).exists(), f"Expected prototype at cwd, i.e. {Path(src_path).resolve()}"
+
+ dst_path = Path(
+ str(
+ tmpdir_factory.mktemp("docker_compose_file_fixture").join(
+ "docker-compose.yml"
+ )
+ )
+ )
+
+ shutil.copy(src_path, dst_path.parent)
+ assert dst_path.exists()
+
+ # configs
+ subprocess.run(
+ f'docker-compose --file "{src_path}" config > "{dst_path}"',
+ shell=True,
+ check=True,
+ env=environ,
+ )
+
+ return dst_path
+
+
+@pytest.fixture(scope="session")
+def postgres_service2(docker_services, docker_ip, docker_compose_file: Path) -> Dict:
+
+ # check docker-compose's environ is resolved properly
+ config = yaml.safe_load(docker_compose_file.read_text())
+ environ = config["services"]["postgres"]["environment"]
+
+ # builds DSN
+ config = dict(
+ user=environ["POSTGRES_USER"],
+ password=environ["POSTGRES_PASSWORD"],
+ host=docker_ip,
+ port=docker_services.port_for("postgres", 5432),
+ database=environ["POSTGRES_DB"],
+ )
+
+ dsn = "postgresql://{user}:{password}@{host}:{port}/{database}".format(**config)
+
+ def _create_checker() -> Callable:
+ def is_postgres_responsive() -> bool:
+ try:
+ engine = sa.create_engine(dsn)
+ conn = engine.connect()
+ conn.close()
+ except sa.exc.OperationalError:
+ return False
+ return True
+
+ return is_postgres_responsive
+
+ # Wait until service is responsive.
+ docker_services.wait_until_responsive(
+ check=_create_checker(), timeout=30.0, pause=0.1,
+ )
+
+ config["dsn"] = dsn
+ return config
+
+
+@pytest.fixture(scope="session")
+def make_engine(postgres_service2: Dict) -> Callable:
+ dsn = postgres_service2["dsn"] # session scope freezes dsn
+
+ def maker(is_async=True) -> Union[Coroutine, Callable]:
+ return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn)
+
+ return maker
+
+
+@pytest.fixture
+def apply_migration(postgres_service2: Dict, make_engine) -> None:
+ kwargs = postgres_service2.copy()
+ kwargs.pop("dsn")
+ pg_cli.discover.callback(**kwargs)
+ pg_cli.upgrade.callback("head")
+ yield
+ pg_cli.downgrade.callback("base")
+ pg_cli.clean.callback()
+
+ # FIXME: deletes all because downgrade is not reliable!
+ engine = make_engine(False)
+ metadata.drop_all(engine)
diff --git a/requirements.txt b/requirements.txt
index 26e833fe8f0..305907057c6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -15,6 +15,7 @@
# formatter
black
+isort
# dependency manager
pip-tools
# version manager
diff --git a/scripts/openapi/oas_resolver/Dockerfile b/scripts/openapi/oas_resolver/Dockerfile
index 58420fbffed..6c0bda718ee 100644
--- a/scripts/openapi/oas_resolver/Dockerfile
+++ b/scripts/openapi/oas_resolver/Dockerfile
@@ -12,7 +12,7 @@ WORKDIR /src
# update pip
RUN pip install --no-cache-dir --upgrade \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
diff --git a/services/api-server/Dockerfile b/services/api-server/Dockerfile
index 172ae927177..9cbe8cf8873 100644
--- a/services/api-server/Dockerfile
+++ b/services/api-server/Dockerfile
@@ -1,5 +1,5 @@
ARG PYTHON_VERSION="3.6.10"
-FROM python:${PYTHON_VERSION}-slim as base
+FROM python:${PYTHON_VERSION}-slim-buster as base
#
# USAGE:
# cd sercices/api-server
@@ -43,7 +43,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
# those from our virtualenv.
ENV PATH="${VIRTUAL_ENV}/bin:$PATH"
-EXPOSE 8001
+EXPOSE 8000
EXPOSE 3000
# -------------------------- Build stage -------------------
@@ -64,7 +64,7 @@ RUN apt-get update &&\
RUN python -m venv ${VIRTUAL_ENV}
RUN pip install --upgrade --no-cache-dir \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
@@ -111,7 +111,10 @@ ENV PYTHONOPTIMIZE=TRUE
WORKDIR /home/scu
+# Starting from clean base image, copies pre-installed virtualenv from cache
COPY --chown=scu:scu --from=cache ${VIRTUAL_ENV} ${VIRTUAL_ENV}
+
+# Copies booting scripts
COPY --chown=scu:scu services/api-server/docker services/api-server/docker
RUN chmod +x services/api-server/docker/*.sh
diff --git a/services/api-server/Makefile b/services/api-server/Makefile
index d85c03a936f..88e1ecd1891 100644
--- a/services/api-server/Makefile
+++ b/services/api-server/Makefile
@@ -4,10 +4,13 @@
include ../../scripts/common.Makefile
# Custom variables
-APP_NAME := $(notdir $(CURDIR))
-APP_CLI_NAME := simcore-service-$(APP_NAME)
-export APP_VERSION = $(shell cat VERSION)
-SRC_DIR := $(abspath $(CURDIR)/src/$(subst -,_,$(APP_CLI_NAME)))
+APP_NAME := $(notdir $(CURDIR))
+APP_CLI_NAME := simcore-service-$(APP_NAME)
+APP_PACKAGE_NAME := $(subst -,_,$(APP_CLI_NAME))
+APP_VERSION := $(shell cat VERSION)
+SRC_DIR := $(abspath $(CURDIR)/src/$(APP_PACKAGE_NAME))
+
+export APP_VERSION
.PHONY: reqs
reqs: ## compiles pip requirements (.in -> .txt)
@@ -71,18 +74,32 @@ down: docker-compose.yml ## stops pg fixture
# killing any process using port 8000
-@fuser --kill --verbose --namespace tcp 8000
-######################
-.PHONY: build
-build: ## builds docker image (using main services/docker-compose-build.yml)
- @$(MAKE_C) ${REPO_BASE_DIR} target=${APP_NAME} $@
+# BUILD ########
+
+
+.PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc
+build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## builds docker image (using main services/docker-compose-build.yml)
+ @$(MAKE_C) ${REPO_BASE_DIR} $@ target=${APP_NAME}
+
+.PHONY: openapi-specs openapi.json
+openapi-specs: openapi.json
+openapi.json: .env
+ # generating openapi specs file
+ python3 -c "import json; from $(APP_PACKAGE_NAME).__main__ import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@
-# GENERATION python client -------------------------------------------------
+
+# GENERATION python client ########
.PHONY: python-client generator-help
+
# SEE https://openapi-generator.tech/docs/usage#generate
# SEE https://openapi-generator.tech/docs/generators/python
+#
+# TODO: put instead to additional-props.yaml and --config=openapi-generator/python-config.yaml
+# TODO: copy this code to https://github.com/ITISFoundation/osparc-simcore-python-client/blob/master/Makefile
+#
# NOTE: assumes this repo exists
GIT_USER_ID := ITISFoundation
@@ -91,7 +108,6 @@ GIT_REPO_ID := osparc-simcore-python-client
SCRIPTS_DIR := $(abspath $(CURDIR)/../../scripts)
GENERATOR_NAME := python
-# TODO: put instead to additional-props.yaml and --config=openapi-generator/python-config.yaml
ADDITIONAL_PROPS := \
generateSourceCodeOnly=false\
hideGenerationTimestamp=true\
@@ -106,21 +122,13 @@ null :=
space := $(null) #
comma := ,
-# TODO: fix this, shall be generated upon start when flag is provided
-
-
-
-# TODO: code_samples still added by hand!
client:
# cloning $(GIT_USER_ID)/$(GIT_REPO_ID) -> $@
git clone git@github.com:$(GIT_USER_ID)/$(GIT_REPO_ID).git $@
cd client; git checkout -b "upgrade-${APP_VERSION}"
-python-client: client ## runs python client generator
- # download openapi.json
- curl -O http://localhost:8000/api/v0/openapi.json
-
+python-client: client openapi.json ## runs python client generator
cd $(CURDIR); \
$(SCRIPTS_DIR)/openapi-generator-cli.bash generate \
--generator-name=$(GENERATOR_NAME) \
@@ -134,8 +142,6 @@ python-client: client ## runs python client generator
--release-note="Updated to $(APP_VERSION)"
-
-
generator-help: ## help on client-api generator
# generate help
@$(SCRIPTS_DIR)/openapi-generator-cli.bash help generate
diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json
index cc38013fa9e..c22bbf0a345 100644
--- a/services/api-server/openapi.json
+++ b/services/api-server/openapi.json
@@ -2,7 +2,7 @@
"openapi": "3.0.2",
"info": {
"title": "Public API Server",
- "description": "**osparc-simcore Public RESTful API Specifications**\n## Python Client\n- Github [repo](https://github.com/ITISFoundation/osparc-simcore-python-client)\n- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``\n",
+ "description": "**osparc-simcore Public RESTful API Specifications**\n## Python Library\n- Check the [documentation](https://itisfoundation.github.io/osparc-simcore-python-client)\n- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``\n",
"version": "0.3.0",
"x-logo": {
"url": "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg",
diff --git a/services/catalog/.cookiecutterrc b/services/catalog/.cookiecutterrc
deleted file mode 100644
index 4a1cb9d42fa..00000000000
--- a/services/catalog/.cookiecutterrc
+++ /dev/null
@@ -1,23 +0,0 @@
-# This file exists so you can easily regenerate your project.
-#
-# cookiecutter --overwrite-if-exists --config-file=.cookiecutterrc ../cookiecutter-simcore-py-fastapi/
-#
-
-default_context:
-
- _extensions: ['jinja2_time.TimeExtension']
- _template: '../cookiecutter-simcore-py-fastapi/'
- command_line_interface_bin_name: 'simcore-service-catalog'
- distribution_name: 'simcore-service-catalog'
- dockercompose_service_api_port: '8000'
- dockercompose_service_name: 'catalog'
- enable_aiohttp_swagger: 'false'
- full_name: 'Pedro Crespo'
- github_username: 'pcrespov'
- openapi_specs_version: 'v0'
- package_name: 'simcore_service_catalog'
- project_name: 'Components Catalog Service'
- project_short_description: 'Manages and maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc)'
- project_slug: 'catalog'
- version: '0.3.2'
- year: '2020'
diff --git a/services/catalog/.env-devel b/services/catalog/.env-devel
new file mode 100644
index 00000000000..d77e2e84be5
--- /dev/null
+++ b/services/catalog/.env-devel
@@ -0,0 +1,13 @@
+#
+# Environment variables used to configure this service
+#
+
+LOG_LEVEL=DEBUG
+
+POSTGRES_USER=test
+POSTGRES_PASSWORD=test
+POSTGRES_DB=test
+POSTGRES_HOST=localhost
+
+# Enables debug
+SC_BOOT_MODE=debug-ptvsd
diff --git a/services/catalog/.gitignore b/services/catalog/.gitignore
new file mode 100644
index 00000000000..2bb202b8906
--- /dev/null
+++ b/services/catalog/.gitignore
@@ -0,0 +1,2 @@
+docker-compose.yml
+.env
diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile
index 48868012a0b..53b48c4180b 100644
--- a/services/catalog/Dockerfile
+++ b/services/catalog/Dockerfile
@@ -1,4 +1,6 @@
-FROM python:3.6.10-alpine3.11 as base
+ARG PYTHON_VERSION="3.6.10"
+FROM python:${PYTHON_VERSION}-slim-buster as base
+#
#
# USAGE:
# cd sercices/catalog
@@ -9,20 +11,42 @@ FROM python:3.6.10-alpine3.11 as base
LABEL maintainer=pcrespov
-RUN adduser -D -u 8004 -s /bin/sh -h /home/scu scu
+RUN set -eux; \
+ apt-get update; \
+ apt-get install -y gosu; \
+ rm -rf /var/lib/apt/lists/*; \
+# verify that the binary works
+ gosu nobody true
+
+# simcore-user uid=8004(scu) gid=8004(scu) groups=8004(scu)
+ENV SC_USER_ID=8004 \
+ SC_USER_NAME=scu \
+ SC_BUILD_TARGET=base \
+ SC_BOOT_MODE=default
+
+RUN adduser \
+ --uid ${SC_USER_ID} \
+ --disabled-password \
+ --gecos "" \
+ --shell /bin/sh \
+ --home /home/${SC_USER_NAME} \
+ ${SC_USER_NAME}
+
-RUN apk add --no-cache \
- su-exec
+# Sets utf-8 encoding for Python et al
+ENV LANG=C.UTF-8
-ENV PATH "/home/scu/.local/bin:$PATH"
+# Turns off writing .pyc files; superfluous on an ephemeral container.
+ENV PYTHONDONTWRITEBYTECODE=1 \
+ VIRTUAL_ENV=/home/scu/.venv
-# NOTE: All SC_ variables are customized
-ENV SC_BUILD_TARGET base
+# Ensures that the python and pip executables used in the image will be
+# those from our virtualenv.
+ENV PATH="${VIRTUAL_ENV}/bin:$PATH"
EXPOSE 8000
EXPOSE 3000
-
# -------------------------- Build stage -------------------
# Installs build/package management tools and third party dependencies
#
@@ -30,36 +54,27 @@ EXPOSE 3000
#
FROM base as build
-ENV SC_BUILD_TARGET build
+ENV SC_BUILD_TARGET=build
-# Installing client libraries and any other package you need
-#
-# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html
-# libstdc++: needed in ujson https://github.com/kohlschutter/junixsocket/issues/33
-#
-RUN apk update && \
- apk add --no-cache \
- libpq \
- libstdc++
-
-RUN apk add --no-cache \
- alpine-sdk \
- python3-dev \
- musl-dev \
- postgresql-dev
-
-RUN pip3 --no-cache-dir install --upgrade \
- pip~=20.0.2 \
- wheel \
- setuptools
+RUN apt-get update &&\
+ apt-get install -y --no-install-recommends \
+ build-essential
+
+# NOTE: python virtualenv is used here such that installed
+# packages may be moved to production image easily by copying the venv
+RUN python -m venv ${VIRTUAL_ENV}
+
+RUN pip install --upgrade --no-cache-dir \
+ pip~=20.1.1 \
+ wheel \
+ setuptools
WORKDIR /build
# install base 3rd party dependencies
-COPY services/catalog/requirements/*.txt \
- services/catalog/requirements/
-
-RUN pip3 --no-cache-dir install -r services/catalog/requirements/_base.txt
+# NOTE: copies to /build to avoid overwriting later which would invalidate this layer
+COPY --chown=scu:scu services/catalog/requirements/_base.txt .
+RUN pip --no-cache-dir install -r _base.txt
# --------------------------Cache stage -------------------
@@ -88,28 +103,28 @@ RUN pip3 --no-cache-dir install -r requirements/prod.txt &&\
# + /home/scu $HOME = WORKDIR
# + services/catalog [scu:scu]
#
-FROM cache as production
+FROM base as production
-ENV SC_BUILD_TARGET production
-ENV SC_BOOT_MODE production
+ENV SC_BUILD_TARGET=production \
+ SC_BOOT_MODE=production
+
+ENV PYTHONOPTIMIZE=TRUE
WORKDIR /home/scu
-RUN mkdir -p services/catalog &&\
- chown scu:scu services/catalog &&\
- mv /build/services/catalog/docker services/catalog/docker &&\
- rm -rf /build
+# Starting from clean base image, copies pre-installed virtualenv from cache
+COPY --chown=scu:scu --from=cache ${VIRTUAL_ENV} ${VIRTUAL_ENV}
+
+# Copies booting scripts
+COPY --chown=scu:scu services/catalog/docker services/catalog/docker
+RUN chmod +x services/catalog/docker/*.sh
-RUN apk del --no-cache\
- alpine-sdk \
- python3-dev \
- musl-dev
HEALTHCHECK --interval=30s \
- --timeout=20s \
- --start-period=30s \
- --retries=3 \
- CMD ["python3", "services/catalog/docker/healthcheck.py", "http://localhost:8000/"]
+ --timeout=20s \
+ --start-period=30s \
+ --retries=3 \
+ CMD ["python3", "services/catalog/docker/healthcheck.py", "http://localhost:8000/"]
ENTRYPOINT [ "/bin/sh", "services/catalog/docker/entrypoint.sh" ]
CMD ["/bin/sh", "services/catalog/docker/boot.sh"]
@@ -125,12 +140,11 @@ CMD ["/bin/sh", "services/catalog/docker/boot.sh"]
#
FROM build as development
-ENV SC_BUILD_TARGET development
-ENV SC_BOOT_MODE development
+ENV SC_BUILD_TARGET=development
WORKDIR /devel
-VOLUME /devel/packages
-VOLUME /devel/services/catalog/
+
+RUN chown -R scu:scu ${VIRTUAL_ENV}
ENTRYPOINT ["/bin/sh", "services/catalog/docker/entrypoint.sh"]
CMD ["/bin/sh", "services/catalog/docker/boot.sh"]
diff --git a/services/catalog/Makefile b/services/catalog/Makefile
index 268b99fe573..df3401cae86 100644
--- a/services/catalog/Makefile
+++ b/services/catalog/Makefile
@@ -4,13 +4,16 @@
include ../../scripts/common.Makefile
# Custom variables
-APP_NAME := $(notdir $(CURDIR))
-APP_CLI_NAME := simcore-service-catalog
-export APP_VERSION = $(shell cat VERSION)
+APP_NAME := $(notdir $(CURDIR))
+APP_CLI_NAME := simcore-service-catalog
+APP_PACKAGE_NAME := $(subst -,_,$(APP_CLI_NAME))
+APP_VERSION := $(shell cat VERSION)
+SRC_DIR := $(abspath $(CURDIR)/src/$(APP_PACKAGE_NAME))
+export APP_VERSION
-.PHONY: requirements
-requirements: ## compiles pip requirements (.in -> .txt)
+.PHONY: requirements reqs
+requirements reqs: ## (or reqs) compiles pip requirements (.in -> .txt)
@$(MAKE_C) requirements reqs
@@ -34,12 +37,35 @@ tests-integration: ## runs integration tests against local+production images
pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests/integration
-.PHONY: run-devel down-pg up-pg
-run-devel run-prod: up-pg ## runs app with pg service
+# DEVELOPMENT ########
+
+
+.env:
+ cp .env-devel $@
+
+docker-compose.yml:
+ cp $(CURDIR)/tests/unit/with_dbs/docker-compose.yml $@
+
+
+.PHONY: run-devel down up-pg
+
+up-pg: docker-compose.yml down-pg
+ # starting pg database ...
+ docker-compose -f $< up --detach
+
+down-pg: docker-compose.yml ## stops pg fixture
+ # stopping extra services
+ -@docker-compose -f $< down
+
+
+run-devel run-prod: .env up-pg ## runs app with pg service
# starting service ...
ifeq ($(subst run-,,$@),devel)
# development mode (with reload upon change)
- uvicorn simcore_service_catalog.main:app --reload
+ # start app (under $<)
+ uvicorn simcore_service_catalog.__main__:the_app \
+ --reload --reload-dir $(SRC_DIR) \
+ --port=8000 --host=0.0.0.0
else
# production mode
simcore-service-catalog
@@ -47,13 +73,7 @@ endif
# stop
-up-pg: down-pg
- # starting pg database ...
- docker-compose -f $(CURDIR)/tests/unit/with_dbs/docker-compose.yml up --detach
-
-down-pg: ## stops pg fixture
- docker-compose -f $(CURDIR)/tests/unit/with_dbs/docker-compose.yml down
-
+# BUILD #####################
.PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc
build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker image build in many flavours
@@ -61,15 +81,8 @@ build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker
@$(MAKE_C) ${REPO_BASE_DIR} $@ target=${APP_NAME}
-.PHONY: openapi-specs
-openapi-specs: install-dev ## TODO: implementing a way to serialize openapi
- python3 -c "from simcore_service_catalog.main import *; dump_openapi()"
-
-
-.PHONY: replay
-replay: .cookiecutterrc ## re-applies cookiecutter
- # Replaying ../cookiecutter-simcore-py-fastapi/ ...
- @cookiecutter --no-input --overwrite-if-exists \
- --config-file=$< \
- --output-dir="$(abspath $(CURDIR)/..)" \
- "../cookiecutter-simcore-py-fastapi/"
+.PHONY: openapi-specs openapi.json
+openapi-specs: openapi.json
+openapi.json:
+ # generating openapi specs file
+ python3 -c "import json; from $(APP_PACKAGE_NAME).__main__ import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@
diff --git a/services/catalog/README.md b/services/catalog/README.md
index 6f1e91dba5b..29bbe27fb58 100644
--- a/services/catalog/README.md
+++ b/services/catalog/README.md
@@ -7,42 +7,33 @@
Manages and maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc)
+## Development
+
Typical development workflow:
```cmd
+make devenv
+source .venv/bin/activate
+
+cd services/catalog
+make install-dev
+```
-$ cd services/catalog
-$ make help
-Recipes for 'catalog':
-
-devenv build development environment (using main services/docker-compose-build.yml)
-requirements compiles pip requirements (.in -> .txt)
-install-dev install-prod install-ci install app in development/production or CI mode
-tests-unit runs unit tests
-tests-integration runs integration tests against local+production images
-run-devel runs app with pg fixture for development
-down stops pg fixture
-build builds docker image (using main services/docker-compose-build.yml)
-autoformat runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/]
-version-patch commits version with bug fixes not affecting the cookiecuter config
-version-minor commits version with backwards-compatible API addition or changes (i.e. can replay)
-version-major commits version with backwards-INcompatible addition or changes
-replay re-applies cookiecutter
-info displays information
-clean cleans all unversioned files in project and temp files create by this makefile
-help this colorful help
-
-
-$ make devenv
-$ make install-dev
-$ make run-devel
-
-
-$ make tests
-$ make build
+Then
+```cmd
+make run-devel
```
+will start the service in development-mode together with a postgres db initialized with test data. The API can be queried using
+- http://127.0.0.1:8000/dev/docs: swagger-UI API doc
+Finally
+```cmd
+make tests
+make build-devel
+make build
+```
+
diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh
index ef94369f3ff..6c011f49bed 100755
--- a/services/catalog/docker/boot.sh
+++ b/services/catalog/docker/boot.sh
@@ -1,37 +1,36 @@
#!/bin/sh
-#
+set -o errexit
+set -o nounset
+
+IFS=$(printf '\n\t')
+
INFO="INFO: [$(basename "$0")] "
# BOOTING application ---------------------------------------------
echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..."
-echo " User :$(id "$(whoami)")"
-echo " Workdir :$(pwd)"
-
-if [ "${SC_BUILD_TARGET}" = "development" ]
-then
- echo " Environment :"
- printenv | sed 's/=/: /' | sed 's/^/ /' | sort
- #--------------------
+echo "$INFO" "User :$(id "$(whoami)")"
+echo "$INFO" "Workdir : $(pwd)"
- cd /devel/services/catalog || exit
- pip3 --no-cache-dir install --user -r requirements/dev.txt
- cd /devel || exit
-
- #--------------------
- echo "$INFO" " Python :"
+if [ "${SC_BUILD_TARGET}" = "development" ]; then
+ echo "$INFO" "Environment :"
+ printenv | sed 's/=/: /' | sed 's/^/ /' | sort
+ echo "$INFO" "Python :"
python --version | sed 's/^/ /'
command -v python | sed 's/^/ /'
- echo "$INFO" " PIP :"
- pip3 --no-cache-dir list | sed 's/^/ /'
-fi
+ cd services/catalog || exit 1
+ pip --quiet --no-cache-dir install -r requirements/dev.txt
+ cd - || exit 1
+ echo "$INFO" "PIP :"
+ pip list | sed 's/^/ /'
+fi
# RUNNING application ----------------------------------------
if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]
then
# NOTE: ptvsd is programmatically enabled inside of the service
# this way we can have reload in place as well
- exec uvicorn simcore_service_catalog.main:app --reload --host 0.0.0.0
+ exec uvicorn simcore_service_catalog.__main__:the_app --reload --host 0.0.0.0
else
exec simcore-service-catalog
fi
diff --git a/services/catalog/docker/entrypoint.sh b/services/catalog/docker/entrypoint.sh
index 1aa3bc90a9e..9e734b5db40 100755
--- a/services/catalog/docker/entrypoint.sh
+++ b/services/catalog/docker/entrypoint.sh
@@ -1,6 +1,11 @@
#!/bin/sh
-#
+set -o errexit
+set -o nounset
+
+IFS=$(printf '\n\t')
+
INFO="INFO: [$(basename "$0")] "
+WARNING="WARNING: [$(basename "$0")] "
ERROR="ERROR: [$(basename "$0")] "
# This entrypoint script:
@@ -10,45 +15,61 @@ ERROR="ERROR: [$(basename "$0")] "
# *runs* as non-root user [scu]
#
echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..."
-echo " User :$(id "$(whoami)")"
-echo " Workdir :$(pwd)"
-
+echo "$INFO" "User :$(id "$(whoami)")"
+echo "$INFO" "Workdir : $(pwd)"
+echo "$INFO" "User : $(id scu)"
+echo "$INFO" "python : $(command -v python)"
+echo "$INFO" "pip : $(command -v pip)"
-if [ "${SC_BUILD_TARGET}" = "development" ]
-then
- # NOTE: expects docker run ... -v $(pwd):/devel/services/catalog
- DEVEL_MOUNT=/devel/services/catalog
+USERNAME=scu
+GROUPNAME=scu
- stat $DEVEL_MOUNT > /dev/null 2>&1 || \
- (echo "$ERROR" ": You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script
+if [ "${SC_BUILD_TARGET}" = "development" ]; then
+ echo "$INFO" "development mode detected..."
+ # NOTE: expects docker run ... -v $(pwd):$DEVEL_MOUNT
+ DEVEL_MOUNT=/devel/services/catalog
+ stat $DEVEL_MOUNT >/dev/null 2>&1 ||
+ (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1)
- USERID=$(stat -c %u $DEVEL_MOUNT)
- GROUPID=$(stat -c %g $DEVEL_MOUNT)
- GROUPNAME=$(getent group "${GROUPID}" | cut -d: -f1)
-
- if [ "$USERID" -eq 0 ]
- then
- addgroup scu root
+ echo "$INFO" "setting correct user id/group id..."
+ HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}")
+ HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}")
+ CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1)
+ if [ "$HOST_USERID" -eq 0 ]; then
+ echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..."
+ adduser "$SC_USER_NAME" root
+ else
+ echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..."
+ # take host's credentials in $SC_USER_NAME
+ if [ -z "$CONT_GROUPNAME" ]; then
+ echo "$WARNING" "Creating new group grp$SC_USER_NAME"
+ CONT_GROUPNAME=grp$SC_USER_NAME
+ addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME"
else
- # take host's credentials in host_group
- if [ -z "$GROUPNAME" ]
- then
- GROUPNAME=host_group
- addgroup -g "$GROUPID" $GROUPNAME
- else
- addgroup scu $GROUPNAME
- fi
-
- deluser scu > /dev/null 2>&1
- adduser -u "$USERID" -G $GROUPNAME -D -s /bin/sh scu
+ echo "$INFO" "group already exists"
fi
+ echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..."
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME"
+
+ echo "$WARNING" "Changing ownership [this could take some time]"
+ echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)"
+ usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME"
+
+ echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME"
+ find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \;
+ # change user property of files already around
+ echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME"
+ find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \;
+ fi
fi
-if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]
-then
+if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then
# NOTE: production does NOT pre-installs ptvsd
- python3 -m pip install ptvsd
+ pip install --no-cache-dir ptvsd
fi
-echo "$INFO" "Starting boot ..."
-exec su-exec scu "$@"
+echo "$INFO Starting $* ..."
+echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")"
+echo " local dir : $(ls -al)"
+
+exec gosu "$SC_USER_NAME" "$@"
diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json
new file mode 100644
index 00000000000..1f9fa770653
--- /dev/null
+++ b/services/catalog/openapi.json
@@ -0,0 +1,715 @@
+{
+ "openapi": "3.0.2",
+ "info": {
+ "title": "Components Catalog Service",
+ "description": "Manages and maintains a **catalog** of all published components (e.g. macro-algorithms, scripts, etc)",
+ "version": "0.3.2"
+ },
+ "paths": {
+ "/v0/meta": {
+ "get": {
+ "tags": [
+ "meta"
+ ],
+ "summary": "Get Service Metadata",
+ "operationId": "get_service_metadata_v0_meta_get",
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Meta"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v0/dags": {
+ "get": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "List Dags",
+ "operationId": "list_dags_v0_dags_get",
+ "parameters": [
+ {
+ "description": "Requests a specific page of the list results",
+ "required": false,
+ "schema": {
+ "title": "Page Token",
+ "type": "string",
+ "description": "Requests a specific page of the list results"
+ },
+ "name": "page_token",
+ "in": "query"
+ },
+ {
+ "description": "Maximum number of results to be returned by the server",
+ "required": false,
+ "schema": {
+ "title": "Page Size",
+ "minimum": 0.0,
+ "type": "integer",
+ "description": "Maximum number of results to be returned by the server",
+ "default": 0
+ },
+ "name": "page_size",
+ "in": "query"
+ },
+ {
+ "description": "Sorts in ascending order comma-separated fields",
+ "required": false,
+ "schema": {
+ "title": "Order By",
+ "type": "string",
+ "description": "Sorts in ascending order comma-separated fields"
+ },
+ "name": "order_by",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "title": "Response List Dags V0 Dags Get",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/DAGOut"
+ }
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ },
+ "post": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Create Dag",
+ "operationId": "create_dag_v0_dags_post",
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DAGIn"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "201": {
+ "description": "Successfully created",
+ "content": {
+ "application/json": {
+ "schema": {
+ "title": "Response Create Dag V0 Dags Post",
+ "type": "integer"
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v0/dags:batchGet": {
+ "get": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Batch Get Dags",
+ "operationId": "batch_get_dags_v0_dags_batchGet_get",
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {}
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v0/dags:search": {
+ "get": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Search Dags",
+ "operationId": "search_dags_v0_dags_search_get",
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {}
+ }
+ }
+ }
+ }
+ }
+ },
+ "/v0/dags/{dag_id}": {
+ "get": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Get Dag",
+ "operationId": "get_dag_v0_dags__dag_id__get",
+ "parameters": [
+ {
+ "required": true,
+ "schema": {
+ "title": "Dag Id",
+ "type": "integer"
+ },
+ "name": "dag_id",
+ "in": "path"
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DAGOut"
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ },
+ "put": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Replace Dag",
+ "operationId": "replace_dag_v0_dags__dag_id__put",
+ "parameters": [
+ {
+ "required": true,
+ "schema": {
+ "title": "Dag Id",
+ "type": "integer"
+ },
+ "name": "dag_id",
+ "in": "path"
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DAGIn"
+ }
+ }
+ },
+ "required": true
+ },
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DAGOut"
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ },
+ "delete": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Delete Dag",
+ "operationId": "delete_dag_v0_dags__dag_id__delete",
+ "parameters": [
+ {
+ "required": true,
+ "schema": {
+ "title": "Dag Id",
+ "type": "integer"
+ },
+ "name": "dag_id",
+ "in": "path"
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "Successfully deleted"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ },
+ "patch": {
+ "tags": [
+ "DAG"
+ ],
+ "summary": "Udpate Dag",
+ "operationId": "udpate_dag_v0_dags__dag_id__patch",
+ "parameters": [
+ {
+ "required": true,
+ "schema": {
+ "title": "Dag Id",
+ "type": "integer"
+ },
+ "name": "dag_id",
+ "in": "path"
+ }
+ ],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DAGIn"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/DAGOut"
+ }
+ }
+ }
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/HTTPValidationError"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "Connection": {
+ "title": "Connection",
+ "type": "object",
+ "properties": {
+ "nodeUuid": {
+ "title": "Nodeuuid",
+ "type": "string"
+ },
+ "output": {
+ "title": "Output",
+ "type": "string"
+ }
+ }
+ },
+ "DAGIn": {
+ "title": "DAGIn",
+ "required": [
+ "key",
+ "version",
+ "name"
+ ],
+ "type": "object",
+ "properties": {
+ "key": {
+ "title": "Key",
+ "pattern": "^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\\s]+)+$",
+ "type": "string",
+ "example": "simcore/services/frontend/nodes-group/macros/1"
+ },
+ "version": {
+ "title": "Version",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$",
+ "type": "string",
+ "example": "1.0.0"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "title": "Description",
+ "type": "string"
+ },
+ "contact": {
+ "title": "Contact",
+ "type": "string",
+ "format": "email"
+ },
+ "workbench": {
+ "title": "Workbench",
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Node"
+ }
+ }
+ }
+ },
+ "DAGOut": {
+ "title": "DAGOut",
+ "required": [
+ "key",
+ "version",
+ "name",
+ "id"
+ ],
+ "type": "object",
+ "properties": {
+ "key": {
+ "title": "Key",
+ "pattern": "^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\\s]+)+$",
+ "type": "string",
+ "example": "simcore/services/frontend/nodes-group/macros/1"
+ },
+ "version": {
+ "title": "Version",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$",
+ "type": "string",
+ "example": "1.0.0"
+ },
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "description": {
+ "title": "Description",
+ "type": "string"
+ },
+ "contact": {
+ "title": "Contact",
+ "type": "string",
+ "format": "email"
+ },
+ "id": {
+ "title": "Id",
+ "type": "integer"
+ },
+ "workbench": {
+ "title": "Workbench",
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/components/schemas/Node"
+ }
+ }
+ }
+ },
+ "FilePickerOutput": {
+ "title": "FilePickerOutput",
+ "required": [
+ "store",
+ "path",
+ "label"
+ ],
+ "type": "object",
+ "properties": {
+ "store": {
+ "title": "Store",
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "integer"
+ }
+ ]
+ },
+ "dataset": {
+ "title": "Dataset",
+ "type": "string"
+ },
+ "path": {
+ "title": "Path",
+ "type": "string"
+ },
+ "label": {
+ "title": "Label",
+ "type": "string"
+ }
+ }
+ },
+ "HTTPValidationError": {
+ "title": "HTTPValidationError",
+ "type": "object",
+ "properties": {
+ "detail": {
+ "title": "Detail",
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/ValidationError"
+ }
+ }
+ }
+ },
+ "Meta": {
+ "title": "Meta",
+ "required": [
+ "name",
+ "version"
+ ],
+ "type": "object",
+ "properties": {
+ "name": {
+ "title": "Name",
+ "type": "string"
+ },
+ "version": {
+ "title": "Version",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$",
+ "type": "string"
+ },
+ "released": {
+ "title": "Released",
+ "type": "object",
+ "additionalProperties": {
+ "type": "string",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$"
+ },
+ "description": "Maps every route's path tag with a released version"
+ }
+ },
+ "example": {
+ "name": "simcore_service_foo",
+ "version": "2.4.45",
+ "released": {
+ "v1": "1.3.4",
+ "v2": "2.4.45"
+ }
+ }
+ },
+ "Node": {
+ "title": "Node",
+ "required": [
+ "key",
+ "version",
+ "label",
+ "position"
+ ],
+ "type": "object",
+ "properties": {
+ "key": {
+ "title": "Key",
+ "pattern": "^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\\s]+)+$",
+ "type": "string",
+ "example": "simcore/services/comp/sleeper"
+ },
+ "version": {
+ "title": "Version",
+ "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$",
+ "type": "string",
+ "example": "6.2.0"
+ },
+ "label": {
+ "title": "Label",
+ "type": "string"
+ },
+ "progress": {
+ "title": "Progress",
+ "maximum": 100.0,
+ "minimum": 0.0,
+ "type": "number",
+ "default": 0
+ },
+ "thumbnail": {
+ "title": "Thumbnail",
+ "type": "string"
+ },
+ "inputs": {
+ "title": "Inputs",
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "$ref": "#/components/schemas/Connection"
+ },
+ {
+ "$ref": "#/components/schemas/FilePickerOutput"
+ }
+ ]
+ }
+ },
+ "inputAccess": {
+ "title": "Inputaccess",
+ "type": "object",
+ "additionalProperties": {
+ "enum": [
+ "ReadAndWrite",
+ "Invisible",
+ "ReadOnly"
+ ],
+ "type": "string"
+ }
+ },
+ "inputNodes": {
+ "title": "Inputnodes",
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "default": []
+ },
+ "outputs": {
+ "title": "Outputs",
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "$ref": "#/components/schemas/FilePickerOutput"
+ }
+ ]
+ }
+ },
+ "outputNode": {
+ "title": "Outputnode",
+ "type": "boolean",
+ "deprecated": true
+ },
+ "outputNodes": {
+ "title": "Outputnodes",
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "default": []
+ },
+ "parent": {
+ "title": "Parent",
+ "type": "string",
+ "description": "Parent's (group-nodes') node ID s.",
+ "example": "nodeUUid1"
+ },
+ "position": {
+ "$ref": "#/components/schemas/Position"
+ }
+ }
+ },
+ "Position": {
+ "title": "Position",
+ "required": [
+ "x",
+ "y"
+ ],
+ "type": "object",
+ "properties": {
+ "x": {
+ "title": "X",
+ "type": "integer"
+ },
+ "y": {
+ "title": "Y",
+ "type": "integer"
+ }
+ }
+ },
+ "ValidationError": {
+ "title": "ValidationError",
+ "required": [
+ "loc",
+ "msg",
+ "type"
+ ],
+ "type": "object",
+ "properties": {
+ "loc": {
+ "title": "Location",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "msg": {
+ "title": "Message",
+ "type": "string"
+ },
+ "type": {
+ "title": "Error Type",
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/services/catalog/requirements/_base.in b/services/catalog/requirements/_base.in
index aa4eb284375..12465f1409c 100644
--- a/services/catalog/requirements/_base.in
+++ b/services/catalog/requirements/_base.in
@@ -6,9 +6,16 @@
pyyaml>=5.3 # Vulnerable
+# fastapi and extensions
fastapi[all]
-aiopg[sa]
-tenacity
-
async-exit-stack # not needed when python>=3.7
async-generator # not needed when python>=3.7
+
+# data models
+pydantic[dotenv]
+
+# database
+aiopg[sa]
+
+# other
+tenacity
diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt
index c6a2bafafa0..e5f6e2414fd 100644
--- a/services/catalog/requirements/_base.txt
+++ b/services/catalog/requirements/_base.txt
@@ -4,42 +4,44 @@
#
# pip-compile --output-file=requirements/_base.txt requirements/_base.in
#
-aiofiles==0.4.0 # via fastapi
+aiofiles==0.5.0 # via fastapi
aiopg[sa]==1.0.0 # via -r requirements/_base.in
aniso8601==7.0.0 # via graphene
async-exit-stack==1.0.1 # via -r requirements/_base.in, fastapi
async-generator==1.10 # via -r requirements/_base.in, fastapi
-certifi==2019.11.28 # via requests
+certifi==2020.6.20 # via requests
chardet==3.0.4 # via requests
-click==7.0 # via uvicorn
+click==7.1.2 # via uvicorn
dataclasses==0.7 # via pydantic
dnspython==1.16.0 # via email-validator
-email-validator==1.0.5 # via fastapi
-fastapi[all]==0.48.0 # via -r requirements/_base.in
+email-validator==1.1.1 # via fastapi
+fastapi[all]==0.58.0 # via -r requirements/_base.in
graphene==2.1.8 # via fastapi
-graphql-core==2.3.1 # via graphene, graphql-relay
+graphql-core==2.3.2 # via graphene, graphql-relay
graphql-relay==2.0.1 # via graphene
h11==0.9.0 # via uvicorn
-httptools==0.0.13 # via uvicorn
-idna==2.8 # via email-validator, requests, yarl
+httptools==0.1.1 # via uvicorn
+idna==2.9 # via email-validator, requests, yarl
itsdangerous==1.1.0 # via fastapi
-jinja2==2.11.1 # via fastapi
+jinja2==2.11.2 # via fastapi
markupsafe==1.1.1 # via jinja2
-multidict==4.7.4 # via yarl
+multidict==4.7.6 # via yarl
+orjson==3.1.2 # via fastapi
promise==2.3 # via graphql-core, graphql-relay
-psycopg2-binary==2.8.4 # via aiopg, sqlalchemy
-pydantic==1.4 # via fastapi
+psycopg2-binary==2.8.5 # via aiopg, sqlalchemy
+pydantic[dotenv]==1.5.1 # via -r requirements/_base.in, fastapi
+python-dotenv==0.13.0 # via pydantic
python-multipart==0.0.5 # via fastapi
-pyyaml==5.3 # via -r requirements/_base.in, fastapi
-requests==2.22.0 # via fastapi
+pyyaml==5.3.1 # via -r requirements/_base.in, fastapi
+requests==2.24.0 # via fastapi
rx==1.6.1 # via graphql-core
-six==1.14.0 # via graphene, graphql-core, graphql-relay, python-multipart, tenacity
-sqlalchemy[postgresql_psycopg2binary]==1.3.13 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiopg
-starlette==0.12.9 # via fastapi
-tenacity==6.0.0 # via -r requirements/_base.in
-ujson==1.35 # via fastapi
-urllib3==1.25.8 # via requests
-uvicorn==0.11.2 # via fastapi
+six==1.15.0 # via graphene, graphql-core, graphql-relay, python-multipart, tenacity
+sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiopg
+starlette==0.13.4 # via fastapi
+tenacity==6.2.0 # via -r requirements/_base.in
+ujson==3.0.0 # via fastapi
+urllib3==1.25.9 # via requests
+uvicorn==0.11.5 # via fastapi
uvloop==0.14.0 # via uvicorn
websockets==8.1 # via uvicorn
yarl==1.4.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in
diff --git a/services/catalog/requirements/_test.in b/services/catalog/requirements/_test.in
index b75eddfd81a..e9fd578a097 100644
--- a/services/catalog/requirements/_test.in
+++ b/services/catalog/requirements/_test.in
@@ -8,6 +8,7 @@
# 'services/catalog/tests/unit' dependencies
+
# testing
pytest
pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76
@@ -19,6 +20,10 @@ pytest-docker
# fixtures
Faker
+# migration due to pytest_simcore.postgres_service2
+alembic
+docker
+
# tools
pylint
coveralls
diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt
index f2b0284c413..4ef4eaebc95 100644
--- a/services/catalog/requirements/_test.txt
+++ b/services/catalog/requirements/_test.txt
@@ -4,76 +4,84 @@
#
# pip-compile --output-file=requirements/_test.txt requirements/_test.in
#
-aiofiles==0.4.0 # via -r requirements/_base.txt, fastapi
+aiofiles==0.5.0 # via -r requirements/_base.txt, fastapi
aiohttp==3.6.2 # via pytest-aiohttp
aiopg[sa]==1.0.0 # via -r requirements/_base.txt
+alembic==1.4.2 # via -r requirements/_test.in
aniso8601==7.0.0 # via -r requirements/_base.txt, graphene
-astroid==2.3.3 # via pylint
+astroid==2.4.2 # via pylint
async-exit-stack==1.0.1 # via -r requirements/_base.txt, fastapi
async-generator==1.10 # via -r requirements/_base.txt, fastapi
async-timeout==3.0.1 # via aiohttp
attrs==19.3.0 # via aiohttp, pytest, pytest-docker
-certifi==2019.11.28 # via -r requirements/_base.txt, requests
+certifi==2020.6.20 # via -r requirements/_base.txt, requests
chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests
-click==7.0 # via -r requirements/_base.txt, uvicorn
-codecov==2.0.16 # via -r requirements/_test.in
-coverage==5.0.3 # via codecov, coveralls, pytest-cov
-coveralls==1.11.1 # via -r requirements/_test.in
+click==7.1.2 # via -r requirements/_base.txt, uvicorn
+codecov==2.1.7 # via -r requirements/_test.in
+coverage==5.1 # via codecov, coveralls, pytest-cov
+coveralls==2.0.0 # via -r requirements/_test.in
dataclasses==0.7 # via -r requirements/_base.txt, pydantic
dnspython==1.16.0 # via -r requirements/_base.txt, email-validator
+docker==4.2.1 # via -r requirements/_test.in
docopt==0.6.2 # via coveralls
-email-validator==1.0.5 # via -r requirements/_base.txt, fastapi
-faker==4.0.2 # via -r requirements/_test.in
-fastapi[all]==0.48.0 # via -r requirements/_base.txt
+email-validator==1.1.1 # via -r requirements/_base.txt, fastapi
+faker==4.1.1 # via -r requirements/_test.in
+fastapi[all]==0.58.0 # via -r requirements/_base.txt
graphene==2.1.8 # via -r requirements/_base.txt, fastapi
-graphql-core==2.3.1 # via -r requirements/_base.txt, graphene, graphql-relay
+graphql-core==2.3.2 # via -r requirements/_base.txt, graphene, graphql-relay
graphql-relay==2.0.1 # via -r requirements/_base.txt, graphene
h11==0.9.0 # via -r requirements/_base.txt, uvicorn
-httptools==0.0.13 # via -r requirements/_base.txt, uvicorn
+httptools==0.1.1 # via -r requirements/_base.txt, uvicorn
idna-ssl==1.1.0 # via aiohttp
-idna==2.8 # via -r requirements/_base.txt, email-validator, requests, yarl
-importlib-metadata==1.5.0 # via pluggy, pytest
+idna==2.9 # via -r requirements/_base.txt, email-validator, requests, yarl
+importlib-metadata==1.6.1 # via pluggy, pytest
isort==4.3.21 # via pylint
itsdangerous==1.1.0 # via -r requirements/_base.txt, fastapi
-jinja2==2.11.1 # via -r requirements/_base.txt, fastapi
+jinja2==2.11.2 # via -r requirements/_base.txt, fastapi
lazy-object-proxy==1.4.3 # via astroid
-markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2
+mako==1.1.3 # via alembic
+markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2, mako
mccabe==0.6.1 # via pylint
-more-itertools==8.2.0 # via pytest
-multidict==4.7.4 # via -r requirements/_base.txt, aiohttp, yarl
-packaging==20.3 # via pytest
+more-itertools==8.4.0 # via pytest
+multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl
+orjson==3.1.2 # via -r requirements/_base.txt, fastapi
+packaging==20.4 # via pytest
pluggy==0.13.1 # via pytest
promise==2.3 # via -r requirements/_base.txt, graphql-core, graphql-relay
-psycopg2-binary==2.8.4 # via -r requirements/_base.txt, aiopg, sqlalchemy
+psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy
ptvsd==4.3.2 # via -r requirements/_test.in
-py==1.8.1 # via pytest
-pydantic==1.4 # via -r requirements/_base.txt, fastapi
-pylint==2.4.4 # via -r requirements/_test.in
-pyparsing==2.4.6 # via packaging
+py==1.9.0 # via pytest
+pydantic[dotenv]==1.5.1 # via -r requirements/_base.txt, fastapi
+pylint==2.5.3 # via -r requirements/_test.in
+pyparsing==2.4.7 # via packaging
pytest-aiohttp==0.3.0 # via -r requirements/_test.in
-pytest-cov==2.8.1 # via -r requirements/_test.in
+pytest-cov==2.10.0 # via -r requirements/_test.in
pytest-docker==0.7.2 # via -r requirements/_test.in
-pytest-mock==2.0.0 # via -r requirements/_test.in
+pytest-mock==3.1.1 # via -r requirements/_test.in
pytest-runner==5.2 # via -r requirements/_test.in
pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-mock
-python-dateutil==2.8.1 # via faker
+python-dateutil==2.8.1 # via alembic, faker
+python-dotenv==0.13.0 # via -r requirements/_base.txt, pydantic
+python-editor==1.0.4 # via alembic
python-multipart==0.0.5 # via -r requirements/_base.txt, fastapi
-pyyaml==5.3 # via -r requirements/_base.txt, fastapi
-requests==2.22.0 # via -r requirements/_base.txt, codecov, coveralls, fastapi
+pyyaml==5.3.1 # via -r requirements/_base.txt, fastapi
+requests==2.24.0 # via -r requirements/_base.txt, codecov, coveralls, docker, fastapi
rx==1.6.1 # via -r requirements/_base.txt, graphql-core
-six==1.14.0 # via -r requirements/_base.txt, astroid, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity
-sqlalchemy[postgresql_psycopg2binary]==1.3.13 # via -r requirements/_base.txt, aiopg
-starlette==0.12.9 # via -r requirements/_base.txt, fastapi
-tenacity==6.0.0 # via -r requirements/_base.txt
+six==1.15.0 # via -r requirements/_base.txt, astroid, docker, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity, websocket-client
+sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, aiopg, alembic
+starlette==0.13.4 # via -r requirements/_base.txt, fastapi
+tenacity==6.2.0 # via -r requirements/_base.txt
text-unidecode==1.3 # via faker
+toml==0.10.1 # via pylint
typed-ast==1.4.1 # via astroid
-typing-extensions==3.7.4.1 # via aiohttp
-ujson==1.35 # via -r requirements/_base.txt, fastapi
-urllib3==1.25.8 # via -r requirements/_base.txt, requests
-uvicorn==0.11.2 # via -r requirements/_base.txt, fastapi
+typing-extensions==3.7.4.2 # via aiohttp
+ujson==3.0.0 # via -r requirements/_base.txt, fastapi
+urllib3==1.25.9 # via -r requirements/_base.txt, requests
+uvicorn==0.11.5 # via -r requirements/_base.txt, fastapi
uvloop==0.14.0 # via -r requirements/_base.txt, uvicorn
-wcwidth==0.1.8 # via pytest
+wcwidth==0.2.5 # via pytest
+websocket-client==0.57.0 # via docker
websockets==8.1 # via -r requirements/_base.txt, uvicorn
-wrapt==1.11.2 # via astroid
+wrapt==1.12.1 # via astroid
yarl==1.4.2 # via -r requirements/_base.txt, aiohttp
zipp==3.1.0 # via importlib-metadata
diff --git a/services/catalog/requirements/dev.txt b/services/catalog/requirements/dev.txt
index ebdfa55d837..99cc51a1cef 100644
--- a/services/catalog/requirements/dev.txt
+++ b/services/catalog/requirements/dev.txt
@@ -10,7 +10,7 @@
-r _test.txt
# installs this repo's packages
--e ../../packages/postgres-database/
+-e ../../packages/postgres-database/[migration]
-e ../../packages/pytest-simcore/
# installs current package
diff --git a/services/catalog/setup.cfg b/services/catalog/setup.cfg
index d993000b975..004829b443e 100644
--- a/services/catalog/setup.cfg
+++ b/services/catalog/setup.cfg
@@ -7,7 +7,3 @@ tag = False
[bumpversion:file:VERSION]
[bumpversion:file:src/simcore_service_catalog/api/v0/openapi.yaml]
-
-[bumpversion:file:.cookiecutterrc]
-search = '{current_version}'
-replace = '{new_version}'
diff --git a/services/catalog/src/simcore_service_catalog/__main__.py b/services/catalog/src/simcore_service_catalog/__main__.py
index 10fa4f0890e..0228973c881 100644
--- a/services/catalog/src/simcore_service_catalog/__main__.py
+++ b/services/catalog/src/simcore_service_catalog/__main__.py
@@ -3,14 +3,32 @@
`python -m simcore_service_catalog ...`
"""
+import sys
+from pathlib import Path
+
import uvicorn
+from fastapi import FastAPI
+
+from simcore_service_catalog.core.application import init_app
+from simcore_service_catalog.core.settings import AppSettings, BootModeEnum
+
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+
-from simcore_service_catalog.config import uvicorn_settings
-from simcore_service_catalog.main import app
+# SINGLETON FastAPI app
+the_app: FastAPI = init_app()
def main():
- uvicorn.run(app, **uvicorn_settings)
+ cfg: AppSettings = the_app.state.settings
+ uvicorn.run(
+ "simcore_service_catalog.__main__:the_app",
+ host=cfg.host,
+ port=cfg.port,
+ reload=cfg.boot_mode == BootModeEnum.development,
+ reload_dirs=[current_dir,],
+ log_level=cfg.log_level_name.lower(),
+ )
if __name__ == "__main__":
diff --git a/services/catalog/src/simcore_service_catalog/__version__.py b/services/catalog/src/simcore_service_catalog/__version__.py
index 3e72d0de708..ac32db2ddd2 100644
--- a/services/catalog/src/simcore_service_catalog/__version__.py
+++ b/services/catalog/src/simcore_service_catalog/__version__.py
@@ -5,4 +5,4 @@
major, minor, patch = __version__.split(".")
api_version = __version__
-api_version_prefix: str = f"v{major}"
+api_vtag: str = f"v{major}"
diff --git a/services/catalog/src/simcore_service_catalog/endpoints/__init__.py b/services/catalog/src/simcore_service_catalog/api/__init__.py
similarity index 100%
rename from services/catalog/src/simcore_service_catalog/endpoints/__init__.py
rename to services/catalog/src/simcore_service_catalog/api/__init__.py
diff --git a/services/catalog/src/simcore_service_catalog/schemas/__init__.py b/services/catalog/src/simcore_service_catalog/api/dependencies/__init__.py
similarity index 100%
rename from services/catalog/src/simcore_service_catalog/schemas/__init__.py
rename to services/catalog/src/simcore_service_catalog/api/dependencies/__init__.py
diff --git a/services/catalog/src/simcore_service_catalog/api/dependencies/database.py b/services/catalog/src/simcore_service_catalog/api/dependencies/database.py
new file mode 100644
index 00000000000..ef94b71e26c
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/api/dependencies/database.py
@@ -0,0 +1,21 @@
+from typing import AsyncGenerator, Callable, Type
+
+from aiopg.sa import Engine
+from fastapi import Depends
+from fastapi.requests import Request
+
+from ...db.repositories import BaseRepository
+
+
+def _get_db_engine(request: Request) -> Engine:
+ return request.app.state.engine
+
+
+def get_repository(repo_type: Type[BaseRepository]) -> Callable:
+ async def _get_repo(
+ engine: Engine = Depends(_get_db_engine),
+ ) -> AsyncGenerator[BaseRepository, None]:
+ async with engine.acquire() as conn:
+ yield repo_type(conn)
+
+ return _get_repo
diff --git a/services/catalog/src/simcore_service_catalog/api/root.py b/services/catalog/src/simcore_service_catalog/api/root.py
new file mode 100644
index 00000000000..a17002b1378
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/api/root.py
@@ -0,0 +1,10 @@
+from fastapi import APIRouter
+
+from .routes import dags, health, meta
+
+router = APIRouter()
+router.include_router(health.router)
+
+# API
+router.include_router(meta.router, tags=["meta"], prefix="/meta")
+router.include_router(dags.router, tags=["DAG"], prefix="/dags")
diff --git a/services/catalog/src/simcore_service_catalog/utils/__init__.py b/services/catalog/src/simcore_service_catalog/api/routes/__init__.py
similarity index 100%
rename from services/catalog/src/simcore_service_catalog/utils/__init__.py
rename to services/catalog/src/simcore_service_catalog/api/routes/__init__.py
diff --git a/services/catalog/src/simcore_service_catalog/endpoints/dags.py b/services/catalog/src/simcore_service_catalog/api/routes/dags.py
similarity index 68%
rename from services/catalog/src/simcore_service_catalog/endpoints/dags.py
rename to services/catalog/src/simcore_service_catalog/api/routes/dags.py
index a75c2ed6eb1..a373a987f2f 100644
--- a/services/catalog/src/simcore_service_catalog/endpoints/dags.py
+++ b/services/catalog/src/simcore_service_catalog/api/routes/dags.py
@@ -9,15 +9,15 @@
HTTP_501_NOT_IMPLEMENTED,
)
-from .. import db
-from ..schemas import schemas_dags as schemas
-from ..store import crud_dags as crud
+from ...db.repositories.dags import DAGsRepository
+from ...models.schemas.dag import DAGIn, DAGOut
+from ..dependencies.database import get_repository
router = APIRouter()
log = logging.getLogger(__name__)
-@router.get("/dags", response_model=List[schemas.DAGOut])
+@router.get("", response_model=List[DAGOut])
async def list_dags(
page_token: Optional[str] = Query(
None, description="Requests a specific page of the list results"
@@ -28,7 +28,7 @@ async def list_dags(
order_by: Optional[str] = Query(
None, description="Sorts in ascending order comma-separated fields"
),
- conn: db.SAConnection = Depends(db.get_cnx),
+ dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)),
):
# List is suited to data from a single collection that is bounded in size and not cached
@@ -41,18 +41,18 @@ async def list_dags(
# TODO: filter: https://cloud.google.com/apis/design/naming_convention#list_filter_field
# SEE response: https://cloud.google.com/apis/design/naming_convention#list_response
log.debug("%s %s %s", page_token, page_size, order_by)
- dags = await crud.list_dags(conn)
+ dags = await dags_repo.list_dags()
return dags
-@router.get("/dags:batchGet")
+@router.get(":batchGet")
async def batch_get_dags():
raise HTTPException(
status_code=HTTP_501_NOT_IMPLEMENTED, detail="Still not implemented"
)
-@router.get("/dags:search")
+@router.get(":search")
async def search_dags():
# A method that takes multiple resource IDs and returns an object for each of those IDs
# Alternative to List for fetching data that does not adhere to List semantics, such as services.search.
@@ -62,20 +62,23 @@ async def search_dags():
)
-@router.get("/dags/{dag_id}", response_model=schemas.DAGOut)
-async def get_dag(dag_id: int, conn: db.SAConnection = Depends(db.get_cnx)):
- dag = await crud.get_dag(conn, dag_id)
+@router.get("/{dag_id}", response_model=DAGOut)
+async def get_dag(
+ dag_id: int, dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)),
+):
+ dag = await dags_repo.get_dag(dag_id)
return dag
@router.post(
- "/dags",
+ "",
response_model=int,
status_code=HTTP_201_CREATED,
response_description="Successfully created",
)
async def create_dag(
- dag: schemas.DAGIn = Body(...), conn: db.SAConnection = Depends(db.get_cnx)
+ dag: DAGIn = Body(...),
+ dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)),
):
assert dag # nosec
@@ -87,40 +90,42 @@ async def create_dag(
)
# FIXME: conversion DAG (issue with workbench being json in orm and dict in schema)
- dag_id = await crud.create_dag(conn, dag)
+ dag_id = await dags_repo.create_dag(dag)
# TODO: no need to return since there is not extra info?, perhaps return
return dag_id
-@router.patch("/dags/{dag_id}", response_model=schemas.DAGOut)
+@router.patch("/{dag_id}", response_model=DAGOut)
async def udpate_dag(
dag_id: int,
- dag: schemas.DAGIn = Body(None),
- conn: db.SAConnection = Depends(db.get_cnx),
+ dag: DAGIn = Body(None),
+ dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)),
):
- async with conn.begin():
- await crud.update_dag(conn, dag_id, dag)
- updated_dag = await crud.get_dag(conn, dag_id)
+ async with dags_repo.connection.begin():
+ await dags_repo.update_dag(dag_id, dag)
+ updated_dag = await dags_repo.get_dag(dag_id)
return updated_dag
-@router.put("/dags/{dag_id}", response_model=Optional[schemas.DAGOut])
+@router.put("/{dag_id}", response_model=Optional[DAGOut])
async def replace_dag(
dag_id: int,
- dag: schemas.DAGIn = Body(...),
- conn: db.SAConnection = Depends(db.get_cnx),
+ dag: DAGIn = Body(...),
+ dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)),
):
- await crud.replace_dag(conn, dag_id, dag)
+ await dags_repo.replace_dag(dag_id, dag)
@router.delete(
- "/dags/{dag_id}",
+ "/{dag_id}",
status_code=HTTP_204_NO_CONTENT,
response_description="Successfully deleted",
)
-async def delete_dag(dag_id: int, conn: db.SAConnection = Depends(db.get_cnx)):
+async def delete_dag(
+ dag_id: int, dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)),
+):
# If the Delete method immediately removes the resource, it should return an empty response.
# If the Delete method initiates a long-running operation, it should return the long-running operation.
# If the Delete method only marks the resource as being deleted, it should return the updated resource.
- await crud.delete_dag(conn, dag_id)
+ await dags_repo.delete_dag(dag_id)
diff --git a/services/catalog/src/simcore_service_catalog/api/routes/health.py b/services/catalog/src/simcore_service_catalog/api/routes/health.py
new file mode 100644
index 00000000000..d8b50c5f504
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/api/routes/health.py
@@ -0,0 +1,8 @@
+from fastapi import APIRouter
+
+router = APIRouter()
+
+
+@router.get("/", include_in_schema=False)
+async def check_service_health():
+ return ":-)"
diff --git a/services/catalog/src/simcore_service_catalog/api/routes/meta.py b/services/catalog/src/simcore_service_catalog/api/routes/meta.py
new file mode 100644
index 00000000000..903ce9666b1
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/api/routes/meta.py
@@ -0,0 +1,15 @@
+from fastapi import APIRouter
+
+from ...__version__ import __version__, api_version, api_vtag
+from ...models.schemas.meta import Meta
+
+router = APIRouter()
+
+
+@router.get("", response_model=Meta)
+async def get_service_metadata():
+ return Meta(
+ name=__name__.split(".")[0],
+ version=api_version,
+ released={api_vtag: api_version},
+ )
diff --git a/services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml b/services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml
deleted file mode 100644
index 41fd31693ca..00000000000
--- a/services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml
+++ /dev/null
@@ -1,452 +0,0 @@
-components:
- schemas:
- Connection:
- properties:
- nodeUuid:
- title: Nodeuuid
- type: string
- output:
- title: Output
- type: string
- title: Connection
- type: object
- DAGIn:
- properties:
- contact:
- format: email
- title: Contact
- type: string
- description:
- title: Description
- type: string
- key:
- example: simcore/services/frontend/nodes-group/macros/1
- pattern: ^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\s]+)+$
- title: Key
- type: string
- name:
- title: Name
- type: string
- version:
- example: 1.0.0
- pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$
- title: Version
- type: string
- workbench:
- additionalProperties:
- $ref: '#/components/schemas/Node'
- title: Workbench
- type: object
- required:
- - key
- - version
- - name
- title: DAGIn
- type: object
- DAGOut:
- properties:
- contact:
- format: email
- title: Contact
- type: string
- description:
- title: Description
- type: string
- id:
- title: Id
- type: integer
- key:
- example: simcore/services/frontend/nodes-group/macros/1
- pattern: ^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\s]+)+$
- title: Key
- type: string
- name:
- title: Name
- type: string
- version:
- example: 1.0.0
- pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$
- title: Version
- type: string
- workbench:
- additionalProperties:
- $ref: '#/components/schemas/Node'
- title: Workbench
- type: object
- required:
- - key
- - version
- - name
- - id
- title: DAGOut
- type: object
- FilePickerOutput:
- properties:
- dataset:
- title: Dataset
- type: string
- label:
- title: Label
- type: string
- path:
- title: Path
- type: string
- store:
- anyOf:
- - type: string
- - type: integer
- title: Store
- required:
- - store
- - path
- - label
- title: FilePickerOutput
- type: object
- HTTPValidationError:
- properties:
- detail:
- items:
- $ref: '#/components/schemas/ValidationError'
- title: Detail
- type: array
- title: HTTPValidationError
- type: object
- Node:
- properties:
- inputAccess:
- additionalProperties:
- enum:
- - ReadAndWrite
- - Invisible
- - ReadOnly
- type: string
- title: Inputaccess
- type: object
- inputNodes:
- default: []
- items:
- type: string
- title: Inputnodes
- type: array
- inputs:
- additionalProperties:
- anyOf:
- - type: integer
- - type: string
- - type: number
- - $ref: '#/components/schemas/Connection'
- - $ref: '#/components/schemas/FilePickerOutput'
- title: Inputs
- type: object
- key:
- example: simcore/services/comp/sleeper
- pattern: ^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\s]+)+$
- title: Key
- type: string
- label:
- title: Label
- type: string
- outputNode:
- deprecated: true
- title: Outputnode
- type: boolean
- outputNodes:
- default: []
- items:
- type: string
- title: Outputnodes
- type: array
- outputs:
- additionalProperties:
- anyOf:
- - type: integer
- - type: string
- - type: number
- - $ref: '#/components/schemas/FilePickerOutput'
- title: Outputs
- type: object
- parent:
- description: Parent's (group-nodes') node ID s.
- example: nodeUUid1
- title: Parent
- type: string
- position:
- $ref: '#/components/schemas/Position'
- progress:
- default: 0
- maximum: 100.0
- minimum: 0.0
- title: Progress
- type: number
- thumbnail:
- title: Thumbnail
- type: string
- version:
- example: 6.2.0
- pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$
- title: Version
- type: string
- required:
- - key
- - version
- - label
- - position
- title: Node
- type: object
- Position:
- properties:
- x:
- title: X
- type: integer
- y:
- title: Y
- type: integer
- required:
- - x
- - y
- title: Position
- type: object
- ValidationError:
- properties:
- loc:
- items:
- type: string
- title: Location
- type: array
- msg:
- title: Message
- type: string
- type:
- title: Error Type
- type: string
- required:
- - loc
- - msg
- - type
- title: ValidationError
- type: object
-info:
- description: Manages and maintains a **catalog** of all published components (e.g.
- macro-algorithms, scripts, etc)
- title: Components Catalog Service
- version: 0.3.2
-openapi: 3.0.2
-paths:
- /:
- get:
- operationId: healthcheck__get
- responses:
- '200':
- content:
- application/json:
- schema: {}
- description: Successful Response
- summary: Healthcheck
- tags:
- - diagnostics
- /v0/dags:
- get:
- operationId: list_dags_v0_dags_get
- parameters:
- - description: Requests a specific page of the list results
- in: query
- name: page_token
- required: false
- schema:
- description: Requests a specific page of the list results
- title: Page Token
- type: string
- - description: Maximum number of results to be returned by the server
- in: query
- name: page_size
- required: false
- schema:
- default: 0
- description: Maximum number of results to be returned by the server
- minimum: 0.0
- title: Page Size
- type: integer
- - description: Sorts in ascending order comma-separated fields
- in: query
- name: order_by
- required: false
- schema:
- description: Sorts in ascending order comma-separated fields
- title: Order By
- type: string
- responses:
- '200':
- content:
- application/json:
- schema:
- items:
- $ref: '#/components/schemas/DAGOut'
- title: Response List Dags V0 Dags Get
- type: array
- description: Successful Response
- '422':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/HTTPValidationError'
- description: Validation Error
- summary: List Dags
- tags:
- - dags
- post:
- operationId: create_dag_v0_dags_post
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/DAGIn'
- required: true
- responses:
- '201':
- content:
- application/json:
- schema:
- title: Response Create Dag V0 Dags Post
- type: integer
- description: Successfully created
- '422':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/HTTPValidationError'
- description: Validation Error
- summary: Create Dag
- tags:
- - dags
- /v0/dags/{dag_id}:
- delete:
- operationId: delete_dag_v0_dags__dag_id__delete
- parameters:
- - in: path
- name: dag_id
- required: true
- schema:
- title: Dag Id
- type: integer
- responses:
- '204':
- description: Successfully deleted
- '422':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/HTTPValidationError'
- description: Validation Error
- summary: Delete Dag
- tags:
- - dags
- get:
- operationId: get_dag_v0_dags__dag_id__get
- parameters:
- - in: path
- name: dag_id
- required: true
- schema:
- title: Dag Id
- type: integer
- responses:
- '200':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/DAGOut'
- description: Successful Response
- '422':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/HTTPValidationError'
- description: Validation Error
- summary: Get Dag
- tags:
- - dags
- patch:
- operationId: udpate_dag_v0_dags__dag_id__patch
- parameters:
- - in: path
- name: dag_id
- required: true
- schema:
- title: Dag Id
- type: integer
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/DAGIn'
- responses:
- '200':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/DAGOut'
- description: Successful Response
- '422':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/HTTPValidationError'
- description: Validation Error
- summary: Udpate Dag
- tags:
- - dags
- put:
- operationId: replace_dag_v0_dags__dag_id__put
- parameters:
- - in: path
- name: dag_id
- required: true
- schema:
- title: Dag Id
- type: integer
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/DAGIn'
- required: true
- responses:
- '200':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/DAGOut'
- description: Successful Response
- '422':
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/HTTPValidationError'
- description: Validation Error
- summary: Replace Dag
- tags:
- - dags
- /v0/dags:batchGet:
- get:
- operationId: batch_get_dags_v0_dags_batchGet_get
- responses:
- '200':
- content:
- application/json:
- schema: {}
- description: Successful Response
- summary: Batch Get Dags
- tags:
- - dags
- /v0/dags:search:
- get:
- operationId: search_dags_v0_dags_search_get
- responses:
- '200':
- content:
- application/json:
- schema: {}
- description: Successful Response
- summary: Search Dags
- tags:
- - dags
diff --git a/services/catalog/src/simcore_service_catalog/config.py b/services/catalog/src/simcore_service_catalog/config.py
deleted file mode 100644
index a04b5d84611..00000000000
--- a/services/catalog/src/simcore_service_catalog/config.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""
-
- NOTE: CONS of programmatic config
- - not testing-friendly since variables set upon import. Must reload when fixture is setup
-"""
-import logging
-import os
-
-from .utils.helpers import cast_to_bool
-
-# DOCKER
-is_container_environ: bool = "SC_BOOT_MODE" in os.environ
-is_devel = os.environ.get("SC_BUILD_TARGET") == "development"
-is_prod = os.environ.get("SC_BUILD_TARGET") == "production"
-
-
-# LOGGING
-log_level_name = os.environ.get("LOGLEVEL", "debug").upper()
-log_level = getattr(logging, log_level_name.upper())
-log_formatter = logging.Formatter("%(levelname)s: %(message)s [%(name)s:%(lineno)d]")
-
-logging.root.setLevel(log_level)
-if logging.root.handlers:
- logging.root.handlers[0].setFormatter(log_formatter)
-
-
-# TEST MODE
-is_testing_enabled: bool = cast_to_bool(os.environ.get("TESTING", "true"))
-
-
-# POSGRESS API
-postgres_cfg: dict = {
- "user": os.environ.get("POSTGRES_USER", "test"),
- "password": os.environ.get("POSTGRES_PASSWORD", "test"),
- "database": os.environ.get("POSTGRES_DB", "test"),
- "host": os.environ.get("POSTGRES_HOST", "localhost"),
- "port": int(os.environ.get("POSTGRES_PORT", "5432")),
-}
-postgres_dsn: str = "postgresql://{user}:{password}@{host}:{port}/{database}".format(
- **postgres_cfg
-)
-postgres_cfg: dict = {**postgres_cfg, "uri": postgres_dsn}
-init_tables: bool = cast_to_bool(
- os.environ.get("POSTGRES_INIT_TABLES", "true" if is_devel else "false")
-)
-
-# SERVER
-# NOTE: https://www.uvicorn.org/settings/
-uvicorn_settings: dict = {
- "host": "0.0.0.0" if is_container_environ else "127.0.0.1", # nosec
- "port": 8000,
- "log_level": log_level_name.lower(),
-}
-
-# APPLICATION
-app_context: dict = {} # FIXME: hate globals!
diff --git a/services/catalog/src/simcore_service_catalog/core/__init__.py b/services/catalog/src/simcore_service_catalog/core/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py
new file mode 100644
index 00000000000..3fe1f837297
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/core/application.py
@@ -0,0 +1,57 @@
+import logging
+from typing import Optional
+
+from fastapi import FastAPI
+
+from ..__version__ import api_version, api_vtag
+from ..api.root import router as api_router
+from ..api.routes.health import router as health_router
+from .events import create_start_app_handler, create_stop_app_handler
+from .settings import AppSettings
+
+# from fastapi.exceptions import RequestValidationError
+# from starlette.exceptions import HTTPException
+
+# from ..api.errors.http_error import http_error_handler
+# from ..api.errors.validation_error import http422_error_handler
+
+
+logger = logging.getLogger(__name__)
+
+
+def init_app(settings: Optional[AppSettings] = None) -> FastAPI:
+ if settings is None:
+ settings = AppSettings.create_default()
+
+ logging.basicConfig(level=settings.loglevel)
+ logging.root.setLevel(settings.loglevel)
+
+ app = FastAPI(
+ debug=settings.debug,
+ title="Components Catalog Service",
+ # TODO: get here extended description from setup or the other way around
+ description="Manages and maintains a **catalog** of all published components (e.g. macro-algorithms, scripts, etc)",
+ version=api_version,
+ openapi_url=f"/api/{api_vtag}/openapi.json",
+ docs_url="/dev/docs",
+ redoc_url=None, # default disabled
+ )
+
+ logger.debug(settings)
+ app.state.settings = settings
+
+ app.add_event_handler("startup", create_start_app_handler(app))
+ app.add_event_handler("shutdown", create_stop_app_handler(app))
+
+ # app.add_exception_handler(HTTPException, http_error_handler)
+ # app.add_exception_handler(RequestValidationError, http422_error_handler)
+
+ # Routing
+
+ # healthcheck at / and at /v0/
+ app.include_router(health_router)
+
+ # api under /v*
+ app.include_router(api_router, prefix=f"/{api_vtag}")
+
+ return app
diff --git a/services/catalog/src/simcore_service_catalog/core/errors.py b/services/catalog/src/simcore_service_catalog/core/errors.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py
new file mode 100644
index 00000000000..58d91eb81ab
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/core/events.py
@@ -0,0 +1,38 @@
+import logging
+from typing import Callable
+
+from fastapi import FastAPI
+
+from ..db.events import close_db_connection, connect_to_db
+from ..services.remote_debug import setup_remote_debugging
+from .settings import BootModeEnum
+
+logger = logging.getLogger(__name__)
+
+
+def create_start_app_handler(app: FastAPI) -> Callable:
+ async def start_app() -> None:
+ logger.info("Application started")
+
+ # setup connection to remote debugger (if applies)
+ setup_remote_debugging(
+ force_enabled=app.state.settings.boot_mode == BootModeEnum.debug
+ )
+
+ # setup connection to pg db
+ if app.state.settings.postgres.enabled:
+ await connect_to_db(app)
+
+ return start_app
+
+
+def create_stop_app_handler(app: FastAPI) -> Callable:
+ async def stop_app() -> None:
+ try:
+ logger.info("Application stopping")
+ if app.state.settings.postgres.enabled:
+ await close_db_connection(app)
+ except Exception: # pylint: disable=broad-except
+ logger.exception("Stopping application")
+
+ return stop_app
diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py
new file mode 100644
index 00000000000..cf684359a5b
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/core/settings.py
@@ -0,0 +1,84 @@
+import logging
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseSettings, Field, SecretStr, validator
+from yarl import URL
+
+
+class BootModeEnum(str, Enum):
+ debug = "debug-ptvsd"
+ production = "production"
+ development = "development"
+
+
+class _CommonConfig:
+ case_sensitive = False
+ env_file = ".env" # SEE https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support
+
+
+class PostgresSettings(BaseSettings):
+ enabled: bool = Field(
+ True, description="Enables/Disables connection with postgres service"
+ )
+ user: str
+ password: SecretStr
+ db: str
+ host: str
+ port: int = 5432
+
+ minsize: int = 10
+ maxsize: int = 10
+
+ @property
+ def dsn(self) -> URL:
+ return URL.build(
+ scheme="postgresql",
+ user=self.user,
+ password=self.password.get_secret_value(),
+ host=self.host,
+ port=self.port,
+ path=f"/{self.db}",
+ )
+
+ class Config(_CommonConfig):
+ env_prefix = "POSTGRES_"
+
+
+class AppSettings(BaseSettings):
+ @classmethod
+ def create_default(cls) -> "AppSettings":
+ # This call triggers parsers
+ return cls(postgres=PostgresSettings())
+
+ # pylint: disable=no-self-use
+ # pylint: disable=no-self-argument
+
+ # DOCKER
+ boot_mode: Optional[BootModeEnum] = Field(..., env="SC_BOOT_MODE")
+
+ # LOGGING
+ log_level_name: str = Field("DEBUG", env="LOG_LEVEL")
+
+ @validator("log_level_name")
+ def match_logging_level(cls, value) -> str:
+ try:
+ getattr(logging, value.upper())
+ except AttributeError:
+ raise ValueError(f"{value.upper()} is not a valid level")
+ return value.upper()
+
+ @property
+ def loglevel(self) -> int:
+ return getattr(logging, self.log_level_name)
+
+ # POSTGRES
+ postgres: PostgresSettings
+
+ # SERVICE SERVER (see : https://www.uvicorn.org/settings/)
+ host: str = "0.0.0.0" # nosec
+ port: int = 8000
+ debug: bool = False # If True, debug tracebacks should be returned on errors.
+
+ class Config(_CommonConfig):
+ env_prefix = ""
diff --git a/services/catalog/src/simcore_service_catalog/db.py b/services/catalog/src/simcore_service_catalog/db.py
deleted file mode 100644
index 675ec192395..00000000000
--- a/services/catalog/src/simcore_service_catalog/db.py
+++ /dev/null
@@ -1,60 +0,0 @@
-""" Access to postgres service
-
-"""
-from typing import Optional
-
-import aiopg.sa
-from aiopg.sa import Engine
-from aiopg.sa.connection import SAConnection
-from aiopg.sa.result import ResultProxy, RowProxy
-from fastapi import Depends
-from sqlalchemy.sql.ddl import CreateTable
-
-from .config import app_context, postgres_dsn
-from .orm import DAG, dags
-
-
-# TODO: idealy context cleanup. This concept here? app-context Dependency?
-async def setup_engine() -> Engine:
- engine = await aiopg.sa.create_engine(
- postgres_dsn,
- # unique identifier per app
- application_name=f"{__name__}_{id(app_context)}",
- minsize=5,
- maxsize=10,
- )
- app_context["engine"] = engine
-
- return engine
-
-
-async def teardown_engine() -> None:
- engine = app_context["engine"]
- engine.close()
- await engine.wait_closed()
-
-
-async def create_tables(conn: SAConnection):
- # FIXME: this is dangerous since it enforces an empty table
- await conn.execute(f"DROP TABLE IF EXISTS {DAG.__tablename__}")
- await conn.execute(CreateTable(dags))
-
-
-def info(engine: Optional[Engine] = None):
- engine = engine or get_engine()
- props = "closed driver dsn freesize maxsize minsize name size timeout".split()
- for p in props:
- print(f"{p} = {getattr(engine, p)}")
-
-
-def get_engine() -> Engine:
- return app_context["engine"]
-
-
-async def get_cnx(engine: Engine = Depends(get_engine)):
- # TODO: problem here is retries??
- async with engine.acquire() as conn:
- yield conn
-
-
-__all__ = ("Engine", "ResultProxy", "RowProxy", "SAConnection")
diff --git a/services/catalog/src/simcore_service_catalog/db/__init__.py b/services/catalog/src/simcore_service_catalog/db/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/db/errors.py b/services/catalog/src/simcore_service_catalog/db/errors.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/db/events.py b/services/catalog/src/simcore_service_catalog/db/events.py
new file mode 100644
index 00000000000..eebdcb7c9d8
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/db/events.py
@@ -0,0 +1,56 @@
+import logging
+from io import StringIO
+
+from aiopg.sa import Engine, create_engine
+from fastapi import FastAPI
+from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
+
+from ..core.settings import PostgresSettings
+
+logger = logging.getLogger(__name__)
+
+
+ENGINE_ATTRS = "closed driver dsn freesize maxsize minsize name size timeout".split()
+
+
+pg_retry_policy = dict(
+ wait=wait_fixed(5),
+ stop=stop_after_attempt(20),
+ before_sleep=before_sleep_log(logger, logging.WARNING),
+ reraise=True,
+)
+
+
+def _compose_info_on_engine(app: FastAPI) -> str:
+ engine = app.state.engine
+ stm = StringIO()
+ print("Setup engine:", end=" ", file=stm)
+ for attr in ENGINE_ATTRS:
+ print(f"{attr}={getattr(engine, attr)}", end="; ", file=stm)
+ return stm.getvalue()
+
+
+@retry(**pg_retry_policy)
+async def connect_to_db(app: FastAPI) -> None:
+ logger.debug("Connecting db ...")
+
+ cfg: PostgresSettings = app.state.settings.postgres
+ engine: Engine = await create_engine(
+ str(cfg.dsn),
+ application_name=f"{__name__}_{id(app)}", # unique identifier per app
+ minsize=cfg.minsize,
+ maxsize=cfg.maxsize,
+ )
+ logger.debug("Connected to %s", engine.dsn)
+ app.state.engine = engine
+
+ logger.debug(_compose_info_on_engine(app))
+
+
+async def close_db_connection(app: FastAPI) -> None:
+ logger.debug("Disconnecting db ...")
+
+ engine: Engine = app.state.engine
+ engine.close()
+ await engine.wait_closed()
+ logger.debug("Disconnected from %s", engine.dsn)
diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/__init__.py b/services/catalog/src/simcore_service_catalog/db/repositories/__init__.py
new file mode 100644
index 00000000000..a5eeffe1ff5
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/db/repositories/__init__.py
@@ -0,0 +1 @@
+from ._base import BaseRepository
diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/_base.py b/services/catalog/src/simcore_service_catalog/db/repositories/_base.py
new file mode 100644
index 00000000000..81f04c0f7b5
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/db/repositories/_base.py
@@ -0,0 +1,15 @@
+from aiopg.sa.connection import SAConnection
+
+
+class BaseRepository:
+ """
+ Repositories are pulled at every request
+ All queries to db within that request use same connection
+ """
+
+ def __init__(self, conn: SAConnection) -> None:
+ self._conn = conn
+
+ @property
+ def connection(self) -> SAConnection:
+ return self._conn
diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/dags.py b/services/catalog/src/simcore_service_catalog/db/repositories/dags.py
new file mode 100644
index 00000000000..9138378efa3
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/db/repositories/dags.py
@@ -0,0 +1,60 @@
+import json
+from typing import List, Optional
+
+import sqlalchemy as sa
+from aiopg.sa.result import RowProxy
+
+from ...models.domain.dag import DAGAtDB
+from ...models.schemas.dag import DAGIn
+from ..tables import dags
+from ._base import BaseRepository
+
+
+class DAGsRepository(BaseRepository):
+ async def list_dags(self) -> List[DAGAtDB]:
+ dagraphs = []
+ async for row in self.connection.execute(dags.select()):
+ if row:
+ dagraphs.append(DAGAtDB(**row))
+ return dagraphs
+
+ async def get_dag(self, dag_id: int) -> Optional[DAGAtDB]:
+ stmt = dags.select().where(dags.c.id == dag_id)
+ row: RowProxy = await (await self.connection.execute(stmt)).first()
+ if row:
+ return DAGAtDB(**row)
+ return None
+
+ async def create_dag(self, dag: DAGIn) -> int:
+ stmt = dags.insert().values(
+ workbench=json.dumps(dag.dict()["workbench"]),
+ **dag.dict(exclude={"workbench"})
+ )
+ new_id: int = await (await self.connection.execute(stmt)).scalar()
+ return new_id
+
+ async def replace_dag(self, dag_id: int, dag: DAGIn):
+ stmt = (
+ dags.update()
+ .values(
+ workbench=json.dumps(dag.dict()["workbench"]),
+ **dag.dict(exclude={"workbench"})
+ )
+ .where(dags.c.id == dag_id)
+ )
+ await self.connection.execute(stmt)
+
+ async def update_dag(self, dag_id: int, dag: DAGIn):
+ patch = dag.dict(exclude_unset=True, exclude={"workbench"})
+ if "workbench" in dag.__fields_set__:
+ patch["workbench"] = json.dumps(patch["workbench"])
+
+ stmt = sa.update(dags).values(**patch).where(dags.c.id == dag_id)
+ res = await self.connection.execute(stmt)
+
+ # TODO: dev asserts
+ assert res.returns_rows == False # nosec
+
+ async def delete_dag(self, dag_id: int):
+ stmt = sa.delete(dags).where(dags.c.id == dag_id)
+ await self.connection.execute(stmt)
diff --git a/services/catalog/src/simcore_service_catalog/orm.py b/services/catalog/src/simcore_service_catalog/db/tables.py
similarity index 62%
rename from services/catalog/src/simcore_service_catalog/orm.py
rename to services/catalog/src/simcore_service_catalog/db/tables.py
index 01557d31052..be6fb11dfda 100644
--- a/services/catalog/src/simcore_service_catalog/orm.py
+++ b/services/catalog/src/simcore_service_catalog/db/tables.py
@@ -1,3 +1,3 @@
-from simcore_postgres_database.models.direct_acyclic_graphs import DAG, dags
+from simcore_postgres_database.models.direct_acyclic_graphs import dags
-__all__ = ["dags", "DAG"]
+__all__ = ["dags"]
diff --git a/services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py b/services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py
deleted file mode 100644
index 15411e27b83..00000000000
--- a/services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from fastapi import APIRouter
-
-from ..__version__ import __version__, api_version
-
-router = APIRouter()
-
-
-@router.get("/")
-async def healthcheck():
- # TODO: this is the entrypoint that docker uses to determin whether the service is starting, failed, etc...
- # TODO: Reaching this point, what does it means? How is the health of this service? when shall it respond non-succesful?
- return {
- "name": __name__.split(".")[0],
- "version": __version__,
- "status": "SERVICE_RUNNING",
- "api_version": api_version,
- }
diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py
deleted file mode 100644
index 63a1ad27460..00000000000
--- a/services/catalog/src/simcore_service_catalog/main.py
+++ /dev/null
@@ -1,78 +0,0 @@
-import logging
-import os
-import sys
-from pathlib import Path
-
-import yaml
-from fastapi import FastAPI
-from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed
-
-from . import config as cfg
-from .__version__ import api_version, api_version_prefix
-from .db import create_tables, setup_engine, teardown_engine
-from .endpoints import dags, diagnostics
-from .utils.remote_debug import setup_remote_debugging
-
-current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
-
-log = logging.getLogger(__name__)
-pid = os.getpid()
-
-app = FastAPI(
- debug=cfg.is_testing_enabled,
- title="Components Catalog Service",
- # TODO: get here extended description from setup
- description="Manages and maintains a **catalog** of all published components (e.g. macro-algorithms, scripts, etc)",
- version=api_version,
- openapi_url=f"/{api_version_prefix}/openapi.json",
-)
-
-# projects
-app.include_router(diagnostics.router, tags=["diagnostics"])
-app.include_router(dags.router, tags=["dags"], prefix=f"/{api_version_prefix}")
-
-
-def dump_openapi():
- oas_path: Path = current_dir / f"api/{api_version_prefix}/openapi.yaml"
- log.info("Saving openapi schema to %s", oas_path)
- with open(oas_path, "wt") as fh:
- yaml.safe_dump(app.openapi(), fh)
-
-
-@app.on_event("startup")
-def startup_event():
- log.info("Starting app '%d' [%d]...", id(app), pid)
- setup_remote_debugging()
-
-
-@app.on_event("startup")
-async def start_db():
- log.info("Initializing db")
-
- @retry(
- wait=wait_fixed(5),
- stop=stop_after_attempt(20),
- before_sleep=before_sleep_log(log, logging.WARNING),
- reraise=True,
- )
- async def go():
- engine = await setup_engine()
- assert engine # nosec
-
- if cfg.init_tables:
- log.info("Creating db tables (testing mode)")
- async with engine.acquire() as conn:
- await create_tables(conn)
-
- await go() # NOTE: non-blocking this way
-
-
-@app.on_event("shutdown")
-def shutdown_event():
- log.info("Closing app '%d' [%d]...", id(app), pid)
-
-
-@app.on_event("shutdown")
-async def shutdown_db():
- log.info("Closing db")
- await teardown_engine()
diff --git a/services/catalog/src/simcore_service_catalog/models/__init__.py b/services/catalog/src/simcore_service_catalog/models/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/models/domain/__init__.py b/services/catalog/src/simcore_service_catalog/models/domain/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py b/services/catalog/src/simcore_service_catalog/models/domain/dag.py
similarity index 71%
rename from services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py
rename to services/catalog/src/simcore_service_catalog/models/domain/dag.py
index cfea6816e4b..0bef8016217 100644
--- a/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py
+++ b/services/catalog/src/simcore_service_catalog/models/domain/dag.py
@@ -16,18 +16,6 @@ class DAGBase(BaseModel):
contact: Optional[EmailStr]
-class DAGIn(DAGBase):
- workbench: Optional[Dict[str, project.Node]]
-
-
-class DAGInPath(DAGBase):
- version: str
- name: str
- description: Optional[str]
- contact: Optional[str]
- workbench: Optional[Dict[str, project.Node]]
-
-
class DAGAtDB(DAGBase):
id: int
workbench: Json[Dict[str, project.Node]] # pylint: disable=unsubscriptable-object
@@ -36,5 +24,5 @@ class Config:
orm_mode = True
-class DAGOut(DAGAtDB):
+class DAGData(DAGAtDB):
workbench: Optional[Dict[str, project.Node]]
diff --git a/services/catalog/src/simcore_service_catalog/schemas/project.py b/services/catalog/src/simcore_service_catalog/models/domain/project.py
similarity index 100%
rename from services/catalog/src/simcore_service_catalog/schemas/project.py
rename to services/catalog/src/simcore_service_catalog/models/domain/project.py
diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/__init__.py b/services/catalog/src/simcore_service_catalog/models/schemas/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/dag.py b/services/catalog/src/simcore_service_catalog/models/schemas/dag.py
new file mode 100644
index 00000000000..392f3d3cdb3
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/models/schemas/dag.py
@@ -0,0 +1,20 @@
+from typing import Dict, Optional
+
+from ..domain import project
+from ..domain.dag import DAGBase, DAGData
+
+
+class DAGIn(DAGBase):
+ workbench: Optional[Dict[str, project.Node]]
+
+
+class DAGInPath(DAGBase):
+ version: str
+ name: str
+ description: Optional[str]
+ contact: Optional[str]
+ workbench: Optional[Dict[str, project.Node]]
+
+
+class DAGOut(DAGData):
+ pass
diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/meta.py b/services/catalog/src/simcore_service_catalog/models/schemas/meta.py
new file mode 100644
index 00000000000..dd23eced796
--- /dev/null
+++ b/services/catalog/src/simcore_service_catalog/models/schemas/meta.py
@@ -0,0 +1,27 @@
+from typing import Dict, Optional
+
+from pydantic import BaseModel, Field, constr
+
+# TODO: review this RE
+# use https://www.python.org/dev/peps/pep-0440/#version-scheme
+# or https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
+#
+VERSION_RE = r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$"
+VersionStr = constr(regex=VERSION_RE)
+
+
+class Meta(BaseModel):
+ name: str
+ version: VersionStr
+ released: Optional[Dict[str, VersionStr]] = Field(
+ None, description="Maps every route's path tag with a released version"
+ )
+
+ class Config:
+ schema_extra = {
+ "example": {
+ "name": "simcore_service_foo",
+ "version": "2.4.45",
+ "released": {"v1": "1.3.4", "v2": "2.4.45"},
+ }
+ }
diff --git a/services/catalog/src/simcore_service_catalog/services/__init__.py b/services/catalog/src/simcore_service_catalog/services/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/services/catalog/src/simcore_service_catalog/utils/remote_debug.py b/services/catalog/src/simcore_service_catalog/services/remote_debug.py
similarity index 58%
rename from services/catalog/src/simcore_service_catalog/utils/remote_debug.py
rename to services/catalog/src/simcore_service_catalog/services/remote_debug.py
index b29069c9fd8..d1ba21d5016 100644
--- a/services/catalog/src/simcore_service_catalog/utils/remote_debug.py
+++ b/services/catalog/src/simcore_service_catalog/services/remote_debug.py
@@ -1,38 +1,41 @@
""" Setup remote debugger with Python Tools for Visual Studio (PTVSD)
"""
+
import logging
import os
-REMOTE_DEBUG_PORT = 3000
+logger = logging.getLogger(__name__)
-log = logging.getLogger(__name__)
+REMOTE_DEBUG_PORT = 3000
-def setup_remote_debugging(force_enabled=False):
+def setup_remote_debugging(force_enabled=False, *, boot_mode=None):
"""
Programaticaly enables remote debugging if SC_BOOT_MODE==debug-ptvsd
"""
- boot_mode = os.environ.get("SC_BOOT_MODE")
+ boot_mode = boot_mode or os.environ.get("SC_BOOT_MODE")
if boot_mode == "debug-ptvsd" or force_enabled:
try:
- log.debug("Enabling attach ptvsd ...")
+ logger.debug("Enabling attach ptvsd ...")
#
# SEE https://github.com/microsoft/ptvsd#enabling-debugging
#
import ptvsd
ptvsd.enable_attach(
- address=("0.0.0.0", REMOTE_DEBUG_PORT), redirect_output=True
+ address=("0.0.0.0", REMOTE_DEBUG_PORT), # nosec
) # nosec
except ImportError:
raise ValueError(
"Cannot enable remote debugging. Please install ptvsd first"
)
- log.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT)
+ logger.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT)
else:
- log.debug("Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode)
+ logger.debug(
+ "Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode
+ )
__all__ = ["setup_remote_debugging"]
diff --git a/services/catalog/src/simcore_service_catalog/store/__init__.py b/services/catalog/src/simcore_service_catalog/store/__init__.py
deleted file mode 100644
index 848941974fd..00000000000
--- a/services/catalog/src/simcore_service_catalog/store/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""
- Access to data stored in database
-"""
diff --git a/services/catalog/src/simcore_service_catalog/store/crud_dags.py b/services/catalog/src/simcore_service_catalog/store/crud_dags.py
deleted file mode 100644
index a10815bb853..00000000000
--- a/services/catalog/src/simcore_service_catalog/store/crud_dags.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import json
-from typing import List, Optional
-
-import sqlalchemy as sa
-
-from .. import db, orm
-from ..schemas import schemas_dags as schemas
-
-
-async def list_dags(conn: db.SAConnection) -> List[schemas.DAGAtDB]:
- dags = []
- async for row in conn.execute(orm.dags.select()):
- if row:
- dags.append(schemas.DAGAtDB(**row))
- return dags
-
-
-async def get_dag(conn: db.SAConnection, dag_id: int) -> Optional[schemas.DAGAtDB]:
- stmt = orm.dags.select().where(orm.dags.c.id == dag_id)
- row: db.RowProxy = await (await conn.execute(stmt)).first()
- if row:
- return schemas.DAGAtDB(**row)
- return None
-
-
-async def create_dag(conn: db.SAConnection, dag: schemas.DAGIn):
- stmt = orm.dags.insert().values(
- workbench=json.dumps(dag.dict()["workbench"]), **dag.dict(exclude={"workbench"})
- )
- new_id: int = await (await conn.execute(stmt)).scalar()
- return new_id
-
-
-async def replace_dag(conn: db.SAConnection, dag_id: int, dag: schemas.DAGIn):
- stmt = (
- orm.dags.update()
- .values(
- workbench=json.dumps(dag.dict()["workbench"]),
- **dag.dict(exclude={"workbench"})
- )
- .where(orm.dags.c.id == dag_id)
- )
- await conn.execute(stmt)
-
-
-async def update_dag(conn: db.SAConnection, dag_id: int, dag: schemas.DAGIn):
- patch = dag.dict(exclude_unset=True, exclude={"workbench"})
- if "workbench" in dag.__fields_set__:
- patch["workbench"] = json.dumps(patch["workbench"])
-
- stmt = sa.update(orm.dags).values(**patch).where(orm.dags.c.id == dag_id)
- res = await conn.execute(stmt)
-
- # TODO: dev asserts
- assert res.returns_rows == False # nosec
-
-
-async def delete_dag(conn: db.SAConnection, dag_id: int):
- stmt = sa.delete(orm.dags).where(orm.dags.c.id == dag_id)
- await conn.execute(stmt)
diff --git a/services/catalog/src/simcore_service_catalog/utils/helpers.py b/services/catalog/src/simcore_service_catalog/utils/helpers.py
deleted file mode 100644
index 4d95766d28b..00000000000
--- a/services/catalog/src/simcore_service_catalog/utils/helpers.py
+++ /dev/null
@@ -1,2 +0,0 @@
-def cast_to_bool(value: str) -> bool:
- return value.lower() in ["true", "1", "yes"]
diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py
index 3c222784c79..c934ce49f28 100644
--- a/services/catalog/tests/unit/conftest.py
+++ b/services/catalog/tests/unit/conftest.py
@@ -11,11 +11,16 @@
import simcore_service_catalog
+pytest_plugins = ["pytest_simcore.postgres_service2"]
+
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+## FOLDER LAYOUT ------
+
+
@pytest.fixture(scope="session")
-def project_slug_dir():
+def project_slug_dir() -> Path:
folder = current_dir.parent.parent
assert folder.exists()
assert any(folder.glob("src/simcore_service_catalog"))
@@ -23,7 +28,7 @@ def project_slug_dir():
@pytest.fixture(scope="session")
-def package_dir():
+def installed_package_dir():
dirpath = Path(simcore_service_catalog.__file__).resolve().parent
assert dirpath.exists()
return dirpath
@@ -49,6 +54,9 @@ def api_specs_dir(osparc_simcore_root_dir):
return specs_dir
+# FAKE DATA ------
+
+
@pytest.fixture()
def fake_data_dag_in() -> Dict:
DAG_DATA_IN_DICT = {
diff --git a/services/catalog/tests/unit/test_package.py b/services/catalog/tests/unit/test_package.py
index 3c4e17d5671..11121fc8d4d 100644
--- a/services/catalog/tests/unit/test_package.py
+++ b/services/catalog/tests/unit/test_package.py
@@ -10,8 +10,6 @@
from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing
-# from simcore_service_catalog.__main__ import main
-
@pytest.fixture
def pylintrc(project_slug_dir, osparc_simcore_root_dir):
@@ -22,25 +20,14 @@ def pylintrc(project_slug_dir, osparc_simcore_root_dir):
return pylintrc
-def test_run_pylint(pylintrc, package_dir):
- assert_pylint_is_passing(pylintrc=pylintrc, package_dir=package_dir)
-
-
-# FIXME: main entrypoint
-# def test_main(here): # pylint: disable=unused-variable
-# """
-# Checks cli in place
-# """
-# with pytest.raises(SystemExit) as excinfo:
-# main("--help".split())
-#
-# assert excinfo.value.code == 0
+def test_run_pylint(pylintrc, installed_package_dir):
+ assert_pylint_is_passing(pylintrc=pylintrc, package_dir=installed_package_dir)
-def test_no_pdbs_in_place(package_dir):
+def test_no_pdbs_in_place(installed_package_dir):
MATCH = re.compile(r"pdb.set_trace()")
EXCLUDE = ["__pycache__", ".git"]
- for root, dirs, files in os.walk(package_dir):
+ for root, dirs, files in os.walk(installed_package_dir):
for name in files:
if name.endswith(".py"):
pypth = Path(root) / name
diff --git a/services/catalog/tests/unit/test_schemas.py b/services/catalog/tests/unit/test_schemas.py
index be56b2503be..0cfe82f8042 100644
--- a/services/catalog/tests/unit/test_schemas.py
+++ b/services/catalog/tests/unit/test_schemas.py
@@ -7,28 +7,16 @@
import pytest
-from simcore_service_catalog.orm import DAG
-from simcore_service_catalog.schemas import schemas_dags
-
-# from typing import Optional, TypeVar, Generic
-# from pydantic import GenericModel, BaseModel
-
-# DataT = TypeVar('DataT')
-
-# class Error(BaseModel):
-# code: int
-# message: str
-
-
-# class Envelope(GenericModel, Generic[DataT]):
-# data: Optional[DataT]
-# error: Optional[Error]
+import simcore_postgres_database.models.direct_acyclic_graphs as orm
+from simcore_service_catalog.db import tables
+from simcore_service_catalog.models.domain.dag import DAGAtDB
+from simcore_service_catalog.models.schemas.dag import DAGIn, DAGOut
@pytest.mark.skip(reason="DEV")
def test_dev():
- dag_in = schemas_dags.DAGIn(
+ dag_in = DAGIn(
key="simcore/services/frontend/nodes-group/macros/", version="1.0.0", name="foo"
)
assert "key" in dag_in.__fields_set__
@@ -44,22 +32,22 @@ def test_dev():
print(dag_in.dict(exclude_none=True))
-def test_api_in_2_orm(fake_data_dag_in):
+def test_api_in_to_orm(fake_data_dag_in):
# dag in to db
- dag_in = schemas_dags.DAGIn(**fake_data_dag_in)
+ dag_in = DAGIn(**fake_data_dag_in)
# TODO: create DAG.from_api( :DAGIn)
# SEE crud_dags.create_dag
- selection = set(DAG.__table__.columns.keys()).remove("workbench")
- dag_orm = DAG(
+ selection = set(tables.dags.columns.keys()).remove("workbench")
+ dag_orm = orm.DAG(
id=1,
workbench=json.dumps(fake_data_dag_in["workbench"]),
**dag_in.dict(include=selection, exclude={"workbench"}),
)
-def test_orm_2_api_out(fake_data_dag_in):
- dag_orm = DAG(
+def test_orm_to_api_out(fake_data_dag_in):
+ dag_orm = orm.DAG(
id=1,
key="simcore/services/comp/foo",
version="1.0.0",
@@ -69,8 +57,8 @@ def test_orm_2_api_out(fake_data_dag_in):
workbench=json.dumps(fake_data_dag_in["workbench"]),
)
- dag_db = schemas_dags.DAGAtDB.from_orm(dag_orm)
+ dag_db = DAGAtDB.from_orm(dag_orm)
assert type(dag_db.workbench) == dict
- dag_out = schemas_dags.DAGOut(**dag_db.dict())
- assert dag_out.id == 1
+ dag_out = DAGOut(**dag_db.dict())
+ assert dag_out.id == 1 # pylint: disable=no-member
diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py
index 58fa505758c..6c17c38c60a 100644
--- a/services/catalog/tests/unit/with_dbs/conftest.py
+++ b/services/catalog/tests/unit/with_dbs/conftest.py
@@ -2,73 +2,41 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
-import importlib
-import os
import sys
from pathlib import Path
+from typing import Dict
import pytest
-import sqlalchemy as sa
+from fastapi import FastAPI
+from starlette.testclient import TestClient
-import simcore_service_catalog.config
+from simcore_service_catalog.core.application import init_app
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
@pytest.fixture(scope="session")
-def environ_context():
- keep = os.environ.copy()
+def test_docker_compose_file() -> Path:
+ # OVERRIDES pytest_simcore.postgres_service2.test_docker_compose_file
+ return current_dir / "docker-compose.yml"
- # config database
- os.environ["POSTGRES_DB"] = "test"
- os.environ["POSTGRES_USER"] = "test"
- os.environ["POSTGRES_PASSWORD"] = "test"
- os.environ["POSTGRES_HOST"] = "127.0.0.1"
- os.environ["POSTGRES_PORT"] = "5432"
- os.environ["POSTGRES_INIT_TABLES"] = "True"
- os.environ["TESTING"] = "True"
+@pytest.fixture
+def app(
+ monkeypatch,
+ test_environment: Dict[str, str], # pytest_simcore.postgres_service2
+ apply_migration, # pytest_simcore.postgres_service2
+) -> FastAPI:
- # FIXME: dirty trick to update configuration with these environs! WARNING: might have side effects
- importlib.reload(simcore_service_catalog.config)
+ # Emulates environ so settings can get config
+ for key, value in test_environment.items():
+ monkeypatch.setenv(key, value)
- yield
+ app = init_app()
+ yield app
- os.environ = keep
-
-@pytest.fixture(scope="session")
-def docker_compose_file(environ_context):
- """ Overrides pytest-docker fixture """
-
- # docker-compose reads these environs
- file_path = current_dir / "docker-compose.yml"
- assert file_path.exists()
-
- yield str(file_path)
-
-
-def is_postgres_responsive(url: str):
- """Check if something responds to ``url`` """
- try:
- engine = sa.create_engine(url)
- conn = engine.connect()
- conn.close()
- except sa.exc.OperationalError:
- return False
- return True
-
-
-@pytest.fixture(scope="session")
-def postgres_service(docker_services, docker_ip, environ_context):
-
- url = "postgresql://{e[POSTGRES_USER]}:{e[POSTGRES_PASSWORD]}@{e[POSTGRES_HOST]}:{e[POSTGRES_PORT]}/{e[POSTGRES_DB]}".format(
- e=os.environ
- )
-
- # Wait until service is responsive.
- docker_services.wait_until_responsive(
- check=lambda: is_postgres_responsive(url), timeout=30.0, pause=0.1,
- )
-
- return url
+@pytest.fixture
+def client(app) -> TestClient:
+ with TestClient(app) as cli:
+ yield cli
diff --git a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py
index 9a70345b589..948c14f0b58 100644
--- a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py
+++ b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py
@@ -4,25 +4,22 @@
from typing import List
-import pytest
-from starlette.testclient import TestClient
-
-# TODO: app is init globally ... which is bad!
-from simcore_service_catalog.main import api_version, app
-
-
-@pytest.fixture
-def client(environ_context, postgres_service):
- # TODO: create new web-app everyt
- with TestClient(app) as cli:
- yield cli
+from simcore_service_catalog.__version__ import api_version
+from simcore_service_catalog.models.schemas.meta import Meta
def test_read_healthcheck(client):
response = client.get("/")
assert response.status_code == 200
- assert "api_version" in response.json()
- assert response.json()["api_version"] == api_version
+ assert response.text == '":-)"'
+
+
+def test_read_meta(client):
+ response = client.get("/v0/meta")
+ assert response.status_code == 200
+ meta = Meta(**response.json())
+ assert meta.version == api_version
+ assert meta.name == "simcore_service_catalog"
def test_list_dags(client):
diff --git a/services/director/Dockerfile b/services/director/Dockerfile
index a0acda52b7e..cbc0627f1d7 100644
--- a/services/director/Dockerfile
+++ b/services/director/Dockerfile
@@ -1,5 +1,5 @@
ARG PYTHON_VERSION="3.6.10"
-FROM python:${PYTHON_VERSION}-slim as base
+FROM python:${PYTHON_VERSION}-slim-buster as base
#
# USAGE:
# cd sercices/director
@@ -72,7 +72,7 @@ RUN apt-get update &&\
RUN python -m venv ${VIRTUAL_ENV}
RUN pip --no-cache-dir install --upgrade \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml
index 7d5492cdb78..c4876264f0f 100644
--- a/services/docker-compose.devel.yml
+++ b/services/docker-compose.devel.yml
@@ -9,7 +9,8 @@ services:
api-server:
environment:
- SC_BOOT_MODE=debug-ptvsd
- - LOGLEVEL=debug
+ - LOG_LEVEL=debug
+ - DEBUG=true
volumes:
- ./api-server:/devel/services/api-server
- ../packages:/devel/packages
@@ -17,8 +18,8 @@ services:
catalog:
environment:
- SC_BOOT_MODE=debug-ptvsd
- - TESTING=true
- - LOGLEVEL=debug
+ - LOG_LEVEL=debug
+ - DEBUG=true
volumes:
- ./catalog:/devel/services/catalog
- ../packages:/devel/packages
diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml
index b0b813ad346..33e07664f0c 100644
--- a/services/docker-compose.local.yml
+++ b/services/docker-compose.local.yml
@@ -18,8 +18,6 @@ services:
- "3006:3000"
catalog:
- environment:
- - SC_BOOT_MODE=${SC_BOOT_MODE:-default}
ports:
- "8005:8000"
- "3005:3000"
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index f2da6db23c5..5ef9124a6d1 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -36,8 +36,7 @@ services:
- POSTGRES_DB=${POSTGRES_DB}
- POSTGRES_HOST=${POSTGRES_HOST}
- POSTGRES_PORT=${POSTGRES_PORT}
- - TESTING=false
- - LOGLEVEL=${LOG_LEVEL:-WARNING}
+ - LOG_LEVEL=${LOG_LEVEL:-WARNING}
depends_on:
- postgres
networks:
diff --git a/services/sidecar/Dockerfile b/services/sidecar/Dockerfile
index 50aec0e82bc..b74a30d7759 100644
--- a/services/sidecar/Dockerfile
+++ b/services/sidecar/Dockerfile
@@ -1,5 +1,5 @@
ARG PYTHON_VERSION="3.6.10"
-FROM python:${PYTHON_VERSION}-slim as base
+FROM python:${PYTHON_VERSION}-slim-buster as base
#
# USAGE:
# cd sercices/sidecar
@@ -61,7 +61,7 @@ RUN apt-get update &&\
RUN python -m venv ${VIRTUAL_ENV}
RUN pip --no-cache-dir install --upgrade \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile
index 512ff02b9e6..99dfd140d6c 100644
--- a/services/storage/Dockerfile
+++ b/services/storage/Dockerfile
@@ -56,7 +56,7 @@ RUN apk add --no-cache \
linux-headers
RUN $SC_PIP install --upgrade \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
diff --git a/services/web/Dockerfile b/services/web/Dockerfile
index 2ecae9ea643..e38b0474338 100644
--- a/services/web/Dockerfile
+++ b/services/web/Dockerfile
@@ -1,5 +1,5 @@
ARG PYTHON_VERSION="3.6.10"
-FROM python:${PYTHON_VERSION}-slim as base
+FROM python:${PYTHON_VERSION}-slim-buster as base
#
# USAGE:
# cd sercices/web
@@ -79,7 +79,7 @@ RUN apt-get update &&\
RUN python -m venv ${VIRTUAL_ENV}
RUN pip --no-cache-dir install --upgrade \
- pip~=20.0.2 \
+ pip~=20.1.1 \
wheel \
setuptools
From 6b02813693e65d9bc55b6e6223477498dc2bdae3 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Tue, 30 Jun 2020 10:13:57 +0200
Subject: [PATCH 13/43] Bugfix/apiserver does not need sslheaders (#1564)
* no need for sslheader in apiserver
* sslheader middleware does not like hyphens
* declare swarm stack name in e2e testing
* wait for services now checks the latest task
* discover name of postgres container
---
Makefile | 3 +-
ci/github/system-testing/e2e.bash | 3 +
services/docker-compose.yml | 7 +--
tests/e2e/Makefile | 2 +-
tests/e2e/utils/wait_for_services.py | 84 ++++++++++++++++++----------
5 files changed, 62 insertions(+), 37 deletions(-)
diff --git a/Makefile b/Makefile
index 71e34f82d28..0bf0e115eab 100644
--- a/Makefile
+++ b/Makefile
@@ -54,7 +54,8 @@ export WEBSERVER_API_VERSION := $(shell cat $(CURDIR)/services/web/server/VERSI
# swarm stacks
-export SWARM_STACK_NAME ?= simcore
+export SWARM_STACK_NAME ?= master-simcore
+export SWARM_STACK_NAME_NO_HYPHEN = $(subst -,_,$(SWARM_STACK_NAME))
# version tags
export DOCKER_IMAGE_TAG ?= latest
diff --git a/ci/github/system-testing/e2e.bash b/ci/github/system-testing/e2e.bash
index 3749c0db734..06ebb2dc4dc 100755
--- a/ci/github/system-testing/e2e.bash
+++ b/ci/github/system-testing/e2e.bash
@@ -8,6 +8,9 @@ IFS=$'\n\t'
DOCKER_IMAGE_TAG=$(exec ci/helpers/build_docker_image_tag.bash)
export DOCKER_IMAGE_TAG
+SWARM_STACK_NAME=e2e_test_stack
+export SWARM_STACK_NAME
+
install() {
echo "--------------- installing psql client..."
/bin/bash -c 'sudo apt install -y postgresql-client'
diff --git a/services/docker-compose.yml b/services/docker-compose.yml
index 5ef9124a6d1..bb7752a149f 100644
--- a/services/docker-compose.yml
+++ b/services/docker-compose.yml
@@ -17,13 +17,12 @@ services:
# gzip compression
- traefik.http.middlewares.${SWARM_STACK_NAME}_gzip.compress=true
# ssl header necessary so that socket.io upgrades correctly from polling to websocket mode. the middleware must be attached to the right connection.
- - traefik.http.middlewares.${SWARM_STACK_NAME}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
- traefik.enable=true
- traefik.http.services.${SWARM_STACK_NAME}_api-server.loadbalancer.server.port=8000
- traefik.http.routers.${SWARM_STACK_NAME}_api-server.rule=hostregexp(`{host:.+}`)
- traefik.http.routers.${SWARM_STACK_NAME}_api-server.entrypoints=simcore_api
- traefik.http.routers.${SWARM_STACK_NAME}_api-server.priority=1
- - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME}_sslheader
+ - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@docker
networks:
- default
@@ -116,13 +115,13 @@ services:
# gzip compression
- traefik.http.middlewares.${SWARM_STACK_NAME}_gzip.compress=true
# ssl header necessary so that socket.io upgrades correctly from polling to websocket mode. the middleware must be attached to the right connection.
- - traefik.http.middlewares.${SWARM_STACK_NAME}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
+ - traefik.http.middlewares.${SWARM_STACK_NAME_NO_HYPHEN}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http
- traefik.enable=true
- traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.server.port=8080
- traefik.http.routers.${SWARM_STACK_NAME}_webserver.rule=hostregexp(`{host:.+}`)
- traefik.http.routers.${SWARM_STACK_NAME}_webserver.entrypoints=http
- traefik.http.routers.${SWARM_STACK_NAME}_webserver.priority=1
- - traefik.http.routers.${SWARM_STACK_NAME}_webserver.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME}_sslheader
+ - traefik.http.routers.${SWARM_STACK_NAME}_webserver.middlewares=${SWARM_STACK_NAME}_gzip@docker, ${SWARM_STACK_NAME_NO_HYPHEN}_sslheader@docker
networks:
- default
- interactive_services_subnet
diff --git a/tests/e2e/Makefile b/tests/e2e/Makefile
index 9d0e8683a24..844b4c7cc1b 100644
--- a/tests/e2e/Makefile
+++ b/tests/e2e/Makefile
@@ -61,7 +61,7 @@ transfer-images-to-registry: ## transfer images to registry
# completed transfer of images
curl registry:5000/v2/_catalog
-PUBLISHED_PORT = $(shell docker inspect simcore_postgres --format "{{(index .Endpoint.Ports 0).PublishedPort}}")
+PUBLISHED_PORT = $(shell docker inspect $(shell docker service ls --format "{{ .Name }}" | grep postgres) --format "{{(index .Endpoint.Ports 0).PublishedPort}}")
.PHONY: inject-templates-in-db
inject-templates-in-db: ## inject project templates
@PGPASSWORD=adminadmin psql --host localhost \
diff --git a/tests/e2e/utils/wait_for_services.py b/tests/e2e/utils/wait_for_services.py
index 6726fceb3de..995cefc371a 100644
--- a/tests/e2e/utils/wait_for_services.py
+++ b/tests/e2e/utils/wait_for_services.py
@@ -1,4 +1,5 @@
import logging
+from pdb import Pdb
import sys
import time
from pathlib import Path
@@ -10,20 +11,14 @@
logger = logging.getLogger(__name__)
-current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
+current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
WAIT_TIME_SECS = 20
RETRY_COUNT = 7
-MAX_WAIT_TIME=240
+MAX_WAIT_TIME = 240
# https://docs.docker.com/engine/swarm/how-swarm-mode-works/swarm-task-states/
-pre_states = [
- "NEW",
- "PENDING",
- "ASSIGNED",
- "PREPARING",
- "STARTING"
-]
+pre_states = ["NEW", "PENDING", "ASSIGNED", "PREPARING", "STARTING"]
failed_states = [
"COMPLETE",
@@ -32,37 +27,44 @@
"REJECTED",
"ORPHANED",
"REMOVE",
- "CREATED"
+ "CREATED",
]
# UTILS --------------------------------
+
def get_tasks_summary(tasks):
msg = ""
for t in tasks:
- t["Status"].setdefault("Err", '')
+ t["Status"].setdefault("Err", "")
msg += "- task ID:{ID}, STATE: {Status[State]}, ERROR: '{Status[Err]}' \n".format(
- **t)
+ **t
+ )
return msg
def get_failed_tasks_logs(service, docker_client):
failed_logs = ""
for t in service.tasks():
- if t['Status']['State'].upper() in failed_states:
- cid = t['Status']['ContainerStatus']['ContainerID']
+ if t["Status"]["State"].upper() in failed_states:
+ cid = t["Status"]["ContainerStatus"]["ContainerID"]
failed_logs += "{2} {0} - {1} BEGIN {2}\n".format(
- service.name, t['ID'], "="*10)
+ service.name, t["ID"], "=" * 10
+ )
if cid:
container = docker_client.containers.get(cid)
- failed_logs += container.logs().decode('utf-8')
+ failed_logs += container.logs().decode("utf-8")
else:
failed_logs += " log unavailable. container does not exists\n"
failed_logs += "{2} {0} - {1} END {2}\n".format(
- service.name, t['ID'], "="*10)
+ service.name, t["ID"], "=" * 10
+ )
return failed_logs
+
+
# --------------------------------------------------------------------------------
+
def osparc_simcore_root_dir() -> Path:
WILDCARD = "services/web/server"
@@ -81,46 +83,66 @@ def osparc_simcore_root_dir() -> Path:
def core_docker_compose_file() -> Path:
return osparc_simcore_root_dir() / ".stack-simcore-version.yml"
+
def core_services() -> List[str]:
with core_docker_compose_file().open() as fp:
dc_specs = yaml.safe_load(fp)
return [x for x in dc_specs["services"].keys()]
+
def ops_docker_compose_file() -> Path:
return osparc_simcore_root_dir() / ".stack-ops.yml"
+
def ops_services() -> List[str]:
with ops_docker_compose_file().open() as fp:
dc_specs = yaml.safe_load(fp)
return [x for x in dc_specs["services"].keys()]
-def wait_for_services() -> bool:
+
+def wait_for_services() -> None:
# get all services
services = core_services() + ops_services()
client = docker.from_env()
- running_services = [x for x in client.services.list() if x.name.split("_")[1] in services]
+ running_services = [
+ x for x in client.services.list() if x.name.split("_")[-1] in services
+ ]
+
# check all services are in
assert len(running_services), "no services started!"
- assert len(services) == len(running_services), "Some services are missing"
+ assert len(services) == len(
+ running_services
+ ), f"Some services are missing:\nexpected: {services}\ngot: {running_services}"
# now check they are in running mode
for service in running_services:
+ task = None
for n in range(RETRY_COUNT):
- task = service.tasks()[0]
- if task['Status']['State'].upper() in pre_states:
- print("Waiting [{}/{}] for {}...\n{}".format(n, RETRY_COUNT, service.name, get_tasks_summary(service.tasks())))
+ # get last updated task
+ sorted_tasks = sorted(service.tasks(), key=lambda task: task["UpdatedAt"])
+ task = sorted_tasks[-1]
+
+ if task["Status"]["State"].upper() in pre_states:
+ print(
+ "Waiting [{}/{}] for {}...\n{}".format(
+ n, RETRY_COUNT, service.name, get_tasks_summary(service.tasks())
+ )
+ )
time.sleep(WAIT_TIME_SECS)
- elif task['Status']['State'].upper() in failed_states:
- print(f"Waiting [{n}/{RETRY_COUNT}] Service {service.name} failed once...\n{get_tasks_summary(service.tasks())}")
+ elif task["Status"]["State"].upper() in failed_states:
+ print(
+ f"Waiting [{n}/{RETRY_COUNT}] Service {service.name} failed once...\n{get_tasks_summary(service.tasks())}"
+ )
time.sleep(WAIT_TIME_SECS)
else:
break
- assert task['Status']['State'].upper() == "RUNNING",\
- "Expected running, got \n{}\n{}".format(
- pformat(task),
- get_tasks_summary(service.tasks()))
- # get_failed_tasks_logs(service, client))
-
+ assert task
+ assert (
+ task["Status"]["State"].upper() == "RUNNING"
+ ), "Expected running, got \n{}\n{}".format(
+ pformat(task), get_tasks_summary(service.tasks())
+ )
+ # get_failed_tasks_logs(service, client))
if __name__ == "__main__":
From 681fc9c41e7ef8d8616050349ac5d03669d507a5 Mon Sep 17 00:00:00 2001
From: Sylvain <35365065+sanderegg@users.noreply.github.com>
Date: Tue, 30 Jun 2020 13:56:02 +0200
Subject: [PATCH 14/43] [feature] UI Fine grained access - project locking and
notification
* write permission (not delete permission) is needed to remove a user
* added test for opening a shared project 2 times
* access_rights renamed to accessRights
* added test for groups access rights
* adding state endpoint
* mypy
* now check project is locked
* user automatically enters the room upon successful login
* project state now returns the value + owner of the lock if any
---
api/specs/common/schemas/project.yaml | 43 +-
.../webserver/components/schemas/group.yaml | 4 +-
api/specs/webserver/openapi-groups.yaml | 10 +-
api/specs/webserver/openapi-projects.yaml | 26 +
api/specs/webserver/openapi.yaml | 3 +
mypy.ini | 13 +-
.../src/simcore_postgres_database/cli.py | 16 +-
.../src/pytest_simcore/websocket_client.py | 18 +-
.../service-library/src/servicelib/utils.py | 14 +-
scripts/common.Makefile | 3 +
scripts/mypy.bash | 22 +
.../osparc/component/export/Permissions.js | 4 +-
.../preferences/pages/OrganizationsPage.js | 6 +-
.../api/v0/openapi.yaml | 221 +++++++--
.../simcore_service_webserver/groups_api.py | 9 +-
.../simcore_service_webserver/groups_utils.py | 4 +-
.../projects/projects_api.py | 22 +-
.../projects/projects_handlers.py | 103 +++-
.../projects/projects_models.py | 27 +-
.../resource_manager/websocket_manager.py | 10 +-
.../socketio/events.py | 10 +
.../socketio/handlers.py | 14 +
.../simcore_service_webserver/users_api.py | 17 +-
.../server/tests/unit/with_dbs/_helpers.py | 71 +++
.../server/tests/unit/with_dbs/conftest.py | 63 ++-
.../server/tests/unit/with_dbs/test_groups.py | 361 ++++++--------
.../tests/unit/with_dbs/test_projects.py | 468 +++++++++++++-----
.../unit/with_dbs/test_resource_manager.py | 33 +-
28 files changed, 1118 insertions(+), 497 deletions(-)
create mode 100755 scripts/mypy.bash
create mode 100644 services/web/server/tests/unit/with_dbs/_helpers.py
diff --git a/api/specs/common/schemas/project.yaml b/api/specs/common/schemas/project.yaml
index b71b11006df..cceb224263e 100644
--- a/api/specs/common/schemas/project.yaml
+++ b/api/specs/common/schemas/project.yaml
@@ -1,17 +1,18 @@
components:
schemas:
Project:
- $ref: './project-v0.0.1-converted.yaml'
+ $ref: "./project-v0.0.1-converted.yaml"
ProjectEnveloped:
type: object
required:
- data
properties:
data:
- $ref: '#/components/schemas/Project'
+ $ref: "#/components/schemas/Project"
error:
nullable: true
default: null
+
ProjectArrayEnveloped:
type: object
required:
@@ -20,7 +21,43 @@ components:
data:
type: array
items:
- $ref: '#/components/schemas/Project'
+ $ref: "#/components/schemas/Project"
+ error:
+ nullable: true
+ default: null
+
+ ProjectState:
+ type: object
+ required:
+ - locked
+ properties:
+ locked:
+ type: object
+ description: describes the project lock state
+ required:
+ - value
+ properties:
+ value:
+ type: boolean
+ description: true if the project is locked
+ owner:
+ type: object
+ properties:
+ first_name:
+ type: string
+ last_name:
+ type: string
+ required:
+ - firstName
+ - lastName
+
+ ProjectStateEnveloped:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ $ref: "#/components/schemas/ProjectState"
error:
nullable: true
default: null
diff --git a/api/specs/webserver/components/schemas/group.yaml b/api/specs/webserver/components/schemas/group.yaml
index ebb7bfde29e..12459b729da 100644
--- a/api/specs/webserver/components/schemas/group.yaml
+++ b/api/specs/webserver/components/schemas/group.yaml
@@ -42,13 +42,13 @@ UsersGroup:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
$ref: "#/GroupAccessRights"
required:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: "27"
label: "A user"
diff --git a/api/specs/webserver/openapi-groups.yaml b/api/specs/webserver/openapi-groups.yaml
index 5de07b012bf..fc7fd24fb83 100644
--- a/api/specs/webserver/openapi-groups.yaml
+++ b/api/specs/webserver/openapi-groups.yaml
@@ -181,7 +181,15 @@ paths:
content:
application/json:
schema:
- $ref: "./components/schemas/group.yaml#/GroupAccessRights"
+ type: object
+ properties:
+ accessRights:
+ $ref: "./components/schemas/group.yaml#/GroupAccessRights"
+ required:
+ - accessRights
+
+
+
responses:
"200":
description: modified user
diff --git a/api/specs/webserver/openapi-projects.yaml b/api/specs/webserver/openapi-projects.yaml
index 197fdbb1438..3efcd1f1f88 100644
--- a/api/specs/webserver/openapi-projects.yaml
+++ b/api/specs/webserver/openapi-projects.yaml
@@ -171,6 +171,29 @@ paths:
default:
$ref: "#/components/responses/DefaultErrorResponse"
+ /projects/{project_id}/state:
+ parameters:
+ - name: project_id
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - project
+ summary: returns the state of a project
+ operationId: state_project
+ responses:
+ "200":
+ description: returns the project current state
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/ProjectStateEnveloped"
+
+ default:
+ $ref: "#/components/responses/DefaultErrorResponse"
+
/projects/{project_id}/close:
parameters:
- name: project_id
@@ -362,6 +385,9 @@ components:
ProjectArrayEnveloped:
$ref: "../common/schemas/project.yaml#/components/schemas/ProjectArrayEnveloped"
+ ProjectStateEnveloped:
+ $ref: "../common/schemas/project.yaml#/components/schemas/ProjectStateEnveloped"
+
RunningServiceEnveloped:
$ref: "../common/schemas/running_service.yaml#/components/schemas/RunningServiceEnveloped"
diff --git a/api/specs/webserver/openapi.yaml b/api/specs/webserver/openapi.yaml
index 34d54bd2e85..06a566c05e6 100644
--- a/api/specs/webserver/openapi.yaml
+++ b/api/specs/webserver/openapi.yaml
@@ -159,6 +159,9 @@ paths:
/projects/{project_id}:open:
$ref: "./openapi-projects.yaml#/paths/~1projects~1{project_id}~1open"
+ /projects/{project_id}/state:
+ $ref: "./openapi-projects.yaml#/paths/~1projects~1{project_id}~1state"
+
/projects/{project_id}:close:
$ref: "./openapi-projects.yaml#/paths/~1projects~1{project_id}~1close"
diff --git a/mypy.ini b/mypy.ini
index bd8c2057cf8..e343faa54ff 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -3,7 +3,8 @@
python_version = 3.6
warn_return_any = True
warn_unused_configs = True
-
+namespace_packages = True
+; ignore_missing_imports = True
# Per-module options:
[mypy-aio-pika.*]
ignore_missing_imports = True
@@ -17,6 +18,8 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-aiosmtplib.*]
ignore_missing_imports = True
+[mypy-aiozipkin.*]
+ignore_missing_imports = True
[mypy-asyncpg.*]
ignore_missing_imports = True
[mypy-celery.*]
@@ -27,6 +30,10 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-jsondiff.*]
ignore_missing_imports = True
+[mypy-jsonschema.*]
+ignore_missing_imports = True
+[mypy-openapi_core.*]
+ignore_missing_imports = True
[mypy-passlib.*]
ignore_missing_imports = True
[mypy-prometheus_client.*]
@@ -39,3 +46,7 @@ ignore_missing_imports = True
ignore_missing_imports = True
[mypy-trafaret.*]
ignore_missing_imports = True
+[mypy-trafaret_config.*]
+ignore_missing_imports = True
+[mypy-yarl.*]
+ignore_missing_imports = True
diff --git a/packages/postgres-database/src/simcore_postgres_database/cli.py b/packages/postgres-database/src/simcore_postgres_database/cli.py
index f86c97035b3..697a3101c70 100644
--- a/packages/postgres-database/src/simcore_postgres_database/cli.py
+++ b/packages/postgres-database/src/simcore_postgres_database/cli.py
@@ -11,14 +11,14 @@
from copy import deepcopy
from logging.config import fileConfig
from pathlib import Path
-from typing import Dict
+from typing import Dict, Optional
-import alembic.command
import click
+
+import alembic.command
import docker
from alembic import __version__ as __alembic_version__
from alembic.config import Config as AlembicConfig
-
from simcore_postgres_database.models import *
from simcore_postgres_database.utils import build_url, raise_if_not_responsive
@@ -110,14 +110,13 @@ def _reset_cache():
def main():
""" Simplified CLI for database migration with alembic """
-
@main.command()
@click.option("--user", "-u")
@click.option("--password", "-p")
@click.option("--host")
@click.option("--port", type=int)
@click.option("--database", "-d")
-def discover(**cli_inputs) -> Dict:
+def discover(**cli_inputs) -> Optional[Dict]:
""" Discovers databases and caches configs in ~/.simcore_postgres_database.json (except if --no-cache)"""
# NOTE: Do not add defaults to user, password so we get a chance to ping urls
# TODO: if multiple candidates online, then query user to select
@@ -125,14 +124,14 @@ def discover(**cli_inputs) -> Dict:
click.echo("Discovering database ...")
cli_cfg = {key: value for key, value in cli_inputs.items() if value is not None}
- def _test_cached():
+ def _test_cached() -> Dict:
"""Tests cached configuration """
cfg = _load_cache() or {}
if cfg:
cfg.update(cli_cfg) # overrides
return cfg
- def _test_env():
+ def _test_env() -> Dict:
"""Tests environ variables """
cfg = {
"user": os.getenv("POSTGRES_USER"),
@@ -144,7 +143,7 @@ def _test_env():
cfg.update(cli_cfg)
return cfg
- def _test_swarm():
+ def _test_swarm() -> Dict:
"""Tests published port in swarm from host """
cfg = _test_env()
cfg["host"] = "127.0.0.1"
@@ -182,6 +181,7 @@ def _test_swarm():
_reset_cache()
click.secho("Sorry, database not found !!", blink=False, bold=True, fg="red")
+ return None
@main.command()
diff --git a/packages/pytest-simcore/src/pytest_simcore/websocket_client.py b/packages/pytest-simcore/src/pytest_simcore/websocket_client.py
index 5e4d49ba0eb..a9f5927d241 100644
--- a/packages/pytest-simcore/src/pytest_simcore/websocket_client.py
+++ b/packages/pytest-simcore/src/pytest_simcore/websocket_client.py
@@ -10,7 +10,7 @@
@pytest.fixture()
-async def security_cookie(loop, client) -> str:
+async def security_cookie_factory(loop, client) -> str:
# get the cookie by calling the root entrypoint
resp = await client.get("/v0/")
payload = await resp.json()
@@ -32,17 +32,23 @@ async def socketio_url(loop, client) -> str:
@pytest.fixture()
-async def socketio_client(socketio_url: str, security_cookie: str):
+async def socketio_client(
+ socketio_url: str, security_cookie_factory: str
+) -> socketio.AsyncClient:
clients = []
async def connect(client_session_id) -> socketio.AsyncClient:
- sio = socketio.AsyncClient(ssl_verify=False) # enginio 3.10.0 introduced ssl verification
- url = str(URL(socketio_url).with_query({'client_session_id': client_session_id}))
+ sio = socketio.AsyncClient(
+ ssl_verify=False
+ ) # enginio 3.10.0 introduced ssl verification
+ url = str(
+ URL(socketio_url).with_query({"client_session_id": client_session_id})
+ )
headers = {}
- if security_cookie:
+ if security_cookie_factory:
# WARNING: engineio fails with empty cookies. Expects "key=value"
- headers.update({'Cookie': security_cookie})
+ headers.update({"Cookie": security_cookie_factory})
await sio.connect(url, headers=headers)
assert sio.sid
diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py
index 758b47cbf3e..150452576cb 100644
--- a/packages/service-library/src/servicelib/utils.py
+++ b/packages/service-library/src/servicelib/utils.py
@@ -8,15 +8,15 @@
import logging
import os
from pathlib import Path
-from typing import Any, Coroutine, List, Optional, Union
+from typing import Any, Awaitable, Coroutine, List, Optional, Union
logger = logging.getLogger(__name__)
def is_production_environ() -> bool:
- """
- If True, this code most probably
- runs in a production container of one of the
+ """
+ If True, this code most probably
+ runs in a production container of one of the
osparc-simcore services.
"""
# WARNING: based on a convention that is not constantly verified
@@ -46,7 +46,9 @@ def search_osparc_repo_dir(start: Union[str, Path], max_iterations=8) -> Optiona
# FUTURES
-def fire_and_forget_task(obj: Union[Coroutine, asyncio.Future]) -> asyncio.Future:
+def fire_and_forget_task(
+ obj: Union[Coroutine, asyncio.Future, Awaitable]
+) -> asyncio.Future:
future = asyncio.ensure_future(obj)
def log_exception_callback(fut: asyncio.Future):
@@ -63,7 +65,7 @@ def log_exception_callback(fut: asyncio.Future):
async def logged_gather(
*tasks, reraise: bool = True, log: logging.Logger = logger
) -> List[Any]:
- """
+ """
*all* coroutine passed are executed concurrently and once they are all
completed, the first error (if any) is reraised or all returned
diff --git a/scripts/common.Makefile b/scripts/common.Makefile
index 3fc94e1ead0..eb8b6b5b7c1 100644
--- a/scripts/common.Makefile
+++ b/scripts/common.Makefile
@@ -100,6 +100,9 @@ autoformat: ## runs black python formatter on this service's code. Use AFTER mak
--exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration|client-sdk|generated_code)/" \
$(CURDIR)
+.PHONY: mypy
+mypy: $(REPO_BASE_DIR)/scripts/mypy.bash $(REPO_BASE_DIR)/mypy.ini ## runs mypy python static type checker on this services's code. Use AFTER make install-*
+ @$(REPO_BASE_DIR)/scripts/mypy.bash src
.PHONY: version-patch version-minor version-major
version-patch: ## commits version with bug fixes not affecting the cookiecuter config
diff --git a/scripts/mypy.bash b/scripts/mypy.bash
new file mode 100755
index 00000000000..001f76bc7da
--- /dev/null
+++ b/scripts/mypy.bash
@@ -0,0 +1,22 @@
+#!/bin/bash
+# http://redsymbol.net/articles/unofficial-bash-strict-mode/
+set -o errexit
+set -o nounset
+set -o pipefail
+IFS=$'\n\t'
+
+target_path=$(realpath ${1:-Please give target path as argument})
+cd "$(dirname "$0")"
+default_mypy_config="$(dirname ${PWD})/mypy.ini"
+mypy_config=$(realpath ${2:-${default_mypy_config}})
+
+echo mypying ${target_path} using config in ${mypy_config}...
+
+echo $default_mypy_config
+docker run --rm \
+ -v ${mypy_config}:/config/mypy.ini \
+ -v ${target_path}:/src \
+ --workdir=/src \
+ kiwicom/mypy mypy \
+ --config-file /config/mypy.ini \
+ /src
diff --git a/services/web/client/source/class/osparc/component/export/Permissions.js b/services/web/client/source/class/osparc/component/export/Permissions.js
index 8c00053b4c7..90bda71e574 100644
--- a/services/web/client/source/class/osparc/component/export/Permissions.js
+++ b/services/web/client/source/class/osparc/component/export/Permissions.js
@@ -157,7 +157,7 @@ qx.Class.define("osparc.component.export.Permissions", {
ctrl.bindProperty("login", "subtitle", null, item, id); // user
ctrl.bindProperty("description", "subtitle", null, item, id); // organization
ctrl.bindProperty("isOrg", "isOrganization", null, item, id);
- ctrl.bindProperty("access_rights", "accessRights", null, item, id);
+ ctrl.bindProperty("accessRights", "accessRights", null, item, id);
ctrl.bindProperty("showOptions", "showOptions", null, item, id);
},
configureItem: item => {
@@ -229,7 +229,7 @@ qx.Class.define("osparc.component.export.Permissions", {
collaborator["thumbnail"] = osparc.utils.Avatar.getUrl(collaborator["login"], 32);
collaborator["name"] = osparc.utils.Utils.firstsUp(collaborator["first_name"], collaborator["last_name"]);
}
- collaborator["access_rights"] = aceessRights[gid];
+ collaborator["accessRights"] = aceessRights[gid];
if (this.__isUserOwner()) {
collaborator["showOptions"] = true;
}
diff --git a/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js b/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js
index 5613368f77e..5675578ce85 100644
--- a/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js
+++ b/services/web/client/source/class/osparc/desktop/preferences/pages/OrganizationsPage.js
@@ -88,7 +88,7 @@ qx.Class.define("osparc.desktop.preferences.pages.OrganizationsPage", {
ctrl.bindProperty("label", "title", null, item, id);
ctrl.bindProperty("description", "subtitle", null, item, id);
ctrl.bindProperty("nMembers", "contact", null, item, id);
- ctrl.bindProperty("access_rights", "accessRights", null, item, id);
+ ctrl.bindProperty("accessRights", "accessRights", null, item, id);
},
configureItem: item => {
const thumbanil = item.getChildControl("thumbnail");
@@ -159,7 +159,7 @@ qx.Class.define("osparc.desktop.preferences.pages.OrganizationsPage", {
ctrl.bindProperty("id", "key", null, item, id);
ctrl.bindProperty("thumbnail", "thumbnail", null, item, id);
ctrl.bindProperty("name", "title", null, item, id);
- ctrl.bindProperty("access_rights", "accessRights", null, item, id);
+ ctrl.bindProperty("accessRights", "accessRights", null, item, id);
ctrl.bindProperty("login", "subtitle", null, item, id);
ctrl.bindProperty("showOptions", "showOptions", null, item, id);
},
@@ -365,7 +365,7 @@ qx.Class.define("osparc.desktop.preferences.pages.OrganizationsPage", {
"uid": orgMember["key"]
},
data: {
- "access_rights": {
+ "accessRights": {
"read": true,
"write": true,
"delete": false
diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
index 7e9f28a8dc5..f52a88fd62b 100644
--- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
+++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml
@@ -2136,7 +2136,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -2164,7 +2164,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -2196,7 +2196,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -2224,7 +2224,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -2254,7 +2254,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -2282,7 +2282,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -2867,7 +2867,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -2895,7 +2895,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -2927,7 +2927,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -2955,7 +2955,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -2985,7 +2985,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -3013,7 +3013,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -3139,7 +3139,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -3167,7 +3167,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -3207,7 +3207,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -3235,7 +3235,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -3373,7 +3373,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -3401,7 +3401,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -3527,7 +3527,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -3555,7 +3555,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -3595,7 +3595,7 @@ paths:
description: url to the group thumbnail
type: string
format: uri
- access_rights:
+ accessRights:
description: defines acesss rights for the user
type: object
properties:
@@ -3623,7 +3623,7 @@ paths:
- gid
- label
- description
- - access_rights
+ - accessRights
example:
- gid: '27'
label: A user
@@ -4269,29 +4269,34 @@ paths:
content:
application/json:
schema:
- description: defines acesss rights for the user
type: object
properties:
- read:
- type: boolean
- write:
- type: boolean
- delete:
- type: boolean
+ accessRights:
+              description: defines access rights for the user
+ type: object
+ properties:
+ read:
+ type: boolean
+ write:
+ type: boolean
+ delete:
+ type: boolean
+ required:
+ - read
+ - write
+ - delete
+ example:
+ - read: true
+ write: false
+ delete: false
+ - read: true
+ write: true
+ delete: false
+ - read: true
+ write: true
+ delete: true
required:
- - read
- - write
- - delete
- example:
- - read: true
- write: false
- delete: false
- - read: true
- write: true
- delete: false
- - read: true
- write: true
- delete: true
+ - accessRights
responses:
'200':
description: modified user
@@ -8641,6 +8646,138 @@ paths:
message: Password is not secure
field: pasword
status: 400
+ '/projects/{project_id}/state':
+ parameters:
+ - name: project_id
+ in: path
+ required: true
+ schema:
+ type: string
+ get:
+ tags:
+ - project
+ summary: returns the state of a project
+ operationId: state_project
+ responses:
+ '200':
+ description: returns the project current state
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - data
+ properties:
+ data:
+ type: object
+ required:
+ - locked
+ properties:
+ locked:
+ type: object
+ description: describes the project lock state
+ required:
+ - value
+ properties:
+ value:
+ type: boolean
+ description: true if the project is locked
+ owner:
+ type: object
+ properties:
+ first_name:
+ type: string
+ last_name:
+ type: string
+                            required:
+                              - first_name
+                              - last_name
+ error:
+ nullable: true
+ default: null
+ default:
+ description: Default http error response body
+ content:
+ application/json:
+ schema:
+ type: object
+ required:
+ - error
+ properties:
+ data:
+ nullable: true
+ default: null
+ error:
+ type: object
+ nullable: true
+ properties:
+ logs:
+ description: log messages
+ type: array
+ items:
+ type: object
+ properties:
+ level:
+ description: log level
+ type: string
+ default: INFO
+ enum:
+ - DEBUG
+ - WARNING
+ - INFO
+ - ERROR
+ message:
+ description: 'log message. If logger is USER, then it MUST be human readable'
+ type: string
+ logger:
+ description: name of the logger receiving this message
+ type: string
+ required:
+ - message
+ example:
+ message: 'Hi there, Mr user'
+ level: INFO
+ logger: user-logger
+ errors:
+ description: errors metadata
+ type: array
+ items:
+ type: object
+ required:
+ - code
+ - message
+ properties:
+ code:
+ type: string
+ description: Typically the name of the exception that produced it otherwise some known error code
+ message:
+ type: string
+ description: Error message specific to this item
+ resource:
+ type: string
+ description: API resource affected by this error
+ field:
+ type: string
+ description: Specific field within the resource
+ status:
+ description: HTTP error code
+ type: integer
+ example:
+ BadRequestError:
+ logs:
+ - message: Requested information is incomplete or malformed
+ level: ERROR
+ - message: Invalid email and password
+ level: ERROR
+ logger: USER
+ errors:
+ - code: InvalidEmail
+ message: Email is malformed
+ field: email
+ - code: UnsavePassword
+ message: Password is not secure
+ field: pasword
+ status: 400
'/projects/{project_id}:close':
parameters:
- name: project_id
diff --git a/services/web/server/src/simcore_service_webserver/groups_api.py b/services/web/server/src/simcore_service_webserver/groups_api.py
index 722fb264997..256cfe6f86f 100644
--- a/services/web/server/src/simcore_service_webserver/groups_api.py
+++ b/services/web/server/src/simcore_service_webserver/groups_api.py
@@ -120,7 +120,7 @@ async def create_user_group(
)
)
return convert_groups_db_to_schema(
- group, access_rights=DEFAULT_GROUP_OWNER_ACCESS_RIGHTS
+ group, accessRights=DEFAULT_GROUP_OWNER_ACCESS_RIGHTS
)
@@ -145,7 +145,7 @@ async def update_user_group(
)
updated_group = await result.fetchone()
return convert_groups_db_to_schema(
- updated_group, access_rights=group.access_rights
+ updated_group, accessRights=group.access_rights
)
@@ -271,10 +271,11 @@ async def update_user_in_group(
conn, gid, the_user_id_in_group
)
# modify the user access rights
+ new_db_values = {"access_rights": new_values_for_user_in_group["accessRights"]}
await conn.execute(
# pylint: disable=no-value-for-parameter
user_to_groups.update()
- .values(**new_values_for_user_in_group)
+ .values(**new_db_values)
.where(
and_(
user_to_groups.c.uid == the_user_id_in_group,
@@ -283,7 +284,7 @@ async def update_user_in_group(
)
)
the_user = dict(the_user)
- the_user.update(**new_values_for_user_in_group)
+ the_user.update(**new_db_values)
return convert_user_in_group_to_schema(the_user)
diff --git a/services/web/server/src/simcore_service_webserver/groups_utils.py b/services/web/server/src/simcore_service_webserver/groups_utils.py
index 5ac3a0eafa8..f2183b2602c 100644
--- a/services/web/server/src/simcore_service_webserver/groups_utils.py
+++ b/services/web/server/src/simcore_service_webserver/groups_utils.py
@@ -13,7 +13,7 @@
"label": "name",
"description": "description",
"thumbnail": "thumbnail",
- "access_rights": "access_rights",
+ "accessRights": "access_rights",
}
@@ -49,6 +49,6 @@ def convert_groups_schema_to_db(schema: Dict) -> Dict:
def convert_user_in_group_to_schema(row: Union[RowProxy, Dict]) -> Dict[str, str]:
group_user = convert_user_db_to_schema(row)
group_user.pop("role")
- group_user["access_rights"] = row["access_rights"]
+ group_user["accessRights"] = row["access_rights"]
group_user["gid"] = row["primary_gid"]
return group_user
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
index 35896cdaa03..ff615334a80 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py
@@ -22,6 +22,7 @@
from ..computation_api import delete_pipeline_db
from ..director import director_api
+from ..socketio.events import SOCKET_IO_PROJECT_UPDATED_EVENT, post_group_messages
from ..storage_api import copy_data_folders_from_project # mocked in unit-tests
from ..storage_api import (
delete_data_folders_of_project,
@@ -30,6 +31,7 @@
from .config import CONFIG_SECTION_NAME
from .projects_db import APP_PROJECT_DBAPI
from .projects_exceptions import NodeNotFoundError
+from .projects_models import ProjectState
from .projects_utils import clone_project_document
log = logging.getLogger(__name__)
@@ -49,7 +51,7 @@ async def get_project_for_user(
project_uuid: str,
user_id: int,
*,
- include_templates: bool = False
+ include_templates: bool = False,
) -> Dict:
""" Returns a project accessible to user
@@ -326,3 +328,21 @@ async def is_node_id_present_in_any_project_workbench(
"""If the node_id is presnet in one of the projects' workbenche returns True"""
db = app[APP_PROJECT_DBAPI]
return node_id in await db.get_all_node_ids_from_workbenches()
+
+
+async def notify_project_state_update(
+ app: web.Application, project: Dict, state: ProjectState
+) -> None:
+ rooms_to_notify = [
+ f"{gid}" for gid, rights in project["accessRights"].items() if rights["read"]
+ ]
+
+ messages = {
+ SOCKET_IO_PROJECT_UPDATED_EVENT: {
+ "project_uuid": project["uuid"],
+ "data": state.dict(),
+ }
+ }
+
+ for room in rooms_to_notify:
+ await post_group_messages(app, room, messages)
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py
index 9507566d572..24715830fbb 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py
@@ -3,6 +3,7 @@
"""
import json
import logging
+from typing import Set
from aiohttp import web
from jsonschema import ValidationError
@@ -14,9 +15,11 @@
from ..resource_manager.websocket_manager import managed_resource
from ..security_api import check_permission
from ..security_decorators import permission_required
+from ..users_api import get_user_name
from . import projects_api
from .projects_db import APP_PROJECT_DBAPI
from .projects_exceptions import ProjectInvalidRightsError, ProjectNotFoundError
+from .projects_models import Owner, ProjectLocked, ProjectState
OVERRIDABLE_DOCUMENT_KEYS = [
"name",
@@ -273,6 +276,11 @@ async def delete_project(request: web.Request):
raise web.HTTPNoContent(content_type="application/json")
+class HTTPLocked(web.HTTPClientError):
+ # pylint: disable=too-many-ancestors
+ status_code = 423
+
+
@login_required
@permission_required("project.open")
async def open_project(request: web.Request) -> web.Response:
@@ -291,12 +299,34 @@ async def open_project(request: web.Request) -> web.Response:
user_id=user_id,
include_templates=True,
)
+
+ # let's check if that project is already opened by someone else
+ other_users: Set[int] = {
+ x
+ for x in await rt.find_users_of_resource("project_id", project_uuid)
+ if x != f"{user_id}"
+ }
+
+ if other_users:
+ # project is already locked
+ usernames = [
+ await get_user_name(request.app, uid) for uid in other_users
+ ]
+ raise HTTPLocked(reason=f"Project is already opened by {usernames}")
await rt.add("project_id", project_uuid)
# user id opened project uuid
await projects_api.start_project_interactive_services(request, project, user_id)
-
- return {"data": project}
+ # notify users that project is now locked
+ project_state = ProjectState(
+ locked=ProjectLocked(
+ value=True, owner=Owner(**await get_user_name(request.app, user_id))
+ )
+ )
+ await projects_api.notify_project_state_update(
+ request.app, project, project_state
+ )
+ return web.json_response({"data": project})
except ProjectNotFoundError:
raise web.HTTPNotFound(reason=f"Project {project_uuid} not found")
@@ -313,28 +343,71 @@ async def close_project(request: web.Request) -> web.Response:
# TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead!
from .projects_api import get_project_for_user
+ project = await get_project_for_user(
+ request.app,
+ project_uuid=project_uuid,
+ user_id=user_id,
+ include_templates=True,
+ )
+ project_opened_by_others: bool = False
with managed_resource(user_id, client_session_id, request.app) as rt:
- await get_project_for_user(
- request.app,
- project_uuid=project_uuid,
- user_id=user_id,
- include_templates=True,
- )
await rt.remove("project_id")
- other_users = await rt.find_users_of_resource("project_id", project_uuid)
- if not other_users:
- # only remove the services if no one else is using them now
- fire_and_forget_task(
- projects_api.remove_project_interactive_services(
+ project_opened_by_others = (
+ len(await rt.find_users_of_resource("project_id", project_uuid)) > 0
+ )
+ # if we are the only user left we can safely remove the services
+ async def _close_project_task() -> None:
+ try:
+ if not project_opened_by_others:
+ # only remove the services if no one else is using them now
+ await projects_api.remove_project_interactive_services(
user_id, project_uuid, request.app
)
+ finally:
+ # ensure we notify the user whatever happens, the GC should take care of dangling services in case of issue
+ await projects_api.notify_project_state_update(
+ request.app, project, ProjectState(locked={"value": False})
)
+ fire_and_forget_task(_close_project_task())
+
raise web.HTTPNoContent(content_type="application/json")
except ProjectNotFoundError:
raise web.HTTPNotFound(reason=f"Project {project_uuid} not found")
+@login_required
+@permission_required("project.read")
+async def state_project(request: web.Request) -> web.Response:
+ user_id = request[RQT_USERID_KEY]
+ project_uuid = request.match_info.get("project_id")
+ with managed_resource(user_id, None, request.app) as rt:
+ # TODO: temporary hidden until get_handlers_from_namespace refactor to seek marked functions instead!
+ from .projects_api import get_project_for_user
+
+ # check that project exists
+ await get_project_for_user(
+ request.app,
+ project_uuid=project_uuid,
+ user_id=user_id,
+ include_templates=True,
+ )
+
+ users_of_project = await rt.find_users_of_resource("project_id", project_uuid)
+ usernames = [
+ await get_user_name(request.app, uid) for uid in set(users_of_project)
+ ]
+ assert len(usernames) <= 1 # currently not possible to have more than 1
+ project_state = ProjectState(
+ locked={
+ "value": len(usernames) > 0,
+ "owner": Owner(**usernames[0]) if len(usernames) > 0 else None,
+ }
+ )
+
+ return web.json_response({"data": project_state.dict()})
+
+
@login_required
@permission_required("project.read")
async def get_active_project(request: web.Request) -> web.Response:
@@ -357,7 +430,7 @@ async def get_active_project(request: web.Request) -> web.Response:
include_templates=True,
)
- return {"data": project}
+ return web.json_response({"data": project})
except ProjectNotFoundError:
raise web.HTTPNotFound(reason="Project not found")
@@ -416,7 +489,7 @@ async def get_node(request: web.Request) -> web.Response:
node_details = await projects_api.get_project_node(
request, project_uuid, user_id, node_uuid
)
- return {"data": node_details}
+ return web.json_response({"data": node_details})
except ProjectNotFoundError:
raise web.HTTPNotFound(reason=f"Project {project_uuid} not found")
diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_models.py b/services/web/server/src/simcore_service_webserver/projects/projects_models.py
index 3952c803d07..18f2544985b 100644
--- a/services/web/server/src/simcore_service_webserver/projects/projects_models.py
+++ b/services/web/server/src/simcore_service_webserver/projects/projects_models.py
@@ -2,12 +2,31 @@
Facade
"""
-from simcore_postgres_database.webserver_models import (
- ProjectType,
- projects,
-)
+from typing import Optional
+
+from pydantic import BaseModel
+
+from simcore_postgres_database.webserver_models import ProjectType, projects
+
+
+class Owner(BaseModel):
+ first_name: str
+ last_name: str
+
+
+class ProjectLocked(BaseModel):
+ value: bool
+ owner: Optional[Owner]
+
+
+class ProjectState(BaseModel):
+ locked: ProjectLocked
+
__all__ = [
"projects",
"ProjectType",
+ "ProjectState",
+ "ProjectLocked",
+ "Owner",
]
diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py b/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py
index 5ea83ebd0af..1deff4df369 100644
--- a/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py
+++ b/services/web/server/src/simcore_service_webserver/resource_manager/websocket_manager.py
@@ -16,7 +16,7 @@
import logging
from contextlib import contextmanager
-from typing import Dict, List
+from typing import Dict, Iterator, List, Optional, Union
import attr
from aiohttp import web
@@ -32,7 +32,7 @@
@attr.s(auto_attribs=True)
class WebsocketRegistry:
user_id: str
- client_session_id: str
+ client_session_id: Optional[str]
app: web.Application
def _resource_key(self) -> Dict[str, str]:
@@ -150,9 +150,9 @@ async def find_users_of_resource(self, key: str, value: str) -> List[str]:
@contextmanager
def managed_resource(
- user_id: str, client_session_id: str, app: web.Application
-) -> WebsocketRegistry:
- registry = WebsocketRegistry(user_id, client_session_id, app)
+ user_id: Union[str, int], client_session_id: Optional[str], app: web.Application
+) -> Iterator[WebsocketRegistry]:
+ registry = WebsocketRegistry(str(user_id), client_session_id, app)
try:
yield registry
except Exception:
diff --git a/services/web/server/src/simcore_service_webserver/socketio/events.py b/services/web/server/src/simcore_service_webserver/socketio/events.py
index d7477aca481..9ffae614981 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/events.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/events.py
@@ -15,6 +15,8 @@
log = logging.getLogger(__name__)
+SOCKET_IO_PROJECT_UPDATED_EVENT: str = "projectStateUpdated"
+
async def post_messages(
app: Application, user_id: str, messages: Dict[str, Any]
@@ -28,3 +30,11 @@ async def post_messages(
# Notice that there might be several tabs open
for event_name, data in messages.items():
fire_and_forget_task(sio.emit(event_name, json.dumps(data), room=sid))
+
+
+async def post_group_messages(
+ app: Application, room: str, messages: Dict[str, Any]
+) -> None:
+ sio: AsyncServer = get_socket_server(app)
+ for event_name, data in messages.items():
+ fire_and_forget_task(sio.emit(event_name, json.dumps(data), room=room))
diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers.py b/services/web/server/src/simcore_service_webserver/socketio/handlers.py
index 51681a1b992..3a0fa3bdc6f 100644
--- a/services/web/server/src/simcore_service_webserver/socketio/handlers.py
+++ b/services/web/server/src/simcore_service_webserver/socketio/handlers.py
@@ -16,6 +16,7 @@
from servicelib.observer import observe
from servicelib.utils import fire_and_forget_task, logged_gather
+from ..groups_api import list_user_groups
from ..login.decorators import RQT_USERID_KEY, login_required
from ..resource_manager.config import get_service_deletion_timeout
from ..resource_manager.websocket_manager import managed_resource
@@ -45,6 +46,7 @@ async def connect(sid: str, environ: Dict, app: web.Application) -> bool:
request = environ[_SOCKET_IO_AIOHTTP_REQUEST_KEY]
try:
await authenticate_user(sid, app, request)
+ await set_user_in_rooms(sid, app, request)
except web.HTTPUnauthorized:
raise SocketIOConnectionError("authentification failed")
except Exception as exc: # pylint: disable=broad-except
@@ -86,6 +88,18 @@ async def authenticate_user(
await rt.set_socket_id(sid)
+async def set_user_in_rooms(
+ sid: str, app: web.Application, request: web.Request
+) -> None:
+ user_id = request.get(RQT_USERID_KEY, ANONYMOUS_USER_ID)
+ primary_group, user_groups, all_group = await list_user_groups(app, user_id)
+ groups = [primary_group] + user_groups + [all_group]
+ sio = get_socket_server(app)
+ # TODO: check if it is necessary to leave_room when socket disconnects
+ for group in groups:
+ sio.enter_room(sid, f"{group['gid']}")
+
+
async def disconnect_other_sockets(sio, sockets: List[str]) -> None:
log.debug("disconnecting sockets %s", sockets)
logout_tasks = [
diff --git a/services/web/server/src/simcore_service_webserver/users_api.py b/services/web/server/src/simcore_service_webserver/users_api.py
index ea35c123b37..2e990d24aa3 100644
--- a/services/web/server/src/simcore_service_webserver/users_api.py
+++ b/services/web/server/src/simcore_service_webserver/users_api.py
@@ -44,20 +44,20 @@ async def get_user_profile(app: web.Application, user_id: int) -> Dict[str, Any]
all_group = convert_groups_db_to_schema(
row,
prefix="groups_",
- access_rights=row["user_to_groups_access_rights"],
+ accessRights=row["user_to_groups_access_rights"],
)
elif row["groups_type"] == GroupType.PRIMARY:
user_primary_group = convert_groups_db_to_schema(
row,
prefix="groups_",
- access_rights=row["user_to_groups_access_rights"],
+ accessRights=row["user_to_groups_access_rights"],
)
else:
user_standard_groups.append(
convert_groups_db_to_schema(
row,
prefix="groups_",
- access_rights=row["user_to_groups_access_rights"],
+ accessRights=row["user_to_groups_access_rights"],
)
)
if not user_profile:
@@ -94,7 +94,6 @@ async def update_user_profile(
assert resp.rowcount == 1 # nosec
-
async def is_user_guest(app: web.Application, user_id: int) -> bool:
"""Returns True if the user exists and is a GUEST"""
db = get_storage(app)
@@ -208,3 +207,13 @@ async def delete_token(app: web.Application, user_id: int, service_id: str) -> N
and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id)
)
)
+
+
+async def get_user_name(app: web.Application, user_id: int) -> Dict[str, str]:
+ engine = app[APP_DB_ENGINE_KEY]
+ async with engine.acquire() as conn:
+ user_name = await conn.scalar(
+ sa.select([users.c.name]).where(users.c.id == user_id)
+ )
+ parts = user_name.split(".") + [""]
+ return dict(first_name=parts[0], last_name=parts[1])
diff --git a/services/web/server/tests/unit/with_dbs/_helpers.py b/services/web/server/tests/unit/with_dbs/_helpers.py
new file mode 100644
index 00000000000..ed826046a94
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/_helpers.py
@@ -0,0 +1,71 @@
+from asyncio import Future
+from collections import namedtuple
+from typing import List, Tuple
+
+from aiohttp import web
+
+from simcore_service_webserver.projects.projects_handlers import HTTPLocked
+from simcore_service_webserver.security_roles import UserRole
+
+ExpectedResponse = namedtuple(
+ "ExpectedResponse",
+ ["ok", "created", "no_content", "not_found", "forbidden", "locked"],
+)
+
+
+def standard_role_response() -> Tuple[str, List[Tuple[UserRole, ExpectedResponse]]]:
+ return (
+ "user_role,expected",
+ [
+ (
+ UserRole.ANONYMOUS,
+ ExpectedResponse(
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ web.HTTPUnauthorized,
+ ),
+ ),
+ (
+ UserRole.GUEST,
+ ExpectedResponse(
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ web.HTTPForbidden,
+ ),
+ ),
+ (
+ UserRole.USER,
+ ExpectedResponse(
+ web.HTTPOk,
+ web.HTTPCreated,
+ web.HTTPNoContent,
+ web.HTTPNotFound,
+ web.HTTPForbidden,
+ HTTPLocked,
+ ),
+ ),
+ (
+ UserRole.TESTER,
+ ExpectedResponse(
+ web.HTTPOk,
+ web.HTTPCreated,
+ web.HTTPNoContent,
+ web.HTTPNotFound,
+ web.HTTPForbidden,
+ HTTPLocked,
+ ),
+ ),
+ ],
+ )
+
+
+def future_with_result(result) -> Future:
+ f = Future()
+ f.set_result(result)
+ return f
diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py
index eefc459f8c0..b2fb1368756 100644
--- a/services/web/server/tests/unit/with_dbs/conftest.py
+++ b/services/web/server/tests/unit/with_dbs/conftest.py
@@ -14,7 +14,7 @@
from asyncio import Future
from copy import deepcopy
from pathlib import Path
-from typing import Dict, List
+from typing import Callable, Dict, List, Optional
from uuid import uuid4
import aioredis
@@ -23,11 +23,13 @@
import socketio
import sqlalchemy as sa
import trafaret_config
+from aiohttp import web
+from pytest_simcore.helpers.utils_assert import assert_status
+from pytest_simcore.helpers.utils_login import NewUser
from yarl import URL
import simcore_service_webserver.db_models as orm
import simcore_service_webserver.utils
-from pytest_simcore.helpers.utils_login import NewUser
from servicelib.aiopg_utils import DSN
from servicelib.rest_responses import unwrap_envelope
from simcore_service_webserver.application import create_application
@@ -90,7 +92,7 @@ def docker_compose_file(default_app_cfg):
os.environ["TEST_POSTGRES_USER"] = cfg["user"]
os.environ["TEST_POSTGRES_PASSWORD"] = cfg["password"]
- dc_path = current_dir / "docker-compose.yml"
+ dc_path = current_dir / "docker-compose-devel.yml"
assert dc_path.exists()
yield str(dc_path)
@@ -223,7 +225,7 @@ def redis_service(docker_services, docker_ip):
return url
-def is_redis_responsive(host: str, port: str) -> bool:
+def is_redis_responsive(host: str, port: int) -> bool:
r = redis.Redis(host=host, port=port)
return r.ping() == True
@@ -239,41 +241,54 @@ async def redis_client(loop, redis_service):
@pytest.fixture()
-async def socketio_url(client) -> str:
- SOCKET_IO_PATH = "/socket.io/"
- return str(client.make_url(SOCKET_IO_PATH))
+def socketio_url(client) -> Callable:
+ def create_url(client_override: Optional = None) -> str:
+ SOCKET_IO_PATH = "/socket.io/"
+ return str((client_override or client).make_url(SOCKET_IO_PATH))
+
+ yield create_url
@pytest.fixture()
-async def security_cookie(client) -> str:
- # get the cookie by calling the root entrypoint
- resp = await client.get("/v0/")
- payload = await resp.json()
- assert resp.status == 200, str(payload)
- data, error = unwrap_envelope(payload)
- assert data
- assert not error
+async def security_cookie_factory(client) -> Callable:
+ async def creator(client_override: Optional = None) -> str:
+ # get the cookie by calling the root entrypoint
+ resp = await (client_override or client).get("/v0/")
+ data, error = await assert_status(resp, web.HTTPOk)
+ assert data
+ assert not error
+
+ cookie = (
+ resp.request_info.headers["Cookie"]
+ if "Cookie" in resp.request_info.headers
+ else ""
+ )
+ return cookie
- cookie = ""
- if "Cookie" in resp.request_info.headers:
- cookie = resp.request_info.headers["Cookie"]
- yield cookie
+ yield creator
@pytest.fixture()
-async def socketio_client(socketio_url: str, security_cookie: str):
+async def socketio_client(
+ socketio_url: Callable, security_cookie_factory: Callable
+) -> Callable:
clients = []
- async def connect(client_session_id) -> socketio.AsyncClient:
+ async def connect(
+ client_session_id: str, client: Optional = None
+ ) -> socketio.AsyncClient:
sio = socketio.AsyncClient(ssl_verify=False)
# enginio 3.10.0 introduced ssl verification
url = str(
- URL(socketio_url).with_query({"client_session_id": client_session_id})
+ URL(socketio_url(client)).with_query(
+ {"client_session_id": client_session_id}
+ )
)
headers = {}
- if security_cookie:
+ cookie = await security_cookie_factory(client)
+ if cookie:
# WARNING: engineio fails with empty cookies. Expects "key=value"
- headers.update({"Cookie": security_cookie})
+ headers.update({"Cookie": cookie})
await sio.connect(url, headers=headers)
assert sio.sid
diff --git a/services/web/server/tests/unit/with_dbs/test_groups.py b/services/web/server/tests/unit/with_dbs/test_groups.py
index d6da3bb01d3..884cfea7d53 100644
--- a/services/web/server/tests/unit/with_dbs/test_groups.py
+++ b/services/web/server/tests/unit/with_dbs/test_groups.py
@@ -5,13 +5,14 @@
import random
from copy import deepcopy
-from typing import Dict, List, Tuple
+from typing import Dict, List
import pytest
from aiohttp import web
-
from pytest_simcore.helpers.utils_assert import assert_status
from pytest_simcore.helpers.utils_login import LoggedUser, create_user
+
+from _helpers import standard_role_response
from servicelib.application import create_safe_application
from simcore_service_webserver.db import setup_db
from simcore_service_webserver.groups import setup_groups
@@ -59,21 +60,16 @@ def client(loop, aiohttp_client, app_cfg, postgres_service):
return client
-# WARNING: pytest-asyncio and pytest-aiohttp are not compatible
-#
-# https://github.com/aio-libs/pytest-aiohttp/issues/8#issuecomment-405602020
-# https://github.com/pytest-dev/pytest-asyncio/issues/76
-#
-
-
@pytest.fixture
-async def logged_user(client, role: UserRole):
+async def logged_user(client, user_role: UserRole):
""" adds a user in db and logs in with client
NOTE: role fixture is defined as a parametrization below
"""
async with LoggedUser(
- client, {"role": role.name}, check_if_succeeds=role != UserRole.ANONYMOUS
+ client,
+ {"role": user_role.name},
+ check_if_succeeds=user_role != UserRole.ANONYMOUS,
) as user:
yield user
@@ -83,9 +79,9 @@ async def logged_user(client, role: UserRole):
def _assert_group(group: Dict[str, str]):
- properties = ["gid", "label", "description", "thumbnail", "access_rights"]
+ properties = ["gid", "label", "description", "thumbnail", "accessRights"]
assert all(x in group for x in properties)
- access_rights = group["access_rights"]
+ access_rights = group["accessRights"]
access_rights_properties = ["read", "write", "delete"]
assert all(x in access_rights for x in access_rights_properties)
@@ -102,104 +98,18 @@ def _assert__group_user(
assert actual_user["login"] == expected_user["email"]
assert "gravatar_id" in actual_user
assert actual_user["gravatar_id"] == gravatar_hash(expected_user["email"])
- assert "access_rights" in actual_user
- assert actual_user["access_rights"] == expected_access_rights
+ assert "accessRights" in actual_user
+ assert actual_user["accessRights"] == expected_access_rights
assert "id" in actual_user
assert actual_user["id"] == expected_user["id"]
assert "gid" in actual_user
-@pytest.mark.parametrize(
- "role,expected",
- [
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPOk),
- (UserRole.TESTER, web.HTTPOk),
- ],
-)
+@pytest.mark.parametrize(*standard_role_response(),)
async def test_list_groups(
client,
logged_user,
- role,
- expected,
- primary_group: Dict[str, str],
- standard_groups: List[Dict[str, str]],
- all_group: Dict[str, str],
-):
- url = client.app.router["list_groups"].url_for()
- assert str(url) == f"{PREFIX}"
-
- resp = await client.get(url)
- data, error = await assert_status(resp, expected)
-
- if not error:
- assert isinstance(data, dict)
- assert "me" in data
- _assert_group(data["me"])
- assert data["me"] == primary_group
-
- assert "organizations" in data
- assert isinstance(data["organizations"], list)
- for group in data["organizations"]:
- _assert_group(group)
- assert data["organizations"] == standard_groups
- assert "all" in data
- _assert_group(data["all"])
- assert data["all"] == all_group
-
-
-def _standard_role_response() -> Tuple[
- str, List[Tuple[UserRole, web.Response, web.Response, web.Response]]
-]:
- return (
- "role,expected_ok, expected_created, expected_no_contents, expected_not_found",
- [
- (
- UserRole.ANONYMOUS,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- ),
- (
- UserRole.GUEST,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPForbidden,
- ),
- (
- UserRole.USER,
- web.HTTPOk,
- web.HTTPCreated,
- web.HTTPNoContent,
- web.HTTPNotFound,
- ),
- (
- UserRole.TESTER,
- web.HTTPOk,
- web.HTTPCreated,
- web.HTTPNoContent,
- web.HTTPNotFound,
- ),
- ],
- )
-
-
-@pytest.mark.parametrize(
- "role,expected",
- [
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPOk),
- (UserRole.TESTER, web.HTTPOk),
- ],
-)
-async def test_group_access_rights(
- client,
- logged_user,
- role,
+ user_role,
expected,
primary_group: Dict[str, str],
standard_groups: List[Dict[str, str]],
@@ -209,7 +119,7 @@ async def test_group_access_rights(
assert str(url) == f"{PREFIX}"
resp = await client.get(url)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.ok)
if not error:
assert isinstance(data, dict)
@@ -252,48 +162,8 @@ async def test_group_access_rights(
data, error = await assert_status(resp, web.HTTPForbidden)
-@pytest.mark.parametrize(
- "role,expected,expected_read,expected_delete,expected_not_found",
- [
- (
- UserRole.ANONYMOUS,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- ),
- (
- UserRole.GUEST,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPForbidden,
- ),
- (
- UserRole.USER,
- web.HTTPCreated,
- web.HTTPOk,
- web.HTTPNoContent,
- web.HTTPNotFound,
- ),
- (
- UserRole.TESTER,
- web.HTTPCreated,
- web.HTTPOk,
- web.HTTPNoContent,
- web.HTTPNotFound,
- ),
- ],
-)
-async def test_group_creation_workflow(
- client,
- logged_user,
- role,
- expected,
- expected_read,
- expected_delete,
- expected_not_found,
-):
+@pytest.mark.parametrize(*standard_role_response())
+async def test_group_creation_workflow(client, logged_user, user_role, expected):
url = client.app.router["create_group"].url_for()
assert str(url) == f"{PREFIX}"
@@ -305,7 +175,7 @@ async def test_group_creation_workflow(
}
resp = await client.post(url, json=new_group)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.created)
assigned_group = new_group
if not error:
@@ -317,7 +187,7 @@ async def test_group_creation_workflow(
for prop in ["label", "description", "thumbnail"]:
assert assigned_group[prop] == new_group[prop]
# we get all rights on the group since we are the creator
- assert assigned_group["access_rights"] == {
+ assert assigned_group["accessRights"] == {
"read": True,
"write": True,
"delete": True,
@@ -328,7 +198,7 @@ async def test_group_creation_workflow(
assert str(url) == f"{PREFIX}"
resp = await client.get(url)
- data, error = await assert_status(resp, expected_read)
+ data, error = await assert_status(resp, expected.ok)
if not error:
assert len(data["organizations"]) == 1
assert data["organizations"][0] == assigned_group
@@ -337,7 +207,7 @@ async def test_group_creation_workflow(
url = client.app.router["get_group"].url_for(gid=str(assigned_group["gid"]))
assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
resp = await client.get(url)
- data, error = await assert_status(resp, expected_read)
+ data, error = await assert_status(resp, expected.ok)
if not error:
assert data == assigned_group
@@ -346,7 +216,7 @@ async def test_group_creation_workflow(
url = client.app.router["update_group"].url_for(gid=str(assigned_group["gid"]))
assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
resp = await client.patch(url, json=modified_group)
- data, error = await assert_status(resp, expected_read)
+ data, error = await assert_status(resp, expected.ok)
if not error:
assert data != assigned_group
_assert_group(data)
@@ -356,7 +226,7 @@ async def test_group_creation_workflow(
url = client.app.router["get_group"].url_for(gid=str(assigned_group["gid"]))
assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
resp = await client.get(url)
- data, error = await assert_status(resp, expected_read)
+ data, error = await assert_status(resp, expected.ok)
if not error:
_assert_group(data)
assert data == assigned_group
@@ -365,7 +235,7 @@ async def test_group_creation_workflow(
url = client.app.router["delete_group"].url_for(gid=str(assigned_group["gid"]))
assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
resp = await client.delete(url)
- data, error = await assert_status(resp, expected_delete)
+ data, error = await assert_status(resp, expected.no_content)
if not error:
assert not data
@@ -373,56 +243,18 @@ async def test_group_creation_workflow(
url = client.app.router["delete_group"].url_for(gid=str(assigned_group["gid"]))
assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
resp = await client.delete(url)
- data, error = await assert_status(resp, expected_not_found)
+ data, error = await assert_status(resp, expected.not_found)
# check getting the group fails
url = client.app.router["get_group"].url_for(gid=str(assigned_group["gid"]))
assert str(url) == f"{PREFIX}/{assigned_group['gid']}"
resp = await client.get(url)
- data, error = await assert_status(resp, expected_not_found)
-
-
-@pytest.mark.parametrize(
- "role, expected_created,expected,expected_not_found,expected_no_content",
- [
- (
- UserRole.ANONYMOUS,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- ),
- (
- UserRole.GUEST,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPForbidden,
- ),
- (
- UserRole.USER,
- web.HTTPCreated,
- web.HTTPOk,
- web.HTTPNotFound,
- web.HTTPNoContent,
- ),
- (
- UserRole.TESTER,
- web.HTTPCreated,
- web.HTTPOk,
- web.HTTPNotFound,
- web.HTTPNoContent,
- ),
- ],
-)
+ data, error = await assert_status(resp, expected.not_found)
+
+
+@pytest.mark.parametrize(*standard_role_response())
async def test_add_remove_users_from_group(
- client,
- logged_user,
- role,
- expected_created,
- expected,
- expected_not_found,
- expected_no_content,
+ client, logged_user, user_role, expected,
):
new_group = {
@@ -436,13 +268,13 @@ async def test_add_remove_users_from_group(
url = client.app.router["get_group_users"].url_for(gid=new_group["gid"])
assert str(url) == f"{PREFIX}/{new_group['gid']}/users"
resp = await client.get(url)
- data, error = await assert_status(resp, expected_not_found)
+ data, error = await assert_status(resp, expected.not_found)
url = client.app.router["create_group"].url_for()
assert str(url) == f"{PREFIX}"
resp = await client.post(url, json=new_group)
- data, error = await assert_status(resp, expected_created)
+ data, error = await assert_status(resp, expected.created)
assigned_group = new_group
if not error:
@@ -454,7 +286,7 @@ async def test_add_remove_users_from_group(
for prop in ["label", "description", "thumbnail"]:
assert assigned_group[prop] == new_group[prop]
# we get all rights on the group since we are the creator
- assert assigned_group["access_rights"] == {
+ assert assigned_group["accessRights"] == {
"read": True,
"write": True,
"delete": True,
@@ -466,7 +298,7 @@ async def test_add_remove_users_from_group(
)
assert str(get_group_users_url) == f"{PREFIX}/{assigned_group['gid']}/users"
resp = await client.get(get_group_users_url)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.ok)
if not error:
list_of_users = data
@@ -491,7 +323,7 @@ async def test_add_remove_users_from_group(
else {"email": created_users_list[i]["email"]}
)
resp = await client.post(add_group_user_url, json=params)
- data, error = await assert_status(resp, expected_no_content)
+ data, error = await assert_status(resp, expected.no_content)
get_group_user_url = client.app.router["get_group_user"].url_for(
gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
@@ -501,14 +333,14 @@ async def test_add_remove_users_from_group(
== f"{PREFIX}/{assigned_group['gid']}/users/{created_users_list[i]['id']}"
)
resp = await client.get(get_group_user_url)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.ok)
if not error:
_assert__group_user(
created_users_list[i], DEFAULT_GROUP_READ_ACCESS_RIGHTS, data
)
# check list is correct
resp = await client.get(get_group_users_url)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.ok)
if not error:
list_of_users = data
# now we should have all the users in the group + the owner
@@ -538,9 +370,9 @@ async def test_add_remove_users_from_group(
gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
)
resp = await client.patch(
- update_group_user_url, json={"access_rights": MANAGER_ACCESS_RIGHTS}
+ update_group_user_url, json={"accessRights": MANAGER_ACCESS_RIGHTS}
)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.ok)
if not error:
_assert__group_user(created_users_list[i], MANAGER_ACCESS_RIGHTS, data)
# check it is there
@@ -548,7 +380,7 @@ async def test_add_remove_users_from_group(
gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
)
resp = await client.get(get_group_user_url)
- data, error = await assert_status(resp, expected)
+ data, error = await assert_status(resp, expected.ok)
if not error:
_assert__group_user(created_users_list[i], MANAGER_ACCESS_RIGHTS, data)
# remove the user from the group
@@ -556,14 +388,123 @@ async def test_add_remove_users_from_group(
gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
)
resp = await client.delete(delete_group_user_url)
- data, error = await assert_status(resp, expected_no_content)
+ data, error = await assert_status(resp, expected.no_content)
# do it again to check it is not found anymore
resp = await client.delete(delete_group_user_url)
- data, error = await assert_status(resp, expected_not_found)
+ data, error = await assert_status(resp, expected.not_found)
# check it is not there anymore
get_group_user_url = client.app.router["get_group_user"].url_for(
gid=str(assigned_group["gid"]), uid=str(created_users_list[i]["id"])
)
resp = await client.get(get_group_user_url)
- data, error = await assert_status(resp, expected_not_found)
+ data, error = await assert_status(resp, expected.not_found)
+
+
+@pytest.mark.parametrize(*standard_role_response())
+async def test_group_access_rights(
+ client, logged_user, user_role, expected,
+):
+ # Use-case:
+ # 1. create a group
+ url = client.app.router["create_group"].url_for()
+ assert str(url) == f"{PREFIX}"
+
+ new_group = {
+ "gid": "4564",
+ "label": f"this is user {logged_user['id']} group",
+ "description": f"user {logged_user['email']} is the owner of that one",
+ "thumbnail": None,
+ }
+
+ resp = await client.post(url, json=new_group)
+ data, error = await assert_status(resp, expected.created)
+ if not data:
+ # role cannot create a group so stop here
+ return
+ assigned_group = data
+
+ # 1. have 2 users
+ users = [await create_user() for i in range(2)]
+
+ # 2. add the users to the group
+ add_group_user_url = client.app.router["add_group_user"].url_for(
+ gid=str(assigned_group["gid"])
+ )
+ assert str(add_group_user_url) == f"{PREFIX}/{assigned_group['gid']}/users"
+ for i, user in enumerate(users):
+ params = {"uid": user["id"]} if i % 2 == 0 else {"email": user["email"]}
+ resp = await client.post(add_group_user_url, json=params)
+ data, error = await assert_status(resp, expected.no_content)
+ # 3. user 1 shall be a manager
+ patch_group_user_url = client.app.router["update_group_user"].url_for(
+ gid=str(assigned_group["gid"]), uid=str(users[0]["id"])
+ )
+ assert (
+ str(patch_group_user_url)
+ == f"{PREFIX}/{assigned_group['gid']}/users/{users[0]['id']}"
+ )
+ params = {"accessRights": {"read": True, "write": True, "delete": False}}
+ resp = await client.patch(patch_group_user_url, json=params)
+ data, error = await assert_status(resp, expected.ok)
+ # 4. user 2 shall be a member
+ patch_group_user_url = client.app.router["update_group_user"].url_for(
+ gid=str(assigned_group["gid"]), uid=str(users[1]["id"])
+ )
+ assert (
+ str(patch_group_user_url)
+ == f"{PREFIX}/{assigned_group['gid']}/users/{users[1]['id']}"
+ )
+ params = {"accessRights": {"read": True, "write": False, "delete": False}}
+ resp = await client.patch(patch_group_user_url, json=params)
+ data, error = await assert_status(resp, expected.ok)
+
+ # let's login as user 1
+ # login
+ url = client.app.router["auth_login"].url_for()
+ resp = await client.post(
+ url, json={"email": users[0]["email"], "password": users[0]["raw_password"],}
+ )
+ await assert_status(resp, expected.ok)
+ # check as a manager I can remove user 2
+ delete_group_user_url = client.app.router["delete_group_user"].url_for(
+ gid=str(assigned_group["gid"]), uid=str(users[1]["id"])
+ )
+ assert (
+ str(delete_group_user_url)
+ == f"{PREFIX}/{assigned_group['gid']}/users/{users[1]['id']}"
+ )
+ resp = await client.delete(delete_group_user_url)
+ data, error = await assert_status(resp, expected.no_content)
+ # as a manager I can add user 2 again
+ resp = await client.post(add_group_user_url, json={"uid": users[1]["id"]})
+ data, error = await assert_status(resp, expected.no_content)
+ # as a manager I cannot delete the group
+ url = client.app.router["delete_group"].url_for(gid=str(assigned_group["gid"]))
+ resp = await client.delete(url)
+ data, error = await assert_status(resp, web.HTTPForbidden)
+
+ # now log in as user 2
+ # login
+ url = client.app.router["auth_login"].url_for()
+ resp = await client.post(
+ url, json={"email": users[1]["email"], "password": users[1]["raw_password"],}
+ )
+ await assert_status(resp, expected.ok)
+ # as a member I cannot remove user 1
+ delete_group_user_url = client.app.router["delete_group_user"].url_for(
+ gid=str(assigned_group["gid"]), uid=str(users[0]["id"])
+ )
+ assert (
+ str(delete_group_user_url)
+ == f"{PREFIX}/{assigned_group['gid']}/users/{users[0]['id']}"
+ )
+ resp = await client.delete(delete_group_user_url)
+ data, error = await assert_status(resp, web.HTTPForbidden)
+ # as a member I cannot add user 1
+ resp = await client.post(add_group_user_url, json={"uid": users[0]["id"]})
+ data, error = await assert_status(resp, web.HTTPForbidden)
+ # as a member I cannot delete the group
+ url = client.app.router["delete_group"].url_for(gid=str(assigned_group["gid"]))
+ resp = await client.delete(url)
+ data, error = await assert_status(resp, web.HTTPForbidden)
diff --git a/services/web/server/tests/unit/with_dbs/test_projects.py b/services/web/server/tests/unit/with_dbs/test_projects.py
index 26976d1abe0..4feb5fab05c 100644
--- a/services/web/server/tests/unit/with_dbs/test_projects.py
+++ b/services/web/server/tests/unit/with_dbs/test_projects.py
@@ -2,18 +2,31 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
+import asyncio
+import json
+import pdb
+import time
import uuid as uuidlib
from asyncio import Future, sleep
from copy import deepcopy
-from typing import Dict, List, Optional
+from typing import Callable, Dict, List, Optional
+import mock
import pytest
+import socketio
from aiohttp import web
from mock import call
-
from pytest_simcore.helpers.utils_assert import assert_status
-from pytest_simcore.helpers.utils_login import LoggedUser, log_client_in
+from pytest_simcore.helpers.utils_login import LoggedUser, create_user, log_client_in
from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects
+from socketio.exceptions import ConnectionError
+
+from _helpers import (
+ ExpectedResponse,
+ HTTPLocked,
+ future_with_result,
+ standard_role_response,
+)
from servicelib.application import create_safe_application
from simcore_service_webserver.db import setup_db
from simcore_service_webserver.db_models import UserRole
@@ -23,11 +36,17 @@
from simcore_service_webserver.projects.projects_handlers import (
OVERRIDABLE_DOCUMENT_KEYS,
)
+from simcore_service_webserver.projects.projects_models import (
+ Owner,
+ ProjectLocked,
+ ProjectState,
+)
from simcore_service_webserver.resource_manager import setup_resource_manager
from simcore_service_webserver.rest import setup_rest
from simcore_service_webserver.security import setup_security
from simcore_service_webserver.session import setup_session
from simcore_service_webserver.socketio import setup_sockets
+from simcore_service_webserver.socketio.events import SOCKET_IO_PROJECT_UPDATED_EVENT
from simcore_service_webserver.tags import setup_tags
from simcore_service_webserver.utils import now_str, to_datetime
@@ -36,12 +55,6 @@
API_PREFIX = "/" + API_VERSION
-def future_with_result(result) -> Future:
- f = Future()
- f.set_result(result)
- return f
-
-
@pytest.fixture
def mocked_director_subsystem(mocker):
mock_director_api = {
@@ -61,6 +74,15 @@ def mocked_director_subsystem(mocker):
return mock_director_api
+DEFAULT_GARBAGE_COLLECTOR_INTERVAL_SECONDS: int = 3
+DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS: int = 3
+
+
+@pytest.fixture
+def gc_long_deletion_timeout():
+ DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS = 900
+
+
@pytest.fixture
def client(
loop,
@@ -80,10 +102,10 @@ def client(
cfg["director"]["enabled"] = True
cfg["resource_manager"][
"garbage_collection_interval_seconds"
- ] = 3 # increase speed of garbage collection
+ ] = DEFAULT_GARBAGE_COLLECTOR_INTERVAL_SECONDS # increase speed of garbage collection
cfg["resource_manager"][
"resource_deletion_timeout_seconds"
- ] = 3 # reduce deletion delay
+ ] = DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS # reduce deletion delay
app = create_safe_application(cfg)
# setup app
@@ -122,22 +144,6 @@ async def logged_user(client, user_role: UserRole):
print("<----- logged out user", user_role)
-@pytest.fixture()
-async def logged_user2(client, user_role: UserRole):
- """ adds a user in db and logs in with client
-
- NOTE: `user_role` fixture is defined as a parametrization below!!!
- """
- async with LoggedUser(
- client,
- {"role": user_role.name},
- check_if_succeeds=user_role != UserRole.ANONYMOUS,
- ) as user:
- print("-----> logged in user", user_role)
- yield user
- print("<----- logged out user", user_role)
-
-
@pytest.fixture
async def user_project(client, fake_project, logged_user):
async with NewProject(
@@ -148,6 +154,23 @@ async def user_project(client, fake_project, logged_user):
print("<----- removed project", project["name"])
+@pytest.fixture
+async def shared_project(client, fake_project, logged_user, all_group):
+ fake_project.update(
+ {
+ "accessRights": {
+ f"{all_group['gid']}": {"read": True, "write": False, "delete": False}
+ },
+ },
+ )
+ async with NewProject(
+ fake_project, client.app, user_id=logged_user["id"],
+ ) as project:
+ print("-----> added project", project["name"])
+ yield project
+ print("<----- removed project", project["name"])
+
+
@pytest.fixture
async def template_project(
client, fake_project, logged_user, all_group: Dict[str, str]
@@ -372,15 +395,7 @@ async def _new_project(
# POST --------
-@pytest.mark.parametrize(
- "user_role,expected",
- [
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPCreated),
- (UserRole.TESTER, web.HTTPCreated),
- ],
-)
+@pytest.mark.parametrize(*standard_role_response())
async def test_new_project(
client,
logged_user,
@@ -390,18 +405,12 @@ async def test_new_project(
storage_subsystem_mock,
project_db_cleaner,
):
- new_project = await _new_project(client, expected, logged_user, primary_group)
+ new_project = await _new_project(
+ client, expected.created, logged_user, primary_group
+ )
-@pytest.mark.parametrize(
- "user_role,expected",
- [
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPCreated),
- (UserRole.TESTER, web.HTTPCreated),
- ],
-)
+@pytest.mark.parametrize(*standard_role_response())
async def test_new_project_from_template(
client,
logged_user,
@@ -413,7 +422,11 @@ async def test_new_project_from_template(
project_db_cleaner,
):
new_project = await _new_project(
- client, expected, logged_user, primary_group, from_template=template_project
+ client,
+ expected.created,
+ logged_user,
+ primary_group,
+ from_template=template_project,
)
if new_project:
@@ -425,15 +438,7 @@ async def test_new_project_from_template(
pytest.fail("Invalid uuid in workbench node {}".format(node_name))
-@pytest.mark.parametrize(
- "user_role,expected",
- [
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPCreated),
- (UserRole.TESTER, web.HTTPCreated),
- ],
-)
+@pytest.mark.parametrize(*standard_role_response())
async def test_new_project_from_template_with_body(
client,
logged_user,
@@ -465,7 +470,7 @@ async def test_new_project_from_template_with_body(
}
project = await _new_project(
client,
- expected,
+ expected.created,
logged_user,
primary_group,
project=predefined,
@@ -603,43 +608,7 @@ async def test_new_template_from_project(
pytest.fail("Invalid uuid in workbench node {}".format(node_name))
-@pytest.mark.parametrize(
- "user_role,expected_created,expected_ok,expected_notfound,expected_nocontents,expected_forbidden",
- [
- (
- UserRole.ANONYMOUS,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- web.HTTPUnauthorized,
- ),
- (
- UserRole.GUEST,
- web.HTTPForbidden,
- web.HTTPForbidden,
- web.HTTPNotFound,
- web.HTTPForbidden,
- web.HTTPForbidden,
- ),
- (
- UserRole.USER,
- web.HTTPCreated,
- web.HTTPOk,
- web.HTTPNotFound,
- web.HTTPNoContent,
- web.HTTPForbidden,
- ),
- (
- UserRole.TESTER,
- web.HTTPCreated,
- web.HTTPOk,
- web.HTTPNotFound,
- web.HTTPNoContent,
- web.HTTPForbidden,
- ),
- ],
-)
+@pytest.mark.parametrize(*standard_role_response())
@pytest.mark.parametrize(
"share_rights",
[
@@ -651,20 +620,16 @@ async def test_new_template_from_project(
)
async def test_share_project(
client,
- logged_user,
+ logged_user: Dict,
primary_group: Dict[str, str],
standard_groups: List[Dict[str, str]],
all_group: Dict[str, str],
- user_role,
- expected_created,
- expected_ok,
- expected_notfound,
- expected_nocontents,
- expected_forbidden,
+ user_role: UserRole,
+ expected: ExpectedResponse,
storage_subsystem_mock,
mocked_director_subsystem,
computational_system_mock,
- share_rights,
+ share_rights: Dict,
project_db_cleaner,
):
# Use-case: the user shares some projects with a group
@@ -672,7 +637,7 @@ async def test_share_project(
# create a few projects
new_project = await _new_project(
client,
- expected_created,
+ expected.created,
logged_user,
primary_group,
project={"accessRights": {str(all_group["gid"]): share_rights}},
@@ -684,7 +649,7 @@ async def test_share_project(
}
# user 1 can always get to his project
- await _get_project(client, new_project, expected_ok)
+ await _get_project(client, new_project, expected.ok)
# get another user logged in now
user_2 = await log_client_in(
@@ -695,10 +660,10 @@ async def test_share_project(
await _get_project(
client,
new_project,
- expected_ok if share_rights["read"] else expected_forbidden,
+ expected.ok if share_rights["read"] else expected.forbidden,
)
# user 2 can only list projects if user 2 has read access
- list_projects = await _list_projects(client, expected_ok)
+ list_projects = await _list_projects(client, expected.ok)
assert len(list_projects) == (1 if share_rights["read"] else 0)
# user 2 can only update the project is user 2 has write access
project_update = deepcopy(new_project)
@@ -706,13 +671,13 @@ async def test_share_project(
await _replace_project(
client,
project_update,
- expected_ok if share_rights["write"] else expected_forbidden,
+ expected.ok if share_rights["write"] else expected.forbidden,
)
# user 2 can only delete projects if user 2 has delete access
await _delete_project(
client,
new_project,
- expected_nocontents if share_rights["delete"] else expected_forbidden,
+ expected.no_content if share_rights["delete"] else expected.forbidden,
)
@@ -903,15 +868,7 @@ async def test_open_project(
mocked_director_subsystem["start_service"].assert_has_calls(calls)
-@pytest.mark.parametrize(
- "user_role,expected",
- [
- (UserRole.ANONYMOUS, web.HTTPUnauthorized),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPNoContent),
- (UserRole.TESTER, web.HTTPNoContent),
- ],
-)
+@pytest.mark.parametrize(*standard_role_response())
async def test_close_project(
client,
logged_user,
@@ -943,7 +900,7 @@ async def test_close_project(
# close project
url = client.app.router["close_project"].url_for(project_id=user_project["uuid"])
resp = await client.post(url, json=client_id)
- await assert_status(resp, expected)
+ await assert_status(resp, expected.no_content)
if resp.status == web.HTTPNoContent.status_code:
calls = [
call(client.server.app, user_project["uuid"], None),
@@ -957,7 +914,7 @@ async def test_close_project(
@pytest.mark.parametrize(
"user_role, expected",
[
- # (UserRole.ANONYMOUS, web.HTTPUnauthorized),
+ (UserRole.ANONYMOUS, web.HTTPUnauthorized),
(UserRole.GUEST, web.HTTPOk),
(UserRole.USER, web.HTTPOk),
(UserRole.TESTER, web.HTTPOk),
@@ -974,8 +931,14 @@ async def test_get_active_project(
):
# login with socket using client session id
client_id1 = client_session_id()
- sio = await socketio_client(client_id1)
- assert sio.sid
+ sio = None
+ try:
+ sio = await socketio_client(client_id1)
+ assert sio.sid
+ except ConnectionError:
+ if expected == web.HTTPOk:
+ pytest.fail("socket io connection should not fail")
+
# get active projects -> empty
get_active_projects_url = (
client.app.router["get_active_project"]
@@ -1002,8 +965,12 @@ async def test_get_active_project(
# login with socket using client session id2
client_id2 = client_session_id()
- sio = await socketio_client(client_id2)
- assert sio.sid
+ try:
+ sio = await socketio_client(client_id2)
+ assert sio.sid
+ except ConnectionError:
+ if expected == web.HTTPOk:
+ pytest.fail("socket io connection should not fail")
# get active projects -> empty
get_active_projects_url = (
client.app.router["get_active_project"]
@@ -1018,15 +985,15 @@ async def test_get_active_project(
@pytest.mark.parametrize(
- "user_role, expected",
+ "user_role, expected_ok, expected_forbidden",
[
- # (UserRole.ANONYMOUS),
- (UserRole.GUEST, web.HTTPForbidden),
- (UserRole.USER, web.HTTPForbidden),
- (UserRole.TESTER, web.HTTPForbidden),
+ (UserRole.ANONYMOUS, web.HTTPUnauthorized, web.HTTPUnauthorized),
+ (UserRole.GUEST, web.HTTPOk, web.HTTPForbidden),
+ (UserRole.USER, web.HTTPOk, web.HTTPForbidden),
+ (UserRole.TESTER, web.HTTPOk, web.HTTPForbidden),
],
)
-async def test_delete_shared_project_forbidden(
+async def test_delete_multiple_opened_project_forbidden(
client,
logged_user,
user_project,
@@ -1034,21 +1001,31 @@ async def test_delete_shared_project_forbidden(
mocked_dynamic_service,
socketio_client,
client_session_id,
- expected,
+ user_role,
+ expected_ok,
+ expected_forbidden,
mocked_director_subsystem,
):
# service in project = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"])
service = await mocked_dynamic_service(logged_user["id"], user_project["uuid"])
# open project in tab1
client_session_id1 = client_session_id()
- sio1 = await socketio_client(client_session_id1)
+ try:
+ sio1 = await socketio_client(client_session_id1)
+ except ConnectionError:
+ if user_role != UserRole.ANONYMOUS:
+ pytest.fail("socket io connection should not fail")
url = client.app.router["open_project"].url_for(project_id=user_project["uuid"])
resp = await client.post(url, json=client_session_id1)
- await assert_status(resp, web.HTTPOk)
+ await assert_status(resp, expected_ok)
# delete project in tab2
client_session_id2 = client_session_id()
- sio2 = await socketio_client(client_session_id2)
- await _delete_project(client, user_project, expected)
+ try:
+ sio2 = await socketio_client(client_session_id2)
+ except ConnectionError:
+ if user_role != UserRole.ANONYMOUS:
+ pytest.fail("socket io connection should not fail")
+ await _delete_project(client, user_project, expected_forbidden)
@pytest.mark.parametrize(
@@ -1220,3 +1197,226 @@ async def test_tags_to_studies(
url = client.app.router["delete_tag"].url_for(tag_id=str(added_tags[1].get("id")))
resp = await client.delete(url)
await assert_status(resp, web.HTTPNoContent)
+
+
+async def _connect_websocket(
+ socketio_client: Callable,
+ check_connection: bool,
+ client,
+ client_id: str,
+ events: Optional[Dict[str, Callable]] = None,
+) -> socketio.AsyncClient:
+ try:
+ sio = await socketio_client(client_id, client)
+ assert sio.sid
+ if events:
+ for event, handler in events.items():
+ sio.on(event, handler=handler)
+ return sio
+ except ConnectionError:
+ if check_connection:
+ pytest.fail("socket io connection should not fail")
+
+
+async def _open_project(
+ client, client_id: str, project: Dict, expected: web.HTTPException
+):
+ url = client.app.router["open_project"].url_for(project_id=project["uuid"])
+ resp = await client.post(url, json=client_id)
+ await assert_status(resp, expected)
+
+
+async def _close_project(
+ client, client_id: str, project: Dict, expected: web.HTTPException
+):
+ url = client.app.router["close_project"].url_for(project_id=project["uuid"])
+ resp = await client.post(url, json=client_id)
+ data, error = await assert_status(resp, expected)
+
+
+async def _state_project(
+ client,
+ project: Dict,
+ expected: web.HTTPException,
+ expected_project_state: ProjectState,
+):
+ url = client.app.router["state_project"].url_for(project_id=project["uuid"])
+ resp = await client.get(url)
+ data, error = await assert_status(resp, expected)
+ if not error:
+ # the project is locked
+ assert data == expected_project_state.dict()
+
+
+async def _assert_project_state_updated(
+ handler: mock.Mock,
+ shared_project: Dict,
+ expected_project_state: ProjectState,
+ num_calls: int,
+) -> None:
+ if num_calls == 0:
+ handler.assert_not_called()
+ else:
+ # wait for the calls
+ now = time.monotonic()
+ MAX_WAITING_TIME = 15
+ while time.monotonic() - now < MAX_WAITING_TIME:
+ await asyncio.sleep(1)
+ if handler.call_count == num_calls:
+ break
+ if time.monotonic() - now > MAX_WAITING_TIME:
+ pytest.fail(
+ f"waited more than {MAX_WAITING_TIME}s and got only {handler.call_count}/{num_calls} calls"
+ )
+
+ calls = [
+ call(
+ json.dumps(
+ {
+ "project_uuid": shared_project["uuid"],
+ "data": expected_project_state.dict(),
+ }
+ )
+ )
+ ] * num_calls
+ handler.assert_has_calls(calls)
+ handler.reset_mock()
+
+
+@pytest.mark.parametrize(*standard_role_response())
+async def test_open_shared_project_2_users_locked(
+ client,
+ logged_user: Dict,
+ shared_project: Dict,
+ socketio_client: Callable,
+ # mocked_director_subsystem,
+ client_session_id: Callable,
+ user_role: UserRole,
+ expected: ExpectedResponse,
+ aiohttp_client,
+ mocker,
+):
+ # Use-case: user 1 opens a shared project, user 2 tries to open it as well
+ mock_project_state_updated_handler = mocker.Mock()
+
+ client_1 = client
+ client_id1 = client_session_id()
+ client_2 = await aiohttp_client(client.app)
+ client_id2 = client_session_id()
+
+ # 1. user 1 opens project
+ sio_1 = await _connect_websocket(
+ socketio_client,
+ user_role != UserRole.ANONYMOUS,
+ client_1,
+ client_id1,
+ {SOCKET_IO_PROJECT_UPDATED_EVENT: mock_project_state_updated_handler},
+ )
+ expected_project_state = ProjectState(locked={"value": False})
+ await _state_project(
+ client_1,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else web.HTTPOk,
+ expected_project_state,
+ )
+ await _open_project(
+ client_1,
+ client_id1,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else web.HTTPOk,
+ )
+ expected_project_state.locked.value = True
+ expected_project_state.locked.owner = Owner(
+ first_name=(logged_user["name"].split(".") + [""])[0],
+ last_name=(logged_user["name"].split(".") + [""])[1],
+ )
+ # NOTE: there are 2 calls since we are part of the primary group and the all group
+ await _assert_project_state_updated(
+ mock_project_state_updated_handler,
+ shared_project,
+ expected_project_state,
+ 0 if user_role == UserRole.ANONYMOUS else 2,
+ )
+
+ await _state_project(
+ client_1,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else web.HTTPOk,
+ expected_project_state,
+ )
+
+ # 2. create a separate client now and log in user2, try to open the same shared project
+ user_2 = await log_client_in(
+ client_2, {"role": user_role.name}, enable_check=user_role != UserRole.ANONYMOUS
+ )
+ sio_2 = await _connect_websocket(
+ socketio_client,
+ user_role != UserRole.ANONYMOUS,
+ client_2,
+ client_id2,
+ {SOCKET_IO_PROJECT_UPDATED_EVENT: mock_project_state_updated_handler},
+ )
+ await _open_project(
+ client_2,
+ client_id2,
+ shared_project,
+ expected.locked if user_role != UserRole.GUEST else HTTPLocked,
+ )
+ await _state_project(
+ client_2,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else web.HTTPOk,
+ expected_project_state,
+ )
+
+ # 3. user 1 closes the project
+ await _close_project(client_1, client_id1, shared_project, expected.no_content)
+ if not any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]):
+ # Guests cannot close projects
+ expected_project_state = ProjectState(locked=ProjectLocked(value=False))
+
+ # we should receive an event that the project lock state changed
+ # NOTE: there are 3 calls since we are part of the primary group and the all group and user 2 is part of the all group
+ await _assert_project_state_updated(
+ mock_project_state_updated_handler,
+ shared_project,
+ expected_project_state,
+ 0
+ if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST])
+ else 3,
+ )
+ await _state_project(
+ client_1,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else web.HTTPOk,
+ expected_project_state,
+ )
+
+ # 4. user 2 now should be able to open the project
+ await _open_project(
+ client_2,
+ client_id2,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else HTTPLocked,
+ )
+ if not any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]):
+ expected_project_state.locked.value = True
+ expected_project_state.locked.owner = Owner(
+ first_name=(user_2["name"].split(".") + [""])[0],
+ last_name=(user_2["name"].split(".") + [""])[1],
+ )
+ # NOTE: there are 3 calls since we are part of the primary group and the all group
+ await _assert_project_state_updated(
+ mock_project_state_updated_handler,
+ shared_project,
+ expected_project_state,
+ 0
+ if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST])
+ else 3,
+ )
+ await _state_project(
+ client_1,
+ shared_project,
+ expected.ok if user_role != UserRole.GUEST else web.HTTPOk,
+ expected_project_state,
+ )
diff --git a/services/web/server/tests/unit/with_dbs/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/test_resource_manager.py
index 2b3cf1ff176..fa25d82d927 100644
--- a/services/web/server/tests/unit/with_dbs/test_resource_manager.py
+++ b/services/web/server/tests/unit/with_dbs/test_resource_manager.py
@@ -89,22 +89,6 @@ async def logged_user(client, user_role: UserRole):
print("<----- logged out user", user_role)
-@pytest.fixture()
-async def logged_user2(client, user_role: UserRole):
- """ adds a user in db and logs in with client
-
- NOTE: `user_role` fixture is defined as a parametrization below!!!
- """
- async with LoggedUser(
- client,
- {"role": user_role.name},
- check_if_succeeds=user_role != UserRole.ANONYMOUS,
- ) as user:
- print("-----> logged in user", user_role)
- yield user
- print("<----- logged out user", user_role)
-
-
@pytest.fixture
async def empty_user_project(client, empty_project, logged_user):
project = empty_project()
@@ -143,19 +127,28 @@ async def close_project(client, project_uuid: str, client_session_id: str) -> No
# ------------------------ TESTS -------------------------------
+from typing import Callable
+
+
async def test_anonymous_websocket_connection(
- client_session_id, socketio_url: str, security_cookie, mocker,
+ client_session_id: str,
+ socketio_url: Callable,
+ security_cookie_factory: Callable,
+ mocker,
):
from yarl import URL
sio = socketio.AsyncClient(
ssl_verify=False
) # enginio 3.10.0 introduced ssl verification
- url = str(URL(socketio_url).with_query({"client_session_id": client_session_id()}))
+ url = str(
+ URL(socketio_url()).with_query({"client_session_id": client_session_id()})
+ )
headers = {}
- if security_cookie:
+ cookie = await security_cookie_factory()
+ if cookie:
# WARNING: engineio fails with empty cookies. Expects "key=value"
- headers.update({"Cookie": security_cookie})
+ headers.update({"Cookie": cookie})
socket_connect_error = mocker.Mock()
sio.on("connect_error", handler=socket_connect_error)
From bb0788f465412866a72d7df08e224dd380c5ec2e Mon Sep 17 00:00:00 2001
From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com>
Date: Thu, 2 Jul 2020 16:16:51 +0200
Subject: [PATCH 15/43] Homogenize studies and services (#1569)
- "Services" tab renamed to "Discover". Under this tab, all the available resources (templates and services) are listed.
- To start a new study:
- If a templates is selected, a study containing that pipeline will be initialized (same as before)
- If a service is selected, a study containing that single node will be initialized (new feature). Since this will be a single node study, the service/app will be automatically maximized. Providing a more app-like user experience.
In addition, the content of the Discover tab is also shipped by the frontend compiler with its own customized navigation bar as standalone application that could be served as the Discover/Explore website.
- Plotly removed is not used
- Removed unused images
---
.github/workflows/ci-testing-deploy.yml | 3 +
services/web/client/.eslintrc.json | 1 +
services/web/client/Manifest.json | 1 -
services/web/client/compile.json | 12 +
.../source/class/explorer/Application.js | 60 ++
.../client/source/class/explorer/MainPage.js | 55 ++
.../source/class/explorer/NavigationBar.js | 35 +
.../component/message/FlashMessenger.js | 2 +-
.../osparc/component/metadata/ServiceInfo.js | 19 +-
.../component/metadata/ServiceInfoWindow.js | 14 +-
.../metadata/ServiceStarterWindow.js | 117 ++++
.../component/metadata/StudyDetailsEditor.js | 24 +-
.../class/osparc/component/node/NodeView.js | 5 +-
.../osparc/component/service/ServiceJumbo.js | 2 +-
.../component/widget/CollapsibleView.js | 15 +-
.../osparc/component/widget/PlotlyWidget.js | 86 ---
.../class/osparc/dashboard/Dashboard.js | 37 +-
.../class/osparc/dashboard/ExploreBrowser.js | 654 ++++++++++++++++++
.../class/osparc/dashboard/StudyBrowser.js | 377 ++++------
.../dashboard/StudyBrowserButtonItem.js | 19 +-
.../source/class/osparc/data/model/Node.js | 9 +-
.../source/class/osparc/desktop/MainPage.js | 72 +-
.../class/osparc/desktop/NavigationBar.js | 51 +-
.../class/osparc/desktop/StudyEditor.js | 77 ++-
.../client/source/class/osparc/store/Store.js | 2 +-
.../source/class/osparc/utils/Services.js | 48 +-
.../source/class/osparc/wrapper/Plotly.js | 138 ----
.../source/class/osparc/wrapper/WebSocket.js | 11 +-
.../client/source/resource/osparc/img0.jpg | Bin 218892 -> 0 bytes
.../client/source/resource/osparc/img1.jpg | Bin 293584 -> 0 bytes
.../client/source/resource/osparc/img10.jpg | Bin 150560 -> 0 bytes
.../client/source/resource/osparc/img11.jpg | Bin 184219 -> 0 bytes
.../client/source/resource/osparc/img12.jpg | Bin 171344 -> 0 bytes
.../client/source/resource/osparc/img13.jpg | Bin 139003 -> 0 bytes
.../client/source/resource/osparc/img14.jpg | Bin 205875 -> 0 bytes
.../client/source/resource/osparc/img15.jpg | Bin 170411 -> 0 bytes
.../client/source/resource/osparc/img16.jpg | Bin 92737 -> 0 bytes
.../client/source/resource/osparc/img17.jpg | Bin 143159 -> 0 bytes
.../client/source/resource/osparc/img18.jpg | Bin 217118 -> 0 bytes
.../client/source/resource/osparc/img19.jpg | Bin 122084 -> 0 bytes
.../client/source/resource/osparc/img2.jpg | Bin 81734 -> 0 bytes
.../client/source/resource/osparc/img20.jpg | Bin 75891 -> 0 bytes
.../client/source/resource/osparc/img21.jpg | Bin 282835 -> 0 bytes
.../client/source/resource/osparc/img22.jpg | Bin 202543 -> 0 bytes
.../client/source/resource/osparc/img23.jpg | Bin 312572 -> 0 bytes
.../client/source/resource/osparc/img24.jpg | Bin 154187 -> 0 bytes
.../client/source/resource/osparc/img25.jpg | Bin 229343 -> 0 bytes
.../client/source/resource/osparc/img3.jpg | Bin 426911 -> 0 bytes
.../client/source/resource/osparc/img4.jpg | Bin 320725 -> 0 bytes
.../client/source/resource/osparc/img5.jpg | Bin 134645 -> 0 bytes
.../client/source/resource/osparc/img6.jpg | Bin 1806944 -> 0 bytes
.../client/source/resource/osparc/img7.jpg | Bin 225430 -> 0 bytes
.../client/source/resource/osparc/img8.jpg | Bin 120040 -> 0 bytes
.../client/source/resource/osparc/img9.jpg | Bin 266454 -> 0 bytes
.../source/resource/osparc/modelerMockup.png | Bin 243248 -> 0 bytes
.../client/source/resource/osparc/nih-419.png | Bin 133339 -> 0 bytes
.../source/resource/osparc/rat-light.png | Bin 152563 -> 0 bytes
.../web/client/source/resource/osparc/rat.png | Bin 142838 -> 0 bytes
.../resource/osparc/screenshot_container.png | Bin 48315 -> 0 bytes
.../resource/osparc/screenshot_dash-plot.png | Bin 76171 -> 0 bytes
.../osparc/screenshot_file-picker.png | Bin 15033 -> 0 bytes
.../resource/osparc/screenshot_form.png | Bin 28201 -> 0 bytes
.../resource/osparc/screenshot_grid.png | Bin 404727 -> 0 bytes
.../resource/osparc/screenshot_modeler.png | Bin 172977 -> 0 bytes
.../resource/osparc/screenshot_notebook.png | Bin 110894 -> 0 bytes
.../resource/osparc/screenshot_postpro.png | Bin 302443 -> 0 bytes
.../resource/osparc/screenshot_voxels.png | Bin 265939 -> 0 bytes
.../resource/osparc/screenshot_workbench.png | Bin 56452 -> 0 bytes
.../client/source/resource/osparc/test.png | Bin 2478 -> 0 bytes
.../source/resource/osparc/yoonsun-light.png | Bin 75666 -> 0 bytes
.../client/source/resource/osparc/yoonsun.png | Bin 68204 -> 0 bytes
.../source/resource/plotly/plotly.min.js | 7 -
72 files changed, 1284 insertions(+), 669 deletions(-)
create mode 100644 services/web/client/source/class/explorer/Application.js
create mode 100644 services/web/client/source/class/explorer/MainPage.js
create mode 100644 services/web/client/source/class/explorer/NavigationBar.js
create mode 100644 services/web/client/source/class/osparc/component/metadata/ServiceStarterWindow.js
delete mode 100644 services/web/client/source/class/osparc/component/widget/PlotlyWidget.js
create mode 100644 services/web/client/source/class/osparc/dashboard/ExploreBrowser.js
delete mode 100644 services/web/client/source/class/osparc/wrapper/Plotly.js
delete mode 100644 services/web/client/source/resource/osparc/img0.jpg
delete mode 100644 services/web/client/source/resource/osparc/img1.jpg
delete mode 100644 services/web/client/source/resource/osparc/img10.jpg
delete mode 100644 services/web/client/source/resource/osparc/img11.jpg
delete mode 100644 services/web/client/source/resource/osparc/img12.jpg
delete mode 100644 services/web/client/source/resource/osparc/img13.jpg
delete mode 100644 services/web/client/source/resource/osparc/img14.jpg
delete mode 100644 services/web/client/source/resource/osparc/img15.jpg
delete mode 100644 services/web/client/source/resource/osparc/img16.jpg
delete mode 100644 services/web/client/source/resource/osparc/img17.jpg
delete mode 100644 services/web/client/source/resource/osparc/img18.jpg
delete mode 100644 services/web/client/source/resource/osparc/img19.jpg
delete mode 100644 services/web/client/source/resource/osparc/img2.jpg
delete mode 100644 services/web/client/source/resource/osparc/img20.jpg
delete mode 100644 services/web/client/source/resource/osparc/img21.jpg
delete mode 100644 services/web/client/source/resource/osparc/img22.jpg
delete mode 100644 services/web/client/source/resource/osparc/img23.jpg
delete mode 100644 services/web/client/source/resource/osparc/img24.jpg
delete mode 100644 services/web/client/source/resource/osparc/img25.jpg
delete mode 100644 services/web/client/source/resource/osparc/img3.jpg
delete mode 100644 services/web/client/source/resource/osparc/img4.jpg
delete mode 100644 services/web/client/source/resource/osparc/img5.jpg
delete mode 100644 services/web/client/source/resource/osparc/img6.jpg
delete mode 100644 services/web/client/source/resource/osparc/img7.jpg
delete mode 100644 services/web/client/source/resource/osparc/img8.jpg
delete mode 100644 services/web/client/source/resource/osparc/img9.jpg
delete mode 100644 services/web/client/source/resource/osparc/modelerMockup.png
delete mode 100644 services/web/client/source/resource/osparc/nih-419.png
delete mode 100644 services/web/client/source/resource/osparc/rat-light.png
delete mode 100644 services/web/client/source/resource/osparc/rat.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_container.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_dash-plot.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_file-picker.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_form.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_grid.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_modeler.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_notebook.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_postpro.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_voxels.png
delete mode 100644 services/web/client/source/resource/osparc/screenshot_workbench.png
delete mode 100644 services/web/client/source/resource/osparc/test.png
delete mode 100644 services/web/client/source/resource/osparc/yoonsun-light.png
delete mode 100644 services/web/client/source/resource/osparc/yoonsun.png
delete mode 100644 services/web/client/source/resource/plotly/plotly.min.js
diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index 54c10604e1b..56e6856c724 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -748,6 +748,9 @@ jobs:
run: ./ci/github/system-testing/swarm-deploy.bash clean_up
system-test-e2e:
+ # FIXME: skip the job until make it faster and more reliable
+ # https://github.com/ITISFoundation/osparc-simcore/issues/1594
+ if: "false"
name: System-testing e2e
needs: [build-test-images]
runs-on: ${{ matrix.os }}
diff --git a/services/web/client/.eslintrc.json b/services/web/client/.eslintrc.json
index 4d1bc01ce35..a31cb1035a6 100644
--- a/services/web/client/.eslintrc.json
+++ b/services/web/client/.eslintrc.json
@@ -8,6 +8,7 @@
"q": false,
"qxWeb": false,
"osparc": false,
+ "explorer": false,
"Ajv": false,
"objectPath": false
},
diff --git a/services/web/client/Manifest.json b/services/web/client/Manifest.json
index 8ba3118454b..987b71bd015 100644
--- a/services/web/client/Manifest.json
+++ b/services/web/client/Manifest.json
@@ -27,7 +27,6 @@
"svg/svg.path.js",
"jsondiffpatch/jsondiffpatch.min.js",
"jsontreeviewer/jsonTree.js",
- "plotly/plotly.min.js",
"marked/marked.js",
"DOMPurify/purify.min.js"
],
diff --git a/services/web/client/compile.json b/services/web/client/compile.json
index b29b0cdd09c..51f2427c69e 100644
--- a/services/web/client/compile.json
+++ b/services/web/client/compile.json
@@ -32,6 +32,18 @@
],
"bootPath": "source/boot"
},
+ {
+ "class": "explorer.Application",
+ "theme": "osparc.theme.Theme",
+ "name": "explorer",
+ "title": "oSPARC Explorer",
+ "include": [
+ "iconfont.material.Load",
+ "iconfont.fontawesome5.Load",
+ "osparc.theme.OSparcLight"
+ ],
+ "bootPath": "source/boot"
+ },
{
"class": "qxl.apiviewer.Application",
"theme": "qxl.apiviewer.Theme",
diff --git a/services/web/client/source/class/explorer/Application.js b/services/web/client/source/class/explorer/Application.js
new file mode 100644
index 00000000000..62c89ef53a1
--- /dev/null
+++ b/services/web/client/source/class/explorer/Application.js
@@ -0,0 +1,60 @@
+/* ************************************************************************
+
+ explorer - an entry point to oSparc
+
+ https://osparc.io/explorer
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+/**
+ * This is the main application class of "explorer"
+ *
+ * @asset(explorer/*)
+ * @asset(common/common.css)
+ */
+
+qx.Class.define("explorer.Application", {
+ extend: qx.application.Standalone,
+ include: [
+ qx.locale.MTranslation
+ ],
+
+ members: {
+ /**
+ * This method contains the initial application code and gets called
+ * during startup of the application
+ */
+ main: function() {
+ this.base();
+
+ // Enable logging in debug variant
+ if (qx.core.Environment.get("qx.debug")) {
+ // support native logging capabilities, e.g. Firebug for Firefox
+ qx.log.appender.Native;
+ }
+
+ this.__loadMainPage();
+ },
+
+ __loadMainPage: function() {
+ const padding = 0;
+ const view = new explorer.MainPage();
+ const doc = this.getRoot();
+ doc.add(view, {
+ top: padding,
+ bottom: padding,
+ left: padding,
+ right: padding
+ });
+ }
+ }
+});
diff --git a/services/web/client/source/class/explorer/MainPage.js b/services/web/client/source/class/explorer/MainPage.js
new file mode 100644
index 00000000000..c3efc4e5f9d
--- /dev/null
+++ b/services/web/client/source/class/explorer/MainPage.js
@@ -0,0 +1,55 @@
+/* ************************************************************************
+
+ explorer - an entry point to oSparc
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("explorer.MainPage", {
+ extend: qx.ui.core.Widget,
+
+ construct: function() {
+ this.base();
+
+ this._setLayout(new qx.ui.layout.VBox());
+
+ const navBar = this.__navBar = this.__createNavigationBar();
+ this._add(navBar);
+
+ const exploreBrowser = this.__exploreBrowser = this.__createMainView();
+ this._add(exploreBrowser, {
+ flex: 1
+ });
+ },
+
+ members: {
+ __navBar: null,
+ __exploreBrowser: null,
+
+ __createNavigationBar: function() {
+ const navBar = new explorer.NavigationBar();
+ navBar.buildLayout();
+ return navBar;
+ },
+
+ __createMainView: function() {
+ const nStudyItemsPerRow = 5;
+ const studyButtons = osparc.dashboard.StudyBrowserButtonBase;
+ const exploreBrowser = new osparc.dashboard.ExploreBrowser().set({
+ alignX: "center",
+ maxWidth: nStudyItemsPerRow * (studyButtons.ITEM_WIDTH + studyButtons.SPACING) + 10 // padding + scrollbar
+ });
+ return exploreBrowser;
+ }
+ }
+});
diff --git a/services/web/client/source/class/explorer/NavigationBar.js b/services/web/client/source/class/explorer/NavigationBar.js
new file mode 100644
index 00000000000..8460a010200
--- /dev/null
+++ b/services/web/client/source/class/explorer/NavigationBar.js
@@ -0,0 +1,35 @@
+/* ************************************************************************
+
+ explorer - an entry point to oSparc
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("explorer.NavigationBar", {
+ extend: osparc.desktop.NavigationBar,
+
+ members: {
+ buildLayout: function() {
+ this.getChildControl("logo");
+ this.getChildControl("platform");
+
+ this._add(new qx.ui.core.Spacer(), {
+ flex: 1
+ });
+
+ this.getChildControl("user-manual");
+ this.getChildControl("feedback");
+ this.getChildControl("theme-switch");
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/component/message/FlashMessenger.js b/services/web/client/source/class/osparc/component/message/FlashMessenger.js
index 26c581e1825..145995f776e 100644
--- a/services/web/client/source/class/osparc/component/message/FlashMessenger.js
+++ b/services/web/client/source/class/osparc/component/message/FlashMessenger.js
@@ -89,7 +89,7 @@ qx.Class.define("osparc.component.message.FlashMessenger", {
* @param {*} logMessage.logger IDK
*/
log: function(logMessage) {
- let message = logMessage.message;
+ let message = ("message" in logMessage.message) ? logMessage.message["message"] : logMessage.message;
const level = logMessage.level.toUpperCase(); // "DEBUG", "INFO", "WARNING", "ERROR"
let logger = logMessage.logger;
if (logger) {
diff --git a/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js b/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js
index c1b08d3b04b..cee96c3caf9 100644
--- a/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js
+++ b/services/web/client/source/class/osparc/component/metadata/ServiceInfo.js
@@ -42,14 +42,22 @@ qx.Class.define("osparc.component.metadata.ServiceInfo", {
members: {
__metadata: null,
+ setService: function(metadata) {
+ this._removeAll();
+ if (metadata) {
+ this.__metadata = metadata;
+ this.__createServiceInfoView();
+ }
+ },
+
__createServiceInfoView: function() {
const container = new qx.ui.container.Composite(new qx.ui.layout.VBox(8).set({
alignY: "middle"
}));
const hBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(8));
- hBox.add(this.__createThumbnail());
- hBox.add(this.__createExtraInfo(), {
+ hBox.add(this.__createExtraInfo());
+ hBox.add(this.__createThumbnail(), {
flex: 1
});
container.add(hBox);
@@ -68,12 +76,7 @@ qx.Class.define("osparc.component.metadata.ServiceInfo", {
},
__createThumbnail: function() {
- return new qx.ui.basic.Image(this.__metadata.thumbnail || "@FontAwesome5Solid/flask/50").set({
- scale: true,
- width: 300,
- height: 180,
- paddingTop: this.__metadata.thumbnail ? 0 : 60
- });
+ return new osparc.component.widget.Thumbnail(this.__metadata.thumbnail || "@FontAwesome5Solid/flask/50", 300, 180);
},
__createExtraInfo: function() {
diff --git a/services/web/client/source/class/osparc/component/metadata/ServiceInfoWindow.js b/services/web/client/source/class/osparc/component/metadata/ServiceInfoWindow.js
index 2ceb2c6945e..32acad345da 100644
--- a/services/web/client/source/class/osparc/component/metadata/ServiceInfoWindow.js
+++ b/services/web/client/source/class/osparc/component/metadata/ServiceInfoWindow.js
@@ -32,7 +32,7 @@ qx.Class.define("osparc.component.metadata.ServiceInfoWindow", {
const windowWidth = 700;
const windowHeight = 800;
this.set({
- layout: new qx.ui.layout.Grow(),
+ layout: new qx.ui.layout.VBox(10),
autoDestroy: true,
contentPadding: 10,
showMinimize: false,
@@ -42,10 +42,12 @@ qx.Class.define("osparc.component.metadata.ServiceInfoWindow", {
height: windowHeight
});
- const serviceDetails = new osparc.component.metadata.ServiceInfo(metadata);
+ const serviceInfo = this._serviceInfo = new osparc.component.metadata.ServiceInfo(metadata);
const scroll = new qx.ui.container.Scroll();
- scroll.add(serviceDetails);
- this.add(scroll);
+ scroll.add(serviceInfo);
+ this.add(scroll, {
+ flex: 1
+ });
},
properties: {
@@ -53,5 +55,9 @@ qx.Class.define("osparc.component.metadata.ServiceInfoWindow", {
refine: true,
init: "info-service-window"
}
+ },
+
+ members: {
+ _serviceInfo: null
}
});
diff --git a/services/web/client/source/class/osparc/component/metadata/ServiceStarterWindow.js b/services/web/client/source/class/osparc/component/metadata/ServiceStarterWindow.js
new file mode 100644
index 00000000000..b6dfb1c3000
--- /dev/null
+++ b/services/web/client/source/class/osparc/component/metadata/ServiceStarterWindow.js
@@ -0,0 +1,117 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+
+qx.Class.define("osparc.component.metadata.ServiceStarterWindow", {
+ extend: osparc.component.metadata.ServiceInfoWindow,
+
+ /**
+ * @param metadata {Object} Service metadata
+ */
+ construct: function(metadata) {
+ this.base(arguments, metadata);
+
+ this.__service = metadata;
+
+ const toolboxContainer = this.__createToolbox();
+ this.addAt(toolboxContainer, 0);
+ },
+
+ events: {
+ "startService": "qx.event.type.Data"
+ },
+
+ members: {
+ __serviceKey: null,
+ __versionsUIBox: null,
+
+ __createToolbox: function() {
+ const toolboxContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox());
+
+ const versionsList = this.__createVersionsList();
+ toolboxContainer.add(versionsList);
+
+ toolboxContainer.add(new qx.ui.core.Spacer(), {
+ flex: 1
+ });
+
+ const openButton = new qx.ui.form.Button(this.tr("Open")).set({
+ appearance: "md-button"
+ });
+ openButton.addListener("execute", () => {
+ const data = {
+ "serviceKey": this.__service.key,
+ "serviceVersion": this.__getSelectedVersion()
+ };
+ this.fireDataEvent("startService", data);
+ });
+ toolboxContainer.add(openButton);
+
+ return toolboxContainer;
+ },
+
+ __createVersionsList: function() {
+ const versionsList = this.__versionsUIBox = new qx.ui.form.SelectBox().set({
+ font: "text-14"
+ });
+ // populate versions
+ const store = osparc.store.Store.getInstance();
+ store.getServicesDAGs()
+ .then(services => {
+ const versions = osparc.utils.Services.getVersions(services, this.__service.key);
+ if (versions) {
+ let lastItem = null;
+ versions.forEach(version => {
+ lastItem = new qx.ui.form.ListItem(version).set({
+ font: "text-14"
+ });
+ versionsList.add(lastItem);
+ });
+ if (lastItem) {
+ versionsList.setSelection([lastItem]);
+ this.__versionSelected(lastItem.getLabel());
+ }
+ }
+ });
+ versionsList.addListener("changeSelection", e => {
+ const serviceVersion = this.__getSelectedVersion();
+ if (serviceVersion) {
+ this.__versionSelected(serviceVersion);
+ }
+ }, this);
+
+ return versionsList;
+ },
+
+ __getSelectedVersion: function() {
+ const selection = this.__versionsUIBox.getSelection();
+ if (selection && selection.length) {
+ return selection[0].getLabel();
+ }
+ return null;
+ },
+
+ __versionSelected: function(serviceVersion) {
+ const store = osparc.store.Store.getInstance();
+ store.getServicesDAGs()
+ .then(services => {
+ const selectedService = osparc.utils.Services.getFromObject(services, this.__service.key, serviceVersion);
+ this._serviceInfo.setService(selectedService);
+ });
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js b/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js
index b32e43721dd..3054efb6b01 100644
--- a/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js
+++ b/services/web/client/source/class/osparc/component/metadata/StudyDetailsEditor.js
@@ -115,14 +115,7 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
marginTop: 10
});
- const openButton = this.__openButton = new qx.ui.form.Button("Open").set({
- appearance: "md-button"
- });
- osparc.utils.Utils.setIdToWidget(openButton, "openStudyBtn");
- openButton.addListener("execute", () => this.fireEvent("openStudy"), this);
- buttonsLayout.add(openButton);
-
- const modeButton = new qx.ui.form.Button("Edit", "@FontAwesome5Solid/edit/16").set({
+ const modeButton = new qx.ui.form.Button(this.tr("Edit")).set({
appearance: "md-button",
visibility: isCurrentUserOwner && (!isTemplate || canUpdateTemplate) ? "visible" : "excluded"
});
@@ -130,10 +123,6 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
modeButton.addListener("execute", () => this.setMode("edit"), this);
buttonsLayout.add(modeButton);
- buttonsLayout.add(new qx.ui.core.Spacer(), {
- flex: 1
- });
-
if (!isTemplate) {
const permissionsButton = new qx.ui.form.Button(this.tr("Permissions")).set({
appearance: "md-button"
@@ -156,6 +145,17 @@ qx.Class.define("osparc.component.metadata.StudyDetailsEditor", {
}
}
+ buttonsLayout.add(new qx.ui.core.Spacer(), {
+ flex: 1
+ });
+
+ const openButton = this.__openButton = new qx.ui.form.Button(this.tr("Open")).set({
+ appearance: "md-button"
+ });
+ osparc.utils.Utils.setIdToWidget(openButton, "openStudyBtn");
+ openButton.addListener("execute", () => this.fireEvent("openStudy"), this);
+ buttonsLayout.add(openButton);
+
return buttonsLayout;
},
diff --git a/services/web/client/source/class/osparc/component/node/NodeView.js b/services/web/client/source/class/osparc/component/node/NodeView.js
index 3de4b3d566f..633f6e6666b 100644
--- a/services/web/client/source/class/osparc/component/node/NodeView.js
+++ b/services/web/client/source/class/osparc/component/node/NodeView.js
@@ -75,9 +75,8 @@ qx.Class.define("osparc.component.node.NodeView", {
isSettingsGroupShowable: function() {
const node = this.getNode();
- const propsWidget = node.getPropsWidget();
- if (propsWidget) {
- return propsWidget.hasVisibleInputs();
+ if (node && ("getPropsWidget" in node) && node.getPropsWidget()) {
+ return node.getPropsWidget().hasVisibleInputs();
}
return false;
},
diff --git a/services/web/client/source/class/osparc/component/service/ServiceJumbo.js b/services/web/client/source/class/osparc/component/service/ServiceJumbo.js
index 283d9a7624c..985da74931a 100644
--- a/services/web/client/source/class/osparc/component/service/ServiceJumbo.js
+++ b/services/web/client/source/class/osparc/component/service/ServiceJumbo.js
@@ -63,7 +63,7 @@ qx.Class.define("osparc.component.service.ServiceJumbo", {
}
}
if (data.tags && data.tags.length) {
- const category = this.getServiceModel().getCategory() || "";
+ const category = this.getServiceModel().getCategory ? this.getServiceModel().getCategory() : "";
const type = this.getServiceModel().getType() || "";
if (!data.tags.includes(osparc.utils.Utils.capitalize(category.trim())) && !data.tags.includes(osparc.utils.Utils.capitalize(type.trim()))) {
return true;
diff --git a/services/web/client/source/class/osparc/component/widget/CollapsibleView.js b/services/web/client/source/class/osparc/component/widget/CollapsibleView.js
index a6a0956a508..89dee4d3e2b 100644
--- a/services/web/client/source/class/osparc/component/widget/CollapsibleView.js
+++ b/services/web/client/source/class/osparc/component/widget/CollapsibleView.js
@@ -32,9 +32,7 @@ qx.Class.define("osparc.component.widget.CollapsibleView", {
// Title bar
this.__titleBar = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({
alignY: "middle"
- })).set({
- allowGrowX: false
- });
+ }));
this._add(this.__titleBar);
this.__caret = this.getChildControl("caret");
@@ -46,9 +44,6 @@ qx.Class.define("osparc.component.widget.CollapsibleView", {
if (content) {
this.setContent(content);
}
-
- // Attach handlers
- this.__attachEventHandlers();
},
statics: {
@@ -101,10 +96,14 @@ qx.Class.define("osparc.component.widget.CollapsibleView", {
visibility: "excluded"
});
this.__titleBar.addAt(control, 0);
+ // Attach handler
+ this.__attachToggler(control);
break;
case "title":
control = new qx.ui.basic.Atom(this.getTitle());
this.__titleBar.addAt(control, 1);
+ // Attach handler
+ this.__attachToggler(control);
break;
}
return control || this.base(arguments, id);
@@ -198,8 +197,8 @@ qx.Class.define("osparc.component.widget.CollapsibleView", {
return collapsed ? moreCaret + caretSize : lessCaret + caretSize;
},
- __attachEventHandlers: function() {
- this.__titleBar.addListener("tap", () => {
+ __attachToggler: function(control) {
+ control.addListener("tap", () => {
this.toggleCollapsed();
}, this);
}
diff --git a/services/web/client/source/class/osparc/component/widget/PlotlyWidget.js b/services/web/client/source/class/osparc/component/widget/PlotlyWidget.js
deleted file mode 100644
index bb5c1666bcb..00000000000
--- a/services/web/client/source/class/osparc/component/widget/PlotlyWidget.js
+++ /dev/null
@@ -1,86 +0,0 @@
-/* ************************************************************************
-
- osparc - the simcore frontend
-
- https://osparc.io
-
- Copyright:
- 2019 IT'IS Foundation, https://itis.swiss
-
- License:
- MIT: https://opensource.org/licenses/MIT
-
- Authors:
- * Odei Maiz (odeimaiz)
-
-************************************************************************ */
-
-/**
- * Widget containing a Plotly dom element.
- *
- * Data for being plotted can be dynamically set adn rendered.
- *
- * *Example*
- *
- * Here is a little example of how to use the widget.
- *
- *
- * let plotlyWidget = new osparc.component.widget.PlotlyWidget("elemId");
- * this.getRoot().add(plotlyWidget);
- *
- */
-
-qx.Class.define("osparc.component.widget.PlotlyWidget", {
- extend: qx.ui.core.Widget,
-
- /**
- * @param elemId {String} Element id to set it as dom attribute
- */
- construct: function(elemId) {
- this.base();
-
- this.addListenerOnce("appear", () => {
- this.__plotlyWrapper = new osparc.wrapper.Plotly();
- this.__plotlyWrapper.addListener(("plotlyLibReady"), e => {
- let ready = e.getData();
- if (ready) {
- let plotlyPlaceholder = qx.dom.Element.create("div");
- qx.bom.element.Attribute.set(plotlyPlaceholder, "id", elemId);
- qx.bom.element.Style.set(plotlyPlaceholder, "width", "100%");
- qx.bom.element.Style.set(plotlyPlaceholder, "height", "100%");
- this.getContentElement().getDomElement()
- .appendChild(plotlyPlaceholder);
- this.__plotlyWrapper.createEmptyPlot(elemId);
- this.fireDataEvent("plotlyWidgetReady", true);
- } else {
- console.debug("plotly.js was not loaded");
- this.fireDataEvent("plotlyWidgetReady", false);
- }
- }, this);
-
- this.__plotlyWrapper.init();
- }, this);
-
- this.addListener("resize", function() {
- if (this.__plotlyWrapper) {
- this.__plotlyWrapper.resize();
- }
- }, this);
- },
-
- events: {
- "plotlyWidgetReady": "qx.event.type.Data"
- },
-
- members: {
- __plotlyWrapper: null,
-
- resize: function() {
- this.__plotlyWrapper.resize();
- },
-
- setData: function(ids, labels, values, tooltips, title) {
- this.__plotlyWrapper.setData(ids, labels, values, tooltips, title);
- }
- }
-});
diff --git a/services/web/client/source/class/osparc/dashboard/Dashboard.js b/services/web/client/source/class/osparc/dashboard/Dashboard.js
index b44b03c0518..c587df9bdff 100644
--- a/services/web/client/source/class/osparc/dashboard/Dashboard.js
+++ b/services/web/client/source/class/osparc/dashboard/Dashboard.js
@@ -56,27 +56,23 @@ qx.Class.define("osparc.dashboard.Dashboard", {
},
members: {
- __prjBrowser: null,
- __serviceBrowser: null,
- __dataManager: null,
+ __studyBrowser: null,
+ __exploreBrowser: null,
getStudyBrowser: function() {
- return this.__prjBrowser;
+ return this.__studyBrowser;
},
- getServiceBrowser: function() {
- return this.__serviceBrowser;
- },
-
- getDataManager: function() {
- return this.__dataManager;
+ getExploreBrowser: function() {
+ return this.__exploreBrowser;
},
__createMainViewLayout: function() {
[
[this.tr("Studies"), this.__createStudyBrowser],
- [this.tr("Services"), this.__createServiceBrowser],
- [this.tr("Data"), this.__createDataBrowser]
+ // [this.tr("Services"), this.__createServiceBrowser],
+ [this.tr("Data"), this.__createDataBrowser],
+ [this.tr("Discover"), this.__createExploreBrowser]
].forEach(tuple => {
const tabPage = new qx.ui.tabview.Page(tuple[0]).set({
appearance: "dashboard-page"
@@ -92,6 +88,9 @@ qx.Class.define("osparc.dashboard.Dashboard", {
if (viewLayout.resetSelection) {
viewLayout.resetSelection();
}
+ if (viewLayout.resetFilter) {
+ viewLayout.resetFilter();
+ }
}, this);
const scrollerMainView = new qx.ui.container.Scroll();
scrollerMainView.add(viewLayout);
@@ -102,18 +101,18 @@ qx.Class.define("osparc.dashboard.Dashboard", {
},
__createStudyBrowser: function() {
- const studiesView = this.__prjBrowser = new osparc.dashboard.StudyBrowser();
+ const studiesView = this.__studyBrowser = new osparc.dashboard.StudyBrowser();
return studiesView;
},
- __createServiceBrowser: function() {
- const servicesView = this.__serviceBrowser = new osparc.dashboard.ServiceBrowser();
- return servicesView;
- },
-
__createDataBrowser: function() {
- const dataManagerView = this.__dataManager = new osparc.dashboard.DataBrowser();
+ const dataManagerView = new osparc.dashboard.DataBrowser();
return dataManagerView;
+ },
+
+ __createExploreBrowser: function() {
+ const exploreView = this.__exploreBrowser = new osparc.dashboard.ExploreBrowser();
+ return exploreView;
}
}
});
diff --git a/services/web/client/source/class/osparc/dashboard/ExploreBrowser.js b/services/web/client/source/class/osparc/dashboard/ExploreBrowser.js
new file mode 100644
index 00000000000..9936c124f3d
--- /dev/null
+++ b/services/web/client/source/class/osparc/dashboard/ExploreBrowser.js
@@ -0,0 +1,654 @@
+/* ************************************************************************
+
+ osparc - the simcore frontend
+
+ https://osparc.io
+
+ Copyright:
+ 2020 IT'IS Foundation, https://itis.swiss
+
+ License:
+ MIT: https://opensource.org/licenses/MIT
+
+ Authors:
+ * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+/**
+ * @ignore(Headers)
+ * @ignore(fetch)
+ */
+
+qx.Class.define("osparc.dashboard.ExploreBrowser", {
+ extend: qx.ui.core.Widget,
+
+ construct: function() {
+ this.base(arguments);
+
+ this._setLayout(new qx.ui.layout.VBox(10));
+
+ this.__initResources();
+ },
+
+ events: {
+ "startStudy": "qx.event.type.Data"
+ },
+
+ statics: {
+ sortTemplateList: function(studyList) {
+ let sortByProperty = function(prop) {
+ return function(a, b) {
+ if (prop === "lastChangeDate") {
+ return new Date(b[prop]) - new Date(a[prop]);
+ }
+ if (typeof a[prop] == "number") {
+ return a[prop] - b[prop];
+ }
+ if (a[prop] < b[prop]) {
+ return -1;
+ } else if (a[prop] > b[prop]) {
+ return 1;
+ }
+ return 0;
+ };
+ };
+ studyList.sort(sortByProperty("lastChangeDate"));
+ }
+ },
+
+ members: {
+ __loadingIFrame: null,
+ __exploreFilters: null,
+ __templateStudyContainer: null,
+ __servicesContainer: null,
+ __templateStudies: null,
+ __services: null,
+
+ /**
+ * Function that resets the selected item
+ */
+ resetSelection: function() {
+ if (this.__templateStudyContainer) {
+ this.__templateStudyContainer.resetSelection();
+ }
+ if (this.__servicesContainer) {
+ this.__servicesContainer.resetSelection();
+ }
+ },
+ resetFilter: function() {
+ if (this.__exploreFilters) {
+ this.__exploreFilters.reset();
+ }
+ },
+
+ __checkLoggedIn: function() {
+ let isLogged = osparc.auth.Manager.getInstance().isLoggedIn();
+ if (!isLogged) {
+ const msg = this.tr("You need to be logged in to create a study");
+ osparc.component.message.FlashMessenger.getInstance().logAs(msg);
+ }
+ return isLogged;
+ },
+
+ /**
+ * Function that asks the backend for the list of template studies and sets it
+ */
+ __reloadTemplateStudies: function() {
+ if (osparc.data.Permissions.getInstance().canDo("studies.templates.read")) {
+ osparc.data.Resources.get("templates")
+ .then(templates => {
+ this.__resetTemplateList(templates);
+ })
+ .catch(err => {
+ console.error(err);
+ });
+ } else {
+ this.__resetTemplateList([]);
+ }
+ },
+
+ /**
+ * Function that asks the backend for the list of services and sets it
+ */
+ __reloadServices: function() {
+ const store = osparc.store.Store.getInstance();
+ store.getServicesDAGs()
+ .then(services => {
+ const servicesList = [];
+ for (const serviceKey in services) {
+ const latestService = osparc.utils.Services.getLatest(services, serviceKey);
+ servicesList.push(latestService);
+ }
+ this.__resetServicesList(servicesList);
+ })
+ .catch(err => {
+ console.error(err);
+ });
+ },
+
+ __initResources: function() {
+ this.__showLoadingPage(this.tr("Discovering Templates and Apps"));
+
+ const servicesTags = this.__getTags();
+ const store = osparc.store.Store.getInstance();
+ const servicesPromise = store.getServicesDAGs(true);
+
+ Promise.all([
+ servicesTags,
+ servicesPromise
+ ])
+ .then(() => {
+ this.__hideLoadingPage();
+ this.__createResourcesLayout();
+ this.__reloadResources();
+ this.__attachEventHandlers();
+ });
+ },
+
+ __reloadResources: function() {
+ this.__reloadTemplateStudies();
+ this.__reloadServices();
+ },
+
+ __getTags: function() {
+ return new Promise((resolve, reject) => {
+ if (osparc.data.Permissions.getInstance().canDo("study.tag")) {
+ osparc.data.Resources.get("tags")
+ .catch(console.error)
+ .finally(() => resolve());
+ } else {
+ resolve();
+ }
+ });
+ },
+
+ __createResourcesLayout: function() {
+ const exploreFilters = this.__exploreFilters = new osparc.component.filter.group.StudyFilterGroup("exploreBrowser").set({
+ paddingTop: 5
+ });
+ this._add(exploreFilters);
+
+ const exploreBrowserLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(16));
+
+ const tempStudyLayout = this.__createTemplateStudiesLayout();
+ exploreBrowserLayout.add(tempStudyLayout);
+
+ const servicesLayout = this.__createServicesLayout();
+ exploreBrowserLayout.add(servicesLayout);
+
+ const scrollStudies = new qx.ui.container.Scroll();
+ scrollStudies.add(exploreBrowserLayout);
+ this._add(scrollStudies, {
+ flex: 1
+ });
+ },
+
+ __createButtonsLayout: function(title, content) {
+ const userStudyLayout = new osparc.component.widget.CollapsibleView(title);
+ userStudyLayout.getChildControl("title").set({
+ font: "title-16"
+ });
+ userStudyLayout._getLayout().setSpacing(8); // eslint-disable-line no-underscore-dangle
+ userStudyLayout.setContent(content);
+ return userStudyLayout;
+ },
+
+ __createTemplateStudiesLayout: function() {
+ const templateStudyContainer = this.__templateStudyContainer = this.__createResourceListLayout();
+ osparc.utils.Utils.setIdToWidget(templateStudyContainer, "templateStudiesList");
+ const tempStudyLayout = this.__createButtonsLayout(this.tr("Templates"), templateStudyContainer);
+ return tempStudyLayout;
+ },
+
+ __createServicesLayout: function() {
+ const servicesContainer = this.__servicesContainer = this.__createResourceListLayout();
+ osparc.utils.Utils.setIdToWidget(servicesContainer, "servicesList");
+ const servicesLayout = this.__createButtonsLayout(this.tr("Apps"), servicesContainer);
+
+ const servicesTitleContainer = servicesLayout.getTitleBar();
+ this.__addNewServiceButtons(servicesTitleContainer);
+
+ return servicesLayout;
+ },
+
+ __attachEventHandlers: function() {
+ const textfield = this.__exploreFilters.getTextFilter().getChildControl("textfield");
+ textfield.addListener("appear", () => {
+ textfield.focus();
+ }, this);
+ },
+
+ __createStudyFromService: function(serviceKey, serviceVersion) {
+ if (!this.__checkLoggedIn()) {
+ return;
+ }
+
+ this.__showLoadingPage(this.tr("Creating Study"));
+ const store = osparc.store.Store.getInstance();
+ store.getServicesDAGs()
+ .then(services => {
+ if (serviceKey in services) {
+ let service = null;
+ if (serviceVersion) {
+ service= osparc.utils.Services.getFromObject(services, serviceKey, serviceVersion);
+ } else {
+ service= osparc.utils.Services.getLatest(services, serviceKey);
+ }
+ const newUuid = osparc.utils.Utils.uuidv4();
+ const minStudyData = osparc.data.model.Study.createMinimumStudyObject();
+ minStudyData["name"] = service["name"];
+ minStudyData["workbench"] = {};
+ minStudyData["workbench"][newUuid] = {
+ "key": service["key"],
+ "version": service["version"],
+ "label": service["name"],
+ "inputs": {},
+ "inputNodes": [],
+ "thumbnail": "",
+ "position": {
+ "x": 50,
+ "y": 50
+ }
+ };
+ const params = {
+ data: minStudyData
+ };
+ osparc.data.Resources.fetch("studies", "post", params)
+ .then(studyData => {
+ this.__startStudy(studyData);
+ })
+ .catch(er => {
+ console.error(er);
+ });
+ }
+ })
+ .catch(err => {
+ console.error(err);
+ });
+ },
+
+ __createStudy: function(minStudyData, templateId) {
+ if (!this.__checkLoggedIn()) {
+ return;
+ }
+
+ this.__showLoadingPage(this.tr("Creating ") + (minStudyData.name || this.tr("Study")));
+
+ const params = {
+ url: {
+ templateId: templateId
+ },
+ data: minStudyData
+ };
+ osparc.data.Resources.fetch("studies", "postFromTemplate", params)
+ .then(studyData => {
+ this.__startStudy(studyData);
+ })
+ .catch(err => {
+ console.error(err);
+ });
+ },
+
+ __startStudy: function(studyData) {
+ if (!this.__checkLoggedIn()) {
+ return;
+ }
+
+ this.__showLoadingPage(this.tr("Starting ") + (studyData.name || this.tr("Study")));
+ osparc.store.Store.getInstance().getServicesDAGs()
+ .then(() => {
+ this.__hideLoadingPage();
+ this.__loadStudy(studyData);
+ });
+ },
+
+ __loadStudy: function(studyData) {
+ const study = new osparc.data.model.Study(studyData);
+ this.fireDataEvent("startStudy", study);
+ },
+
+ __showResourcesLayout: function(show) {
+ this._getChildren().forEach(children => {
+ children.setVisibility(show ? "visible" : "excluded");
+ });
+ },
+
+ __resetTemplateList: function(tempStudyList) {
+ this.__templateStudies = tempStudyList;
+ this.__templateStudyContainer.removeAll();
+ this.self().sortTemplateList(tempStudyList);
+ tempStudyList.forEach(tempStudy => {
+ tempStudy["resourceType"] = "template";
+ this.__templateStudyContainer.add(this.__createStudyItem(tempStudy));
+ });
+ },
+
+ __resetServicesList: function(servicesList) {
+ this.__services = servicesList;
+ this.__servicesContainer.removeAll();
+ servicesList.forEach(service => {
+ service["resourceType"] = "service";
+ this.__servicesContainer.add(this.__createStudyItem(service));
+ });
+ },
+
+ __removeFromStudyList: function(studyId) {
+ const studyContainer = this.__templateStudyContainer;
+ const items = studyContainer.getChildren();
+ for (let i=0; i study.tags.includes(tag.id)) : [];
+
+ const item = new osparc.dashboard.StudyBrowserButtonItem().set({
+ resourceType: study.resourceType,
+ uuid: study.uuid,
+ studyTitle: study.name,
+ studyDescription: study.description,
+ creator: study.prjOwner ? study.prjOwner : null,
+ accessRights: study.accessRights ? study.accessRights : null,
+ lastChangeDate: study.lastChangeDate ? new Date(study.lastChangeDate) : null,
+ icon: study.thumbnail || defaultThumbnail,
+ tags
+ });
+ const menu = this.__getStudyItemMenu(item, study);
+ item.setMenu(menu);
+ item.subscribeToFilterGroup("exploreBrowser");
+ item.addListener("execute", () => {
+ this.__itemClicked(item);
+ }, this);
+
+ return item;
+ },
+
+ __getStudyItemMenu: function(item, studyData) {
+ const menu = new qx.ui.menu.Menu().set({
+ position: "bottom-right"
+ });
+
+ const moreInfoButton = this.__getMoreInfoMenuButton(studyData, true);
+ if (moreInfoButton) {
+ menu.add(moreInfoButton);
+ }
+
+ const deleteButton = this.__getDeleteTemplateMenuButton(studyData, true);
+ if (deleteButton) {
+ menu.addSeparator();
+ menu.add(deleteButton);
+ }
+
+ return menu;
+ },
+
+ __getMoreInfoMenuButton: function(studyData, isTemplate) {
+ const moreInfoButton = new qx.ui.menu.Button(this.tr("More Info"));
+ moreInfoButton.addListener("execute", () => {
+ if (studyData["resourceType"] === "service") {
+ const win = new osparc.component.metadata.ServiceStarterWindow(studyData);
+ win.addListener("startService", e => {
+ const {
+ serviceKey,
+ serviceVersion
+ } = e.getData();
+ this.__createStudyFromService(serviceKey, serviceVersion);
+ win.close();
+ });
+ win.open();
+ win.center();
+ } else {
+ const winWidth = 400;
+ this.__createStudyDetailsEditor(studyData, winWidth);
+ }
+ }, this);
+ return moreInfoButton;
+ },
+
+ __getDeleteTemplateMenuButton: function(studyData) {
+ const isCurrentUserOwner = this.__isUserOwner(studyData);
+ if (!isCurrentUserOwner) {
+ return null;
+ }
+
+ const deleteButton = new qx.ui.menu.Button(this.tr("Delete"));
+ osparc.utils.Utils.setIdToWidget(deleteButton, "studyItemMenuDelete");
+ deleteButton.addListener("execute", () => {
+ const win = this.__createConfirmWindow(false);
+ win.center();
+ win.open();
+ win.addListener("close", () => {
+ if (win.getConfirmed()) {
+ this.__deleteStudy(studyData, true);
+ }
+ }, this);
+ }, this);
+ return deleteButton;
+ },
+
+ __itemClicked: function(item) {
+ if (item.isResourceType("service")) {
+ const serviceKey = item.getUuid();
+ this.__createStudyFromService(serviceKey, null);
+ } else {
+ const matchesId = study => study.uuid === item.getUuid();
+ const studyData = this.__templateStudies.find(matchesId);
+ this.__startStudy(studyData);
+ }
+ this.resetSelection();
+ },
+
+ __createStudyDetailsEditor: function(studyData, winWidth) {
+ const studyDetails = new osparc.component.metadata.StudyDetailsEditor(studyData, true, winWidth);
+ studyDetails.addListener("updateTemplate", () => this.__reloadTemplateStudies(), this);
+ studyDetails.addListener("openStudy", () => {
+ this.__createStudyBtnClkd(studyData);
+ }, this);
+ studyDetails.addListener("updateTags", () => {
+ this.__resetTemplateList(osparc.store.Store.getInstance().getTemplates());
+ });
+
+ const height = 400;
+ const title = this.tr("Study Details Editor");
+ const win = osparc.component.metadata.StudyDetailsEditor.popUpInWindow(title, studyDetails, winWidth, height);
+ studyDetails.addListener("updateTemplate", () => win.close());
+ },
+
+ __createStudyBtnClkd: function(templateData) {
+ const minStudyData = osparc.data.model.Study.createMinimumStudyObject();
+ minStudyData["name"] = templateData.name;
+ minStudyData["description"] = templateData.description;
+ this.__createStudy(minStudyData, templateData.uuid);
+ },
+
+ __updateDeleteTemplatesButton: function(templateDeleteButton) {
+ const templateSelection = this.__templateStudyContainer.getSelection();
+ const canDeleteTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.delete");
+ let allMine = Boolean(templateSelection.length) && canDeleteTemplate;
+ for (let i=0; i 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete"));
+ templateDeleteButton.setVisibility("visible");
+ } else {
+ templateDeleteButton.setVisibility("excluded");
+ }
+ },
+
+ __deleteStudy: function(studyData, isTemplate = false) {
+ const myGid = osparc.auth.Data.getInstance().getGroupId();
+ const collabGids = Object.keys(studyData["accessRights"]);
+ const amICollaborator = collabGids.indexOf(myGid) > -1;
+
+ const params = {
+ url: {
+ projectId: studyData.uuid
+ }
+ };
+ let operationPromise = null;
+ if (collabGids.length > 1 && amICollaborator) {
+ // remove collaborator
+ const permissions = osparc.component.export.Permissions;
+ permissions.removeCollaborator(studyData, myGid);
+ params["data"] = studyData;
+ operationPromise = osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "put", params);
+ } else {
+ // delete study
+ operationPromise = osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "delete", params, studyData.uuid);
+ }
+ operationPromise
+ .then(() => this.__removeFromStudyList(studyData.uuid, isTemplate))
+ .catch(err => {
+ console.error(err);
+ osparc.component.message.FlashMessenger.getInstance().logAs(err, "ERROR");
+ });
+ },
+
+ __createConfirmWindow: function(isMulti) {
+ const msg = isMulti ? this.tr("Are you sure you want to delete the studies?") : this.tr("Are you sure you want to delete the study?");
+ return new osparc.ui.window.Confirmation(msg);
+ },
+
+ __showLoadingPage: function(label) {
+ this.__hideLoadingPage();
+
+ this.__showResourcesLayout(false);
+
+ if (this.__loadingIFrame === null) {
+ this.__loadingIFrame = new osparc.ui.message.Loading(label);
+ } else {
+ this.__loadingIFrame.setHeader(label);
+ }
+ this._add(this.__loadingIFrame, {
+ flex: 1
+ });
+ },
+
+ __hideLoadingPage: function() {
+ if (this.__loadingIFrame) {
+ const idx = this._indexOf(this.__loadingIFrame);
+ if (idx !== -1) {
+ this._remove(this.__loadingIFrame);
+ }
+ }
+
+ this.__showResourcesLayout(true);
+ },
+
+ __isUserOwner: function(studyData) {
+ const myEmail = osparc.auth.Data.getInstance().getEmail();
+ if ("prjOwner" in studyData) {
+ return studyData.prjOwner === myEmail;
+ } else if ("getCreator" in studyData) {
+ return studyData.getCreator() === myEmail;
+ }
+ return false;
+ },
+
+ __addNewServiceButtons: function(layout) {
+ layout.add(new qx.ui.core.Spacer(20, null));
+
+
+ osparc.utils.LibVersions.getPlatformName()
+ .then(platformName => {
+ if (platformName === "dev") {
+ const testDataButton = new qx.ui.form.Button(this.tr("Test with data"), "@FontAwesome5Solid/plus-circle/14");
+ testDataButton.addListener("execute", () => {
+ osparc.utils.Utils.fetchJSON("/resource/form/service-data.json")
+ .then(data => {
+ this.__displayServiceSubmissionForm(data);
+ });
+ });
+ layout.add(testDataButton);
+ }
+ });
+
+ const addServiceButton = new qx.ui.form.Button(this.tr("Submit new service"), "@FontAwesome5Solid/plus-circle/14");
+ addServiceButton.addListener("execute", () => {
+ this.__displayServiceSubmissionForm();
+ });
+ layout.add(addServiceButton);
+ },
+
+ __displayServiceSubmissionForm: function(formData) {
+ const addServiceWindow = new qx.ui.window.Window(this.tr("Submit a new service")).set({
+ appearance: "service-window",
+ modal: true,
+ autoDestroy: true,
+ showMinimize: false,
+ allowMinimize: false,
+ centerOnAppear: true,
+ layout: new qx.ui.layout.Grow(),
+ width: 600,
+ height: 660
+ });
+ const scroll = new qx.ui.container.Scroll();
+ addServiceWindow.add(scroll);
+ const form = new osparc.component.form.json.JsonSchemaForm("/resource/form/service.json", formData);
+ form.addListener("ready", () => {
+ addServiceWindow.open();
+ });
+ form.addListener("submit", e => {
+ const data = e.getData();
+ const headers = new Headers();
+ headers.append("Accept", "application/json");
+ const body = new FormData();
+ body.append("metadata", new Blob([JSON.stringify(data.json)], {
+ type: "application/json"
+ }));
+ if (data.files && data.files.length) {
+ const size = data.files[0].size;
+ const maxSize = 10; // 10 MB
+ if (size > maxSize * 1024 * 1024) {
+ osparc.component.message.FlashMessenger.logAs(`The file is too big. Maximum size is ${maxSize}MB. Please provide with a smaller file or a repository URL.`, "ERROR");
+ return;
+ }
+ body.append("attachment", data.files[0], data.files[0].name);
+ }
+ form.setFetching(true);
+ fetch("/v0/publications/service-submission", {
+ method: "POST",
+ headers,
+ body
+ })
+ .then(resp => {
+ if (resp.ok) {
+ osparc.component.message.FlashMessenger.logAs("Your data was sent to our curation team. We will get back to you shortly.", "INFO");
+ addServiceWindow.close();
+ } else {
+ osparc.component.message.FlashMessenger.logAs("A problem occured while processing your data", "ERROR");
+ }
+ })
+ .finally(() => form.setFetching(false));
+ });
+ scroll.add(form);
+ }
+ }
+});
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
index f07bc02e376..74ecb71ae59 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowser.js
@@ -16,12 +16,9 @@
************************************************************************ */
/**
- * Widget that shows two lists of studies and study editor form:
- * - List1: User's studies (StudyBrowserButtonItem)
- * - List2: Template studies to start from (StudyBrowserButtonItem)
- * - Form: Extra editable information of the selected study
+ * Widget that shows lists user's studies.
*
- * It is the entry point to start editing or creatina new study.
+ * It is the entry point to start editing or creating a new study.
*
* Also takes care of retrieveing the list of services and pushing the changes in the metadata.
*
@@ -76,19 +73,21 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__loadingIFrame: null,
__studyFilters: null,
__userStudyContainer: null,
- __templateStudyContainer: null,
__userStudies: null,
- __templateStudies: null,
__newStudyBtn: null,
/**
* Function that resets the selected item
*/
resetSelection: function() {
+ if (this.__userStudyContainer) {
+ this.__userStudyContainer.resetSelection();
+ }
+ },
+ resetFilter: function() {
if (this.__studyFilters) {
this.__studyFilters.reset();
}
- this.__itemSelected(null);
},
__reloadUserStudy: function(studyId) {
@@ -115,7 +114,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
osparc.data.Resources.get("studies")
.then(studies => {
this.__resetStudyList(studies);
- this.__itemSelected(null);
+ this.resetSelection();
})
.catch(err => {
console.error(err);
@@ -125,32 +124,21 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
}
},
- /**
- * Function that asks the backend for the list of template studies and sets it
- */
- reloadTemplateStudies: function() {
- if (osparc.data.Permissions.getInstance().canDo("studies.templates.read")) {
- osparc.data.Resources.get("templates")
- .then(templates => {
- this.__resetTemplateList(templates);
- this.__itemSelected(null);
- })
- .catch(err => {
- console.error(err);
- });
- } else {
- this.__resetTemplateList([]);
- }
- },
-
__initResources: function() {
- this.__showLoadingPage(this.tr("Loading studies"));
+ this.__showLoadingPage(this.tr("Loading Studies"));
+
+ const servicesTags = this.__getTags();
+ const store = osparc.store.Store.getInstance();
+ const servicesPromise = store.getServicesDAGs(true);
- this.__getTags()
+ Promise.all([
+ servicesTags,
+ servicesPromise
+ ])
.then(() => {
this.__hideLoadingPage();
this.__createStudiesLayout();
- this.__reloadStudies();
+ this.__reloadResources();
this.__attachEventHandlers();
const loadStudyId = osparc.store.Store.getInstance().getCurrentStudyId();
if (loadStudyId) {
@@ -159,10 +147,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
},
- __reloadStudies: function() {
+ __reloadResources: function() {
this.__getActiveStudy();
this.reloadUserStudies();
- this.reloadTemplateStudies();
},
__getActiveStudy: function() {
@@ -203,8 +190,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
this._add(studyFilters);
const studyBrowserLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(16));
- const tempStudyLayout = this.__createTemplateStudiesLayout();
- studyBrowserLayout.add(tempStudyLayout);
const userStudyLayout = this.__createUserStudiesLayout();
studyBrowserLayout.add(userStudyLayout);
@@ -223,24 +208,34 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return newStudyBtn;
},
- __createUserStudiesLayout: function() {
- const userStudyLayout = new osparc.component.widget.CollapsibleView(this.tr("Recent studies"));
+ __createButtonsLayout: function(title, content) {
+ const userStudyLayout = new osparc.component.widget.CollapsibleView(title);
userStudyLayout.getChildControl("title").set({
font: "title-16"
});
userStudyLayout._getLayout().setSpacing(8); // eslint-disable-line no-underscore-dangle
+ userStudyLayout.setContent(content);
+ return userStudyLayout;
+ },
+
+ __createUserStudiesLayout: function() {
+ const userStudyContainer = this.__userStudyContainer = this.__createStudyListLayout();
+ osparc.utils.Utils.setIdToWidget(userStudyContainer, "userStudiesList");
+ const userStudyLayout = this.__createButtonsLayout(this.tr("Recent studies"), userStudyContainer);
- const studiesDeleteButton = this.__createDeleteButton(false);
const studiesTitleContainer = userStudyLayout.getTitleBar();
+
+ // Delete Studies Button
+ const studiesDeleteButton = this.__createDeleteButton(false);
studiesTitleContainer.add(new qx.ui.core.Spacer(20, null));
studiesTitleContainer.add(studiesDeleteButton);
-
- const userStudyContainer = this.__userStudyContainer = this.__createUserStudyList();
- userStudyLayout.setContent(userStudyContainer);
userStudyContainer.addListener("changeSelection", e => {
const nSelected = e.getData().length;
+ this.__newStudyBtn.setEnabled(!nSelected);
this.__userStudyContainer.getChildren().forEach(userStudyItem => {
- userStudyItem.multiSelection(nSelected);
+ if (userStudyItem instanceof osparc.dashboard.StudyBrowserButtonItem) {
+ userStudyItem.multiSelection(nSelected);
+ }
});
this.__updateDeleteStudiesButton(studiesDeleteButton);
}, this);
@@ -248,34 +243,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return userStudyLayout;
},
- __createTemplateStudiesLayout: function() {
- const tempStudyLayout = new osparc.component.widget.CollapsibleView(this.tr("New studies"));
- tempStudyLayout.getChildControl("title").set({
- font: "title-16"
- });
- tempStudyLayout._getLayout().setSpacing(8); // eslint-disable-line no-underscore-dangle
-
- const templateDeleteButton = this.__createDeleteButton(true);
- const templateTitleContainer = tempStudyLayout.getTitleBar();
- templateTitleContainer.add(new qx.ui.core.Spacer(20, null));
- templateTitleContainer.add(templateDeleteButton);
-
- const templateStudyContainer = this.__templateStudyContainer = this.__createTemplateStudyList();
- tempStudyLayout.setContent(templateStudyContainer);
- templateStudyContainer.addListener("changeSelection", e => {
- const nSelected = e.getData().length;
- this.__newStudyBtn.setEnabled(!nSelected);
- this.__templateStudyContainer.getChildren().forEach(templateStudyItem => {
- if (templateStudyItem instanceof osparc.dashboard.StudyBrowserButtonItem) {
- templateStudyItem.multiSelection(nSelected);
- }
- });
- this.__updateDeleteTemplatesButton(templateDeleteButton);
- }, this);
-
- return tempStudyLayout;
- },
-
__getStudyAndStart: function(loadStudyId) {
const params = {
url: {
@@ -295,19 +262,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
},
- __createDeleteButton: function(areTemplates) {
+ __createDeleteButton: function() {
const deleteButton = new qx.ui.form.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/14").set({
visibility: "excluded"
});
osparc.utils.Utils.setIdToWidget(deleteButton, "deleteStudiesBtn");
deleteButton.addListener("execute", () => {
- const selection = areTemplates ? this.__templateStudyContainer.getSelection() : this.__userStudyContainer.getSelection();
+ const selection = this.__userStudyContainer.getSelection();
const win = this.__createConfirmWindow(selection.length > 1);
win.center();
win.open();
win.addListener("close", () => {
if (win.getConfirmed()) {
- this.__deleteStudies(selection.map(button => this.__getStudyData(button.getUuid(), areTemplates)), areTemplates);
+ this.__deleteStudies(selection.map(button => this.__getStudyData(button.getUuid(), false)), false);
}
}, this);
}, this);
@@ -321,14 +288,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
}, this);
const commandEsc = new qx.ui.command.Command("Esc");
commandEsc.addListener("execute", e => {
- this.__itemSelected(null);
+ this.resetSelection();
});
osparc.store.Store.getInstance().addListener("changeTags", () => this.__resetStudyList(osparc.store.Store.getInstance().getStudies()), this);
},
- __createStudyBtnClkd: function(templateData) {
+ __createStudyBtnClkd: function() {
const minStudyData = osparc.data.model.Study.createMinimumStudyObject();
- let title = templateData ? templateData.name : "New study";
+ let title = "New study";
const existingTitles = this.__userStudies.map(study => study.name);
if (existingTitles.includes(title)) {
let cont = 1;
@@ -338,43 +305,28 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
title += ` (${cont})`;
}
minStudyData["name"] = title;
- minStudyData["description"] = templateData ? templateData.description : "";
- this.__createStudy(minStudyData, templateData ? templateData.uuid : null);
+ minStudyData["description"] = "";
+ this.__createStudy(minStudyData, null);
},
- __createStudy: function(minStudyData, templateId) {
+ __createStudy: function(minStudyData) {
this.__showLoadingPage(this.tr("Creating ") + (minStudyData.name || this.tr("Study")));
- if (templateId) {
- const params = {
- url: {
- templateId: templateId
- },
- data: minStudyData
- };
- osparc.data.Resources.fetch("studies", "postFromTemplate", params)
- .then(studyData => {
- this.__startStudy(studyData);
- })
- .catch(err => {
- console.error(err);
- });
- } else {
- const params = {
- data: minStudyData
- };
- osparc.data.Resources.fetch("studies", "post", params)
- .then(studyData => {
- this.__startStudy(studyData);
- })
- .catch(err => {
- console.error(err);
- });
- }
+
+ const params = {
+ data: minStudyData
+ };
+ osparc.data.Resources.fetch("studies", "post", params)
+ .then(studyData => {
+ this.__startStudy(studyData);
+ })
+ .catch(err => {
+ console.error(err);
+ });
},
__startStudy: function(studyData) {
this.__showLoadingPage(this.tr("Starting ") + (studyData.name || this.tr("Study")));
- osparc.store.Store.getInstance().getServicesDAGs(false)
+ osparc.store.Store.getInstance().getServicesDAGs()
.then(() => {
this.__hideLoadingPage();
this.__loadStudy(studyData);
@@ -383,9 +335,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__loadStudy: function(studyData) {
const study = new osparc.data.model.Study(studyData);
- this.__studyEditor = this.__studyEditor || new osparc.desktop.StudyEditor();
- this.__studyEditor.setStudy(study);
- this.fireDataEvent("startStudy", this.__studyEditor);
+ this.fireDataEvent("startStudy", study);
},
__showStudiesLayout: function(show) {
@@ -394,18 +344,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
});
},
- __createUserStudyList: function() {
- const usrLst = this.__userStudyContainer = this.__createStudyListLayout();
- osparc.utils.Utils.setIdToWidget(usrLst, "userStudiesList");
- return usrLst;
- },
-
- __createTemplateStudyList: function() {
- const tempList = this.__templateStudyContainer = this.__createStudyListLayout();
- osparc.utils.Utils.setIdToWidget(tempList, "templateStudiesList");
- return tempList;
- },
-
__resetStudyItem: function(studyData) {
const userStudyList = this.__userStudies;
const index = userStudyList.findIndex(userStudy => userStudy["uuid"] === studyData["uuid"]);
@@ -418,25 +356,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
__resetStudyList: function(userStudyList) {
this.__userStudies = userStudyList;
this.__userStudyContainer.removeAll();
+ this.__userStudyContainer.add(this.__createNewStudyButton());
this.self().sortStudyList(userStudyList);
- for (let i=0; i {
+ userStudy["resourceType"] = "study";
+ this.__userStudyContainer.add(this.__createStudyItem(userStudy));
+ });
osparc.component.filter.UIFilterController.dispatch("studyBrowser");
},
- __resetTemplateList: function(tempStudyList) {
- this.__templateStudies = tempStudyList;
- this.__templateStudyContainer.removeAll();
- this.__templateStudyContainer.add(this.__createNewStudyButton());
- this.self().sortStudyList(tempStudyList);
- for (let i=0; i study.tags.includes(tag.id)) :
- [];
+ __createStudyItem: function(study) {
+ let defaultThumbnail = "";
+ switch (study["resourceType"]) {
+ case "study":
+ defaultThumbnail = "@FontAwesome5Solid/file-alt/50";
+ break;
+ }
+ const tags = study.tags ? osparc.store.Store.getInstance().getTags().filter(tag => study.tags.includes(tag.id)) : [];
+
const item = new osparc.dashboard.StudyBrowserButtonItem().set({
- isTemplate,
+ resourceType: study.resourceType,
uuid: study.uuid,
studyTitle: study.name,
studyDescription: study.description,
creator: study.prjOwner ? study.prjOwner : null,
accessRights: study.accessRights ? study.accessRights : null,
lastChangeDate: study.lastChangeDate ? new Date(study.lastChangeDate) : null,
- icon: study.thumbnail || (isTemplate ? "@FontAwesome5Solid/copy/50" : "@FontAwesome5Solid/file-alt/50"),
+ icon: study.thumbnail || defaultThumbnail,
tags
});
- const menu = this.__getStudyItemMenu(item, study, isTemplate);
+ const menu = this.__getStudyItemMenu(item, study);
item.setMenu(menu);
item.subscribeToFilterGroup("studyBrowser");
-
item.addListener("execute", () => {
- this.__itemClicked(item, isTemplate);
+ this.__itemClicked(item);
}, this);
return item;
},
- __getStudyItemMenu: function(item, studyData, isTemplate) {
+ __getStudyItemMenu: function(item, studyData) {
const menu = new qx.ui.menu.Menu().set({
position: "bottom-right"
});
- const selectButton = this.__getSelectMenuButton(item, isTemplate);
- menu.add(selectButton);
+ const selectButton = this.__getSelectMenuButton(item, studyData);
+ if (selectButton) {
+ menu.add(selectButton);
+ }
- const moreInfoButton = this.__getMoreInfoMenuButton(studyData, isTemplate);
- menu.add(moreInfoButton);
+ const moreInfoButton = this.__getMoreInfoMenuButton(studyData);
+ if (moreInfoButton) {
+ menu.add(moreInfoButton);
+ }
- if (!isTemplate) {
- const shareStudyButton = this.__getPermissionsMenuButton(studyData);
- menu.add(shareStudyButton);
+ const shareStudyButton = this.__getPermissionsMenuButton(studyData);
+ menu.add(shareStudyButton);
- const isCurrentUserOwner = this.__isUserOwner(studyData);
- const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create");
- if (isCurrentUserOwner && canCreateTemplate) {
- const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(studyData);
- menu.add(saveAsTemplateButton);
- }
+ const isCurrentUserOwner = this.__isUserOwner(studyData);
+ const canCreateTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.create");
+ if (isCurrentUserOwner && canCreateTemplate) {
+ const saveAsTemplateButton = this.__getSaveAsTemplateMenuButton(studyData);
+ menu.add(saveAsTemplateButton);
}
- const deleteButton = this.__getDeleteStudyMenuButton(studyData, isTemplate);
+ const deleteButton = this.__getDeleteStudyMenuButton(studyData, false);
if (deleteButton) {
menu.addSeparator();
menu.add(deleteButton);
@@ -511,19 +446,19 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return menu;
},
- __getSelectMenuButton: function(item, isTemplate) {
+ __getSelectMenuButton: function(item, studyData) {
const selectButton = new qx.ui.menu.Button(this.tr("Select"));
selectButton.addListener("execute", () => {
item.setValue(true);
- this.__itemMultiSelected(item, isTemplate);
}, this);
return selectButton;
},
- __getMoreInfoMenuButton: function(studyData, isTemplate) {
+ __getMoreInfoMenuButton: function(studyData) {
const moreInfoButton = new qx.ui.menu.Button(this.tr("More Info"));
moreInfoButton.addListener("execute", () => {
- this.__createStudyDetailsEditor(studyData, isTemplate);
+ const winWidth = 400;
+ this.__createStudyDetailsEditor(studyData, winWidth);
}, this);
return moreInfoButton;
},
@@ -555,7 +490,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
saveAsTemplateView.addListener("finished", e => {
const template = e.getData();
if (template) {
- this.reloadTemplateStudies();
+ console.log("templates should be reloaded");
window.close();
}
}, this);
@@ -564,12 +499,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
return saveAsTemplateButton;
},
- __getDeleteStudyMenuButton: function(studyData, isTemplate) {
- const isCurrentUserOwner = this.__isUserOwner(studyData);
- if (isTemplate && !isCurrentUserOwner) {
- return null;
- }
-
+ __getDeleteStudyMenuButton: function(studyData) {
const deleteButton = new qx.ui.menu.Button(this.tr("Delete"));
osparc.utils.Utils.setIdToWidget(deleteButton, "studyItemMenuDelete");
deleteButton.addListener("execute", () => {
@@ -578,80 +508,35 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
win.open();
win.addListener("close", () => {
if (win.getConfirmed()) {
- this.__deleteStudy(studyData, isTemplate);
+ this.__deleteStudy(studyData);
}
}, this);
}, this);
return deleteButton;
},
- __getStudyData: function(id, isTemplate) {
+ __getStudyData: function(id) {
const matchesId = study => study.uuid === id;
- return isTemplate ? this.__templateStudies.find(matchesId) : this.__userStudies.find(matchesId);
+ return this.__userStudies.find(matchesId);
},
- __itemClicked: function(item, isTemplate) {
+ __itemClicked: function(item) {
const selected = item.getValue();
- const studyData = this.__getStudyData(item.getUuid(), isTemplate);
- const studyContainer = isTemplate ? this.__templateStudyContainer : this.__userStudyContainer;
-
- const selection = studyContainer.getSelection();
- if (selection.length > 1) {
- this.__itemMultiSelected(item, isTemplate);
- } else if (selected) {
- isTemplate ? this.__createStudyBtnClkd(studyData) : this.__startStudy(studyData);
+ const selection = this.__userStudyContainer.getSelection();
+ if (selected && selection.length === 1) {
+ const studyData = this.__getStudyData(item.getUuid(), false);
+ this.__startStudy(studyData);
}
},
- __itemMultiSelected: function(item, isTemplate) {
- // Selection logic
- if (item.getValue()) {
- this.__itemSelected(item.getUuid());
- } else if (isTemplate) {
- const selection = this.__templateStudyContainer.getSelection();
- if (selection.length) {
- this.__itemSelected(selection[0].getUuid());
- } else {
- this.__itemSelected(null);
- }
- } else {
- const selection = this.__userStudyContainer.getSelection();
- if (selection.length) {
- this.__itemSelected(selection[0].getUuid());
- } else {
- this.__itemSelected(null);
- }
- }
- },
-
- __itemSelected: function(studyId) {
- if (studyId === null) {
- if (this.__userStudyContainer) {
- this.__userStudyContainer.resetSelection();
- }
- if (this.__templateStudyContainer) {
- this.__templateStudyContainer.resetSelection();
- }
- }
- },
-
- __createStudyDetailsEditor: function(studyData, isTemplate, winWidth) {
- const studyDetails = new osparc.component.metadata.StudyDetailsEditor(studyData, isTemplate, winWidth);
+ __createStudyDetailsEditor: function(studyData, winWidth) {
+ const studyDetails = new osparc.component.metadata.StudyDetailsEditor(studyData, false, winWidth);
studyDetails.addListener("updateStudy", () => this.reloadUserStudies(), this);
- studyDetails.addListener("updateTemplate", () => this.reloadTemplateStudies(), this);
studyDetails.addListener("openStudy", () => {
- if (isTemplate) {
- this.__createStudyBtnClkd(studyData);
- } else {
- this.__startStudy(studyData);
- }
+ this.__startStudy(studyData);
}, this);
studyDetails.addListener("updateTags", () => {
- if (isTemplate) {
- this.__resetTemplateList(osparc.store.Store.getInstance().getTemplates());
- } else {
- this.__resetStudyList(osparc.store.Store.getInstance().getStudies());
- }
+ this.__resetStudyList(osparc.store.Store.getInstance().getStudies());
});
const height = 400;
@@ -659,7 +544,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const win = osparc.component.metadata.StudyDetailsEditor.popUpInWindow(title, studyDetails, winWidth, height);
[
"updateStudy",
- "updateTemplate",
"openStudy"
].forEach(event => studyDetails.addListener(event, () => win.close()));
},
@@ -674,28 +558,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
}
},
- __updateDeleteTemplatesButton: function(templateDeleteButton) {
- const templateSelection = this.__templateStudyContainer.getSelection();
- const canDeleteTemplate = osparc.data.Permissions.getInstance().canDo("studies.template.delete");
- let allMine = Boolean(templateSelection.length) && canDeleteTemplate;
- for (let i=0; i 1 ? this.tr("Delete selected")+" ("+nSelected+")" : this.tr("Delete"));
- templateDeleteButton.setVisibility("visible");
- } else {
- templateDeleteButton.setVisibility("excluded");
- }
- },
-
- __deleteStudy: function(studyData, isTemplate = false) {
+ __deleteStudy: function(studyData) {
const myGid = osparc.auth.Data.getInstance().getGroupId();
const collabGids = Object.keys(studyData["accessRights"]);
const amICollaborator = collabGids.indexOf(myGid) > -1;
@@ -711,23 +574,23 @@ qx.Class.define("osparc.dashboard.StudyBrowser", {
const permissions = osparc.component.export.Permissions;
permissions.removeCollaborator(studyData, myGid);
params["data"] = studyData;
- operationPromise = osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "put", params);
+ operationPromise = osparc.data.Resources.fetch("studies", "put", params);
} else {
// delete study
- operationPromise = osparc.data.Resources.fetch(isTemplate ? "templates" : "studies", "delete", params, studyData.uuid);
+ operationPromise = osparc.data.Resources.fetch("studies", "delete", params, studyData.uuid);
}
operationPromise
- .then(() => this.__removeFromStudyList(studyData.uuid, isTemplate))
+ .then(() => this.__removeFromStudyList(studyData.uuid, false))
.catch(err => {
console.error(err);
osparc.component.message.FlashMessenger.getInstance().logAs(err, "ERROR");
})
- .finally(this.__itemSelected(null));
+ .finally(this.resetSelection());
},
- __deleteStudies: function(studiesData, areTemplates = false) {
+ __deleteStudies: function(studiesData) {
studiesData.forEach(studyData => {
- this.__deleteStudy(studyData, areTemplates);
+ this.__deleteStudy(studyData);
});
},
diff --git a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js
index 55e4e796eb1..b55950ad807 100644
--- a/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js
+++ b/services/web/client/source/class/osparc/dashboard/StudyBrowserButtonItem.js
@@ -50,11 +50,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
},
properties: {
- isTemplate: {
- check: "Boolean",
+ resourceType: {
+ check: ["study", "template", "service"],
nullable: false,
- init: false,
- event: "changeIsTemplate"
+ event: "changeResourceType"
},
menu: {
@@ -117,6 +116,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
__dateFormat: null,
__timeFormat: null,
+ isResourceType: function(resourceType) {
+ return this.getResourceType() === resourceType;
+ },
+
multiSelection: function(on) {
const menuButton = this.getChildControl("menu-button");
menuButton.setVisibility(on ? "excluded" : "visible");
@@ -190,7 +193,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
_applyStudyDescription: function(value, old) {
/*
- if (value !== "" && this.getIsTemplate()) {
+ if (value !== "" && this.isResourceType("template")) {
const label = this.getChildControl("description");
label.setValue(value);
}
@@ -198,7 +201,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
},
_applyLastChangeDate: function(value, old) {
- if (value && !this.getIsTemplate()) {
+ if (value && this.isResourceType("study")) {
const label = this.getChildControl("description2");
let dateStr = null;
if (value.getDate() === (new Date()).getDate()) {
@@ -214,7 +217,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
},
_applyCreator: function(value, old) {
- if (this.getIsTemplate()) {
+ if (this.isResourceType("service") || this.isResourceType("template")) {
const label = this.getChildControl("description2");
label.setValue(value);
}
@@ -254,7 +257,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserButtonItem", {
const gids = Object.keys(value);
for (let j=0; j group["gid"] === gid);
diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js
index 41fd0daddfc..22334d0baf6 100644
--- a/services/web/client/source/class/osparc/data/model/Node.js
+++ b/services/web/client/source/class/osparc/data/model/Node.js
@@ -950,14 +950,15 @@ qx.Class.define("osparc.data.model.Node", {
__onNodeState: function(data) {
const serviceState = data["service_state"];
switch (serviceState) {
- case "starting": {
- this.setInteractiveStatus("starting");
- const interval = 5000;
+ case "idle": {
+ this.setInteractiveStatus("idle");
+ const interval = 1000;
qx.event.Timer.once(() => this.__nodeState(), this, interval);
break;
}
+ case "starting":
case "pulling": {
- this.setInteractiveStatus("pulling");
+ this.setInteractiveStatus(serviceState);
const interval = 5000;
qx.event.Timer.once(() => this.__nodeState(), this, interval);
break;
diff --git a/services/web/client/source/class/osparc/desktop/MainPage.js b/services/web/client/source/class/osparc/desktop/MainPage.js
index d4548a4fec8..52e975202db 100644
--- a/services/web/client/source/class/osparc/desktop/MainPage.js
+++ b/services/web/client/source/class/osparc/desktop/MainPage.js
@@ -20,11 +20,11 @@
*
* It offers a:
* - NavigationBar
- * - Main View (Stack).
- * - Dashboard (Stack):
+ * - Main Stack
+ * - Dashboard Stack
* - StudyBrowser
- * - ServiceBrowser
* - DataManager
+ * - ExploreBrowser
* - StudyEditor
*
*
@@ -41,28 +41,27 @@ qx.Class.define("osparc.desktop.MainPage", {
this._setLayout(new qx.ui.layout.VBox());
- let navBar = this.__navBar = this.__createNavigationBar();
+ const navBar = this.__navBar = this.__createNavigationBar();
this._add(navBar);
- let prjStack = this.__prjStack = this.__createMainView();
- this._add(prjStack, {
+ const mainStack = this.__mainStack = this.__createMainStack();
+ this._add(mainStack, {
flex: 1
});
- },
- events: {},
+ this.__attachHandlers();
+ },
members: {
__navBar: null,
- __prjStack: null,
+ __mainStack: null,
__dashboard: null,
__dashboardLayout: null,
__studyEditor: null,
__createNavigationBar: function() {
- let navBar = new osparc.desktop.NavigationBar().set({
- height: 100
- });
+ const navBar = new osparc.desktop.NavigationBar();
+ navBar.buildLayout();
navBar.addListener("dashboardPressed", () => {
if (!osparc.data.Permissions.getInstance().canDo("studies.user.create", true)) {
@@ -80,21 +79,30 @@ qx.Class.define("osparc.desktop.MainPage", {
this.__studyEditor.nodeSelected(nodeId);
}
}, this);
+
return navBar;
},
- __createMainView: function() {
- const prjStack = new qx.ui.container.Stack();
+ __createMainStack: function() {
+ const mainStack = new qx.ui.container.Stack().set({
+ alignX: "center"
+ });
+
+ const dashboardLayout = this.__createDashboardStack();
+ mainStack.add(dashboardLayout);
+
+ const studyEditor = this.__studyEditor = new osparc.desktop.StudyEditor();
+ mainStack.add(studyEditor);
+ return mainStack;
+ },
+
+ __createDashboardStack: function() {
const nStudyItemsPerRow = 5;
const studyButtons = osparc.dashboard.StudyBrowserButtonBase;
const dashboard = this.__dashboard = new osparc.dashboard.Dashboard().set({
- width: nStudyItemsPerRow * (studyButtons.ITEM_WIDTH + studyButtons.SPACING) + 25 // padding + scrollbar
+ width: nStudyItemsPerRow * (studyButtons.ITEM_WIDTH + studyButtons.SPACING) + 10 // padding + scrollbar
});
- dashboard.getStudyBrowser().addListener("startStudy", e => {
- const studyEditor = e.getData();
- this.__startStudyEditor(studyEditor);
- }, this);
const dashboardLayout = this.__dashboardLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5));
dashboardLayout.add(new qx.ui.core.Widget(), {
@@ -104,14 +112,25 @@ qx.Class.define("osparc.desktop.MainPage", {
dashboardLayout.add(new qx.ui.core.Widget(), {
flex: 1
});
+ return dashboardLayout;
+ },
- prjStack.add(dashboardLayout);
-
- return prjStack;
+ __attachHandlers: function() {
+ [
+ this.__dashboard.getStudyBrowser(),
+ this.__dashboard.getExploreBrowser()
+ ].forEach(studyStarter => {
+ studyStarter.addListener("startStudy", e => {
+ this.__studyEditor = this.__studyEditor || new osparc.desktop.StudyEditor();
+ const study = e.getData();
+ this.__studyEditor.setStudy(study);
+ this.__startStudyEditor(this.__studyEditor);
+ }, this);
+ });
},
__showDashboard: function() {
- this.__prjStack.setSelection([this.__dashboardLayout]);
+ this.__mainStack.setSelection([this.__dashboardLayout]);
this.__dashboard.getStudyBrowser().reloadUserStudies();
this.__navBar.setPathButtons([]);
if (this.__studyEditor) {
@@ -121,13 +140,14 @@ qx.Class.define("osparc.desktop.MainPage", {
__startStudyEditor: function(studyEditor) {
if (this.__studyEditor) {
- this.__prjStack.remove(this.__studyEditor);
+ this.__mainStack.remove(this.__studyEditor);
}
this.__studyEditor = studyEditor;
let study = studyEditor.getStudy();
- this.__prjStack.add(this.__studyEditor);
- this.__prjStack.setSelection([this.__studyEditor]);
+ this.__mainStack.add(this.__studyEditor);
+ this.__mainStack.setSelection([this.__studyEditor]);
+
this.__navBar.setStudy(study);
this.__navBar.setPathButtons(this.__studyEditor.getCurrentPathIds());
diff --git a/services/web/client/source/class/osparc/desktop/NavigationBar.js b/services/web/client/source/class/osparc/desktop/NavigationBar.js
index 38f960a2753..26b104915fc 100644
--- a/services/web/client/source/class/osparc/desktop/NavigationBar.js
+++ b/services/web/client/source/class/osparc/desktop/NavigationBar.js
@@ -49,33 +49,10 @@ qx.Class.define("osparc.desktop.NavigationBar", {
this.set({
paddingLeft: 10,
paddingRight: 10,
+ height: 50,
maxHeight: 50,
backgroundColor: "background-main-lighter"
});
-
- this.getChildControl("logo");
- this.getChildControl("platform");
-
- this._add(new qx.ui.core.Spacer(20));
-
- this.__dashboardBtn = this.getChildControl("dashboard-button");
- this.__dashboardLabel = this.getChildControl("dashboard-label");
- this.__dashboardContext();
-
- this._add(new qx.ui.core.Spacer(20));
-
- const studyTitle = this.__studyTitle = this.__createStudyTitle();
- this._add(studyTitle);
- this.__mainViewCaptionLayout = this.getChildControl("study-path-container");
-
- this._add(new qx.ui.core.Spacer(), {
- flex: 1
- });
-
- this.getChildControl("user-manual");
- this.getChildControl("feedback");
- this.getChildControl("theme-switch");
- this.getChildControl("user-menu");
},
events: {
@@ -107,6 +84,32 @@ qx.Class.define("osparc.desktop.NavigationBar", {
__studyTitle: null,
__mainViewCaptionLayout: null,
+ buildLayout: function() {
+ this.getChildControl("logo");
+ this.getChildControl("platform");
+
+ this._add(new qx.ui.core.Spacer(20));
+
+ this.__dashboardBtn = this.getChildControl("dashboard-button");
+ this.__dashboardLabel = this.getChildControl("dashboard-label");
+ this.__dashboardContext();
+
+ this._add(new qx.ui.core.Spacer(20));
+
+ const studyTitle = this.__studyTitle = this.__createStudyTitle();
+ this._add(studyTitle);
+ this.__mainViewCaptionLayout = this.getChildControl("study-path-container");
+
+ this._add(new qx.ui.core.Spacer(), {
+ flex: 1
+ });
+
+ this.getChildControl("user-manual");
+ this.getChildControl("feedback");
+ this.getChildControl("theme-switch");
+ this.getChildControl("user-menu");
+ },
+
_createChildControlImpl: function(id) {
let control;
switch (id) {
diff --git a/services/web/client/source/class/osparc/desktop/StudyEditor.js b/services/web/client/source/class/osparc/desktop/StudyEditor.js
index dfd6a02a123..6cc7b3f3580 100644
--- a/services/web/client/source/class/osparc/desktop/StudyEditor.js
+++ b/services/web/client/source/class/osparc/desktop/StudyEditor.js
@@ -72,6 +72,7 @@ qx.Class.define("osparc.desktop.StudyEditor", {
study.openStudy();
this.__initViews();
this.__connectEvents();
+ this.__attachSocketEventHandlers();
this.__startAutoSaveTimer();
this.__openOneNode();
@@ -604,7 +605,7 @@ qx.Class.define("osparc.desktop.StudyEditor", {
this.__scrollContainer.setVisibility("visible");
this.__nodeView._maximizeIFrame(false); // eslint-disable-line no-underscore-dangle
const node = this.getStudy().getWorkbench().getNode(this.__currentNodeId);
- if (node && node.getIFrame() && !this.__nodeView.isSettingsGroupShowable()) {
+ if (node && node.getIFrame() && (node.getInputNodes().length === 0)) {
node.getLoadingPage().maximizeIFrame(true);
node.getIFrame().maximizeIFrame(true);
}
@@ -635,53 +636,59 @@ qx.Class.define("osparc.desktop.StudyEditor", {
controlsBar.addListener("ungroupSelection", this.__ungroupSelection, this);
controlsBar.addListener("startPipeline", this.__startPipeline, this);
controlsBar.addListener("stopPipeline", this.__stopPipeline, this);
+ },
+ __attachSocketEventHandlers: function() {
// Listen to socket
const socket = osparc.wrapper.WebSocket.getInstance();
+
// callback for incoming logs
const slotName = "logger";
- socket.removeSlot(slotName);
- socket.on(slotName, function(jsonString) {
- const data = JSON.parse(jsonString);
- if (Object.prototype.hasOwnProperty.call(data, "project_id") && this.getStudy().getUuid() !== data["project_id"]) {
- // Filtering out logs from other studies
- return;
- }
- this.getLogger().infos(data["Node"], data["Messages"]);
- }, this);
+ if (!socket.slotExists(slotName)) {
+ socket.on(slotName, function(jsonString) {
+ const data = JSON.parse(jsonString);
+ if (Object.prototype.hasOwnProperty.call(data, "project_id") && this.getStudy().getUuid() !== data["project_id"]) {
+ // Filtering out logs from other studies
+ return;
+ }
+ this.getLogger().infos(data["Node"], data["Messages"]);
+ }, this);
+ }
socket.emit(slotName);
// callback for incoming progress
const slotName2 = "progress";
- socket.removeSlot(slotName2);
- socket.on(slotName2, function(data) {
- const d = JSON.parse(data);
- const nodeId = d["Node"];
- const progress = 100 * Number.parseFloat(d["Progress"]).toFixed(4);
- const workbench = this.getStudy().getWorkbench();
- const node = workbench.getNode(nodeId);
- if (node) {
- node.setProgress(progress);
- }
- }, this);
+ if (!socket.slotExists(slotName2)) {
+ socket.on(slotName2, function(data) {
+ const d = JSON.parse(data);
+ const nodeId = d["Node"];
+ const progress = 100 * Number.parseFloat(d["Progress"]).toFixed(4);
+ const workbench = this.getStudy().getWorkbench();
+ const node = workbench.getNode(nodeId);
+ if (node) {
+ node.setProgress(progress);
+ }
+ }, this);
+ }
// callback for node updates
const slotName3 = "nodeUpdated";
- socket.removeSlot(slotName3);
- socket.on(slotName3, data => {
- const d = JSON.parse(data);
- const nodeId = d["Node"];
- const nodeData = d["Data"];
- const workbench = this.getStudy().getWorkbench();
- const node = workbench.getNode(nodeId);
- if (node) {
- node.setOutputData(nodeData.outputs);
- if (nodeData.progress) {
- const progress = Number.parseInt(nodeData.progress);
- node.setProgress(progress);
+ if (!socket.slotExists(slotName3)) {
+ socket.on(slotName3, data => {
+ const d = JSON.parse(data);
+ const nodeId = d["Node"];
+ const nodeData = d["Data"];
+ const workbench = this.getStudy().getWorkbench();
+ const node = workbench.getNode(nodeId);
+ if (node) {
+ node.setOutputData(nodeData.outputs);
+ if (nodeData.progress) {
+ const progress = Number.parseInt(nodeData.progress);
+ node.setProgress(progress);
+ }
}
- }
- }, this);
+ }, this);
+ }
}
}
});
diff --git a/services/web/client/source/class/osparc/store/Store.js b/services/web/client/source/class/osparc/store/Store.js
index 941aadd94f7..e0ee27fddf8 100644
--- a/services/web/client/source/class/osparc/store/Store.js
+++ b/services/web/client/source/class/osparc/store/Store.js
@@ -201,7 +201,7 @@ qx.Class.define("osparc.store.Store", {
* This functions does the needed processing in order to have a working list of services and DAGs.
* @param {Boolean} reload ?
*/
- getServicesDAGs: function(reload) {
+ getServicesDAGs: function(reload = false) {
return new Promise((resolve, reject) => {
const allServices = osparc.utils.Services.getBuiltInServices();
const servicesPromise = osparc.data.Resources.get("services", null, !reload);
diff --git a/services/web/client/source/class/osparc/utils/Services.js b/services/web/client/source/class/osparc/utils/Services.js
index 4a4803ec6d2..567eb09c216 100644
--- a/services/web/client/source/class/osparc/utils/Services.js
+++ b/services/web/client/source/class/osparc/utils/Services.js
@@ -233,12 +233,35 @@ qx.Class.define("osparc.utils.Services", {
servicesToCache: function(services) {
this.servicesCached = {};
- this.__addCategoryToServices(services);
+ this.__addExtraInfo(services);
this.servicesCached = Object.assign(this.servicesCached, services);
},
- __addCategoryToServices: function(services) {
- const cats = {
+ __addExtraInfo: function(services) {
+ const categories = this.__getCategories();
+ Object.values(services).forEach(serviceWVersion => {
+ Object.values(serviceWVersion).forEach(service => {
+ service["uuid"] = service["key"];
+ service["prjOwner"] = service["contact"];
+ service["thumbnail"] = "@FontAwesome5Solid/paw/50";
+ service["accessRights"] = {
+ "1": {
+ "read": true,
+ "write": true,
+ "delete": false
+ }
+ };
+ if (Object.prototype.hasOwnProperty.call(categories, service["key"])) {
+ service["category"] = categories[service["key"]]["category"];
+ } else {
+ service["category"] = "Unknown";
+ }
+ });
+ });
+ },
+
+ __getCategories: function(services) {
+ return {
"simcore/services/frontend/file-picker": {
"category": "Data"
},
@@ -400,25 +423,6 @@ qx.Class.define("osparc.utils.Services", {
"category": "PostPro"
}
};
- for (const serviceKey in services) {
- if (Object.prototype.hasOwnProperty.call(services, serviceKey)) {
- let service = services[serviceKey];
- if (serviceKey in cats) {
- for (const version in service) {
- let serv = service[version];
- if (Object.prototype.hasOwnProperty.call(service, version)) {
- serv["category"] = cats[serviceKey]["category"];
- } else {
- serv["category"] = "Unknown";
- }
- }
- } else {
- for (const version in service) {
- service[version]["category"] = "Unknown";
- }
- }
- }
- }
}
}
});
diff --git a/services/web/client/source/class/osparc/wrapper/Plotly.js b/services/web/client/source/class/osparc/wrapper/Plotly.js
deleted file mode 100644
index 4afdf2d2c53..00000000000
--- a/services/web/client/source/class/osparc/wrapper/Plotly.js
+++ /dev/null
@@ -1,138 +0,0 @@
-/* ************************************************************************
-
- osparc - the simcore frontend
-
- https://osparc.io
-
- Copyright:
- 2018 IT'IS Foundation, https://itis.swiss
-
- License:
- MIT: https://opensource.org/licenses/MIT
-
- Authors:
- * Odei Maiz (odeimaiz)
-
-************************************************************************ */
-
-/* global Plotly */
-
-/**
- * @asset(plotly/plotly.min.js)
- * @ignore(Plotly)
- */
-
-/**
- * A qooxdoo wrapper for
- * Plotly
- */
-
-qx.Class.define("osparc.wrapper.Plotly", {
- extend: qx.core.Object,
-
- statics: {
- NAME: "plotly",
- VERSION: "1.49.1",
- URL: "https://github.com/plotly/plotly.js"
- },
-
- construct: function() {
- this.base(arguments);
-
- this.__data = [];
- this.__layout = {};
- },
-
- properties: {
- libReady: {
- nullable: false,
- init: false,
- check: "Boolean"
- }
- },
-
- events: {
- "plotlyLibReady": "qx.event.type.Data"
- },
-
- members: {
- __layout: null,
- __data: null,
- __plotId: null,
-
- init: function() {
- // initialize the script loading
- let plotlyPath = "plotly/plotly.min.js";
- let dynLoader = new qx.util.DynamicScriptLoader([
- plotlyPath
- ]);
-
- dynLoader.addListenerOnce("ready", e => {
- console.log(plotlyPath + " loaded");
- this.setLibReady(true);
- this.fireDataEvent("plotlyLibReady", true);
- }, this);
-
- dynLoader.addListener("failed", e => {
- let data = e.getData();
- console.log("failed to load " + data.script);
- this.fireDataEvent("plotlyLibReady", false);
- }, this);
-
- dynLoader.start();
- },
-
- createEmptyPlot: function(id) {
- this.__plotId = id;
- const margin = 25;
- const bigFont = osparc.utils.Utils.getFont(14);
- const smallFont = osparc.utils.Utils.getFont(12);
- this.__layout = {
- titlefont: {
- color: "#bfbfbf",
- size: bigFont.getSize(),
- family: bigFont.getFamily()
- },
- font: {
- color: "#bfbfbf",
- size: smallFont.getSize(),
- family: smallFont.getFamily()
- },
- margin: {
- l: margin,
- r: margin,
- t: margin,
- b: margin,
- pad: 0
- },
- "plot_bgcolor": "rgba(0, 0, 0, 0)",
- "paper_bgcolor": "rgba(0, 0, 0, 0)"
- };
- this.__data = [];
- Plotly.newPlot(this.__plotId, this.__data, this.__layout);
- },
-
- resize: function() {
- let d3 = Plotly.d3;
- var gd3 = d3.select("div[id="+this.__plotId+"]");
- let gd = gd3.node();
- Plotly.Plots.resize(gd);
- },
-
- setData: function(ids, labels, values, tooltips, title) {
- this.__data = [{
- ids: ids,
- labels: labels,
- values: values,
- text: tooltips,
- textinfo: "label+percent",
- hoverinfo: "text",
- showlegend: false,
- type: "pie"
- }];
- this.__layout["title"] = title;
-
- Plotly.react(this.__plotId, this.__data, this.__layout);
- }
- }
-});
diff --git a/services/web/client/source/class/osparc/wrapper/WebSocket.js b/services/web/client/source/class/osparc/wrapper/WebSocket.js
index a257f0263b1..47ab23723a1 100644
--- a/services/web/client/source/class/osparc/wrapper/WebSocket.js
+++ b/services/web/client/source/class/osparc/wrapper/WebSocket.js
@@ -240,10 +240,13 @@ qx.Class.define("osparc.wrapper.WebSocket", {
*/
on: function(name, fn, that) {
this.__name.push(name);
- if (typeof (that) !== "undefined" && that !== null) {
- this.getSocket().on(name, qx.lang.Function.bind(fn, that));
- } else {
- this.getSocket().on(name, fn);
+ const socket = this.getSocket();
+ if (socket) {
+ if (typeof (that) !== "undefined" && that !== null) {
+ socket.on(name, qx.lang.Function.bind(fn, that));
+ } else {
+ socket.on(name, fn);
+ }
}
},
diff --git a/services/web/client/source/resource/osparc/img0.jpg b/services/web/client/source/resource/osparc/img0.jpg
deleted file mode 100644
index ab9e47cc01871b0d276e02268971e9b9dfbb4d46..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
literal 218892
zcmeFa2UHYI(^
zNs@eL;nBzE{oe1p@45HfbN+LFhqFD^Rn^_q-PPSQT|LFgr;}-lY-K+udjQbZ1~>r#
zzy)w1WB?R|AmASWp#iW?VF0j&(Efs*A(#H*!2o##05Aj?!Jj8Y@D!#1;e-e$0Q0Z3
zMc{n^z&{KH0FHA%?+SKqws0mrM>mw4m!q3IlcFNiC2bp+6A~T23<&ZIh)W6xND4|Y
z35ZAv3rh-$fw=$xGzapxd^s3D%fJxJf&PRcTBn=a2p`R#l^+L
zCB(xcBqhKnAf+TBBqX7vA}6OLC#NDMJiY$9okjjNgb)!B5D^odBPKpaK}<|cfewf%
z&XSP+CmEbP2S{;&>p%_^!VF-LLZGCOlL@er_$RL+a-fu0kkbnk7=1wj2nHq=HV!Tx
zJ^`oz|0F^H4CqhdIRJtQfns1{VBuinVnGRoKq4s=^CFo5mV&+wIkQKYAT~w9qpB+`
ztds^XY=wk9Z}uwUuq7IV&4se^AsT48~NPHL;=O|Fuvl7eLVmVNC0w^xDmeQU8cuSlQjgQisZbMpQXst0m2UWEb
zl)TURyfr52SXVdFzoCMNb##eYeR3QY8{As1y?p{a2w7O(YI}Jcqh~KOAy#OfHM&^C
z*>2*oBAca1YZ2*ItZYKS6*%Q0BQ{bq(0&X*jwyu+$uV*iOOD<_yjjws6dttTs%jJo
zb$#AFDkkRDmOJ8Bb4(652@vtW7p@}BJdL5dn@>z~q{@Ad8kg0_Po2q8C-cQ+bKZ!L
zJ3b|=*Rj06e$|h0DW`g#R<{#yKBf>+yRYljrmou?(tKQ3)WMSl`%W1kh@*d4jvdum
zwH?1hM#qXnB2eE%TL4MaQXS7zf=+j6c{2*Hw=<0_iS`ZOlU7}O(=KU-CtKRVP@&Rn
zQQLhC+X{Xk(r2+`VOQmeGom)>+=e&VD$|)cni>I%X#c#GD^_$6cVcbSQ;gQ`{v}k2_dSP38H7
zlydc$l%KHPm$Lj2P4Yvd?PW-1p(1le)k?6;3PZk51l6$4^wh)l+Ohbsfv8ICYgN*u
zOp|T=tE5(ni_(d&wk!|n?vymmBj0jMeo$aMN6E&D$8!B1bq))kPTvK~loEp*23il_
zKFg*E6BN;U=t-t{(ldK?S-~sJCD2(Vd&Hprn`k1f!^r))2#Gq8ZmEdPd&L8_D}mS6
z`i$H9PXJYieI@u;Ij3Tc1HVHrDp1rPXGdx@N~3wwOSI
zyS#*68*4Y%%>AwQe73k{?NNw##k$hggOhAeWKdPpLz
zMmMtW1YH;%GutmKym-5|6IIpK9+IE5;uX@%qtkL%K(Kn#*(R98a3~3q-`T-@AlCAz
zbDOI^B;UB1abNUOp*Y@sv$Q6h-G-cywUDh0iqLAQr4}l_w$d3(O-3ZviwET$^(JlS
zv%RKL+*(uNTe~&ibhpiGgh*KWjz|`cbW|mh;c$zja2l@0RlnjKQRu^Y?7H$UFVULC
znq6?<9$hG?^OWs(&}d_1=-d`g@)qeJCL0SLUJ!WH)_EKb`ya?4ydv2%u2=ym7^y)^%)kN9usFvz$1;cU>|`bnZ)y2PVP&7nY&N
zM?U;wUKv|;BTFWavfn%+45!1n?eu2f^tcpaTxTOv9Tmr}gze{oBudR}hKi~4YCQSL?A^3|jHkUktIIwksMlfuEJ4^8vB;St$S
zg#05*@)=T--5m^g
z7c3On-*+w;Q|`Fre7Ls#@q_CwS2asR@;2X%4vD#*0LGmGUHOv<9y=>uKF#h4SXU*=
z36Y{g*^6pp4c~S9ywc@T8cjdosaW_O(cTwS(UVqk7I)cdXj6tB)y9X2q#xQCnIQcl
zlE$g_N{U|DbAR=KjHt6F2I42hUa^~rPs~2)o#Si&Sjc3uw2^@*KM#|fGumG%JEC3e
z808wPZOhwkOFsb!;La(r?{Vz)c)VqYp47XTk+f|g%J6;6x4;2;xYlpUXu5cR*YG+o
z{goz3KOSD*iI!EuZ62?YZzllGG12?bMeesELM%a@A`Ml8jb&R8*Cr;HgLHl8g70Jx
zm^TzIkg)`(EUlja)cv9H7^-@~Lsq8zT<@2~jlW8)z*7-Ym=gJ&zIjL8jIH&aRvwl4
z4kkW>DgD&~*N0t2GK$XEalsUpS!#Okvec
zCqQ3Fo;@;Hyi~HKRv@i5^y+@=opQOnLYXHATyCR9UNRbs;Ocvfmz{TYLZRVPWnJhr
za{q{?MCAJ%=|V=m)L!Zk!nfv_WPh1BXGmmXLVs9IlAPlNU@xF<(`=7$TSiX5PMVs`TVQLp`)Dp_FcMx-
zp|a^n%#92u**>b+2|QXtz7pS&*sfZqQ#ryN+wzt8$a}1DJw_!B?AG4<4eARy{F;?P
z^?hcWJT44$i!?#N(CTs5w#H=APC?VS&jpb}rx^p0eLh6&;aaC{-EQr@+KP%g+H8x;
z@rl`<&lbLg^@yyj&br`)?5+~G+FC?C!^<(c-P&z_gkxEV$Wuk1s_#1?QVz%W44919FIql+CP@DiV7RM1imHpQ`w*y2QZ!I)9
zt#cgje=eBygomql-&H%u%&hvsYUl)@zkW!}f(LsRahQKh$GA@mwL#t53Iqx*GA!ze
zd~Pc3Eyyj!-;I&>R@-D`-6LmK7Ylr%w7$|0AOA8_V)u?I+Ccq)U0Fj%0uLNoY9S|m
z(DfawWI+pSuWTui@*a8j35$pGV!q!KF15sBL|G`*(OnyZ_`ArD4vUNL2Tyb#ez4k0
zDWw>@j(9+MO_`@~E7|UoZlR$=zqeSXPYuVS(DApBTWY5LONg6?`$P!$pb-0szz-44
zUWyhLv_}r3)ix~8SuPp?L^76grJ#BY&)l=djKj;B^$y%vSS}M?eSELYy{@tNqZi7x
z-?%lVb>iOG;qE?{NsJ6vkzIpmx~0d&QnYKd$5iuHR;g$C3D77z6(qW!y)M^PlBrtK
zzd#4xzjAlhnU5B61uDxoiW&cg~!XPp~;c$F}e{o#4nTZB(XPs
z<61CG#!dIQu7)VES~DZZqfeHm)Y4tBqJH4v$i=jl$AN)IqN^Nx-10dBLw(CzMGK#`
za9OsHHyB;aw(XgpyegNsiku!SfxqEme|KypBv&^J8`?Cy%e`}9qb$6=c@bf9+?xcK
z&8(|?n|jlx95N=~yNaBG=?i)`h2-qg9I1pa^gP!|uM5&{d+babki_Q_bRN600xRRO
zIAm4u_UcfUvR~`--E(Vtenmf7G7<0;bDvU0g5BB7%8{cmpAUB6|1;?h%d*UEXk_C*N3;&
z?_%zFg(Y&de=m@UyO;e&>fx|-<73Q^e*WwppZ!QateyaEmpXE_LO<0es=C9C-KzN3+LuvxRt^xg6bqb!s<-dwwCp<^5SQhmg!T-t=VN0
zzMlZ^SQR1z>+0_^ChciFxeYrkt4NL<-w3vv5xetrXlKNl*G1-Andf(sh4$6DO1?lMf#)1
zI*FPx?ZHYYrIzqtdsxNnBN%BRLHDj+!166-uTi_%^8r_}f
zo@0$EgEwEFniFlMs|+?AVi|7tLBj9^0u7Hef5d!JW{0-;X7e#
z_A7-Cn2z{1sll}@8(ge1I{d2prngjLeqfE;3Zx#a(k?ojY;OWggD$p8zXUN4Vff
zc)uMS9ir`ziuQQ8P%)HE?M1T_>{3_1ABD8IA4gF|ZZPh*8^u-na8hlXL4mh)7#4beA$?kVo!0F>2hP
ze~R3yI{|DQzX;Z2KW(k!Cn-=VwEDq6zjoBEum3>;q8BN+T#)U&E}19|D5{W0V)f{H
zeXbNuzj`r8W?Z`2wLD~4MO`QN1l!q$TQ7uFKb$0UZ
z9J`}aZhOmO($#BBZZ%AgYD^hP%WIp#Dd0Kg3T@X)(7k=`slDum(m88_hvuEA=B=Ta
zAc3y->|A(O_9FKfs4U+aW7(d!7L0=`ul~}uu6?lud*z6&*(V_62j4y$xBb=&3roXS
zRj(AhsOU&akBO>J0Jl*Y-Tb8!V6<|{cuA4kj_e6urXzOAqk(55iw`n(`tUdsUXYGi
z!7_D^Lr#Dw#b$;%50xB6+NWCdYrfh<3rmDGhuyh7V8s7!l-9y2a2?Y8AMoL
zvD2QdByDBAtw(_R8C`DXpyR^LJmmq&d2*|cq_Md^ux$M0@bQ;EjFuQeA-1t@8zP*s
z!@}Oh>b4(zb3NG&0?!Y^c(=M9z1sEZZtfqeIVMu(e)u|a>%FYLT)I4w*pMdHy>m0H
z^{>gB$GSm%8q<_2(GzS6Rz?ht`9AfpAD3GsY8$g)6>|h`GAVq&qsjKP;VQu^NW}{q
zrg*m4@y)`ykUN@$c^ov@FR&$
zdR{0)TQ@h?Qx0_`3JynlBhUmijJ_+ndZObbr)dqH{Lma%oKWaAX!2Q}a8Df@1l&m3
z%m_rC6?T?D&(qD#-Vl!RcGtCahJi#vKo9T)+`vD3fC(@J-~bBn2HeqE0r>8}xWU+I
zHdkCx$iE~awDop!ML8i)vjpjQr`(F#=9*`%A%)HYg9i-$t})qfHP&@UIk_RdK>7tV
zr=mLw`LhyWC2c)_MGPFgw0}huJ&}rkN06w$BU-kuUcV?h4k({r5oLs{(ys{Ej(=x;
z1?J-LS3{h&5@4WuMG=$+z1IvF*fBBLxp~{lyM0DSPcKzZEjFBi6
z76Vsw-~PI;xY{xOJG_CHE9w-l=jSSK@QWSa2M$BIc`DhUY|u406;jVZ&+DgFpkXvK
zct?v3Xu<8zviX-{jliw!e^;y_%=HYf=LwTHN9S;Qor8J0xtlt|!OjIW)CuYEv-yb8
z1Orf}D{d&18^YBM>F`$)!k-kh2*2Tpe&P+B92|cq5d0*7vi;&mj}_?3nG9ze+Thmx
z*#<=sBTssViV_8`R-48g5>@Phn>dGsLt7e??De-#XW@`#*b(3<}j<}}YAU>rb?
ztdoCxK^M^Ee@i;~zoDJB1v+c6ZUE#ohBk}*kHL^ZJ*O4nv`fMk@?}9v;7R
z5RCte4g*(y>H3)%2ESDKv|89+u1+wxmx-$udN4x%Q4ZEA4MdZHKb+Huy3%iD;W&7@
zdAt7uh3)3)ewP=dOE-1M?uV@kY6+!jW)K8x-6QL;yzq?mw3a{4-`W22D{%
zI52^?|JU`0dV9M5W3f4H{C{G!y&V3zAYt3MqKs@D{=r2IgS)yK!TnI`UTQ|#T0f^e
z+@I)wFylG8c?KxBIywAoDU!3wtNlcSHOH}o+uL}bPQiFSa8J~~VK@1S{wF(ut%IVQ
ztDEO{9_>eV<7xvApBz>{9_>eV<7xvApBz>{9_>eV<7xvApBz>{9_>eV<7xv
zApBz>{J%30p6$+E1oz(n(541%!J)U~NB{+J!`lt81vkE#K(m`82&2HC*D2%AzYDOl72GGr!m1SBLR`1l3+1O<6O4jwOmB+AB*2kFJ~GY2KO7tGV?)WXPw
z&Jnbpd81^QK}pZ5fN=kn?Z0iS-x#&E|7TVN0^I{nWMa7X>LrgktsxGNk1vU!1>DDbN&_n%*={7UkVdi)}v_B^@(
zcS%K0xD9AaGy?62KRfDgBlbVPbNMX+9JD>cWth<)zVHYL@CZs6{rvES3*CMEXRO+w
z9@v3K%Kwbn&Cbc*|36{X)|OO9dZBEPFu1ys3^P~@UMD9zNiktTTU!ZRF&Za#GSGqtsIgMoScY9UFmX5zy9w){M{
zVm1Oi0x)4Xj}2VNo<~GNNX$-5)J70y3p=ePnA@2Z#>VycT+ve5fjNo_!-OOxVB$Rd
zb|NA?!a|^Y;^H=N9x=GM2)}@ct*E`YATyI4Ow!)X6JZ1Di4(%c0nX>_4tHQ?`dge+
zFe9Y*|@S!K8vrggr*Zw-C
z{Wfv^WCv$MH27;8mi;+h{<;5Yfj=$qrv?7Bz@HZQ(*plLYJtCB*mBi2~r`Boq3%5)J=#fp4ulLuZGx`gzp!n3#SN{=+*d%-s__{Rq(m`GxGjS27U3
z4Z^OzD0eg-eUyd5)&&h?pwD4Zc!C0gFclhhID@%QdCp*YG;D`Jf~TG^&@#K*A?(ob
zYY@KciHb9+!F^rfC=`z#_;zjM
zX$QXhBHV3|{s3@RXLKrn?7uv-pQ6u#|B?6`-~R-n`}T~!sejsMkn
z@DLw()Das0H_r9}0My?B0IJcyajZGu=EZFQsO$c%KIhT;^4pua6CB2iZqQ%X|CZqw
z=YJ3URvs_9ykGA-OiE|xx|q;f4Fk_ftwp}CZ2y);{W4>zlrsm9NY$Qd$=bY
zd?RHt0ksS~8w++f(#{F}QpMzi{Ks3_Z#n!QCi^W1GVap#3*asA0q6k+fid7SFbgaL>%b0h0Q!5tfe=B+A=D5?
z2pfb8A_x(O$U>AMS`Y(>Im8;`06{{$AvYkmAkmOSNIK*Zq!>~KX@tChyo2;YMj+FW
zMaXx^J_Zzn5Q74P9)lf&4?`S70YekR2*VP?0mB0$0OK}BEXI9|#~5W8br>%&K41)D
ze8yPD*aqEk5J1mEFG6{s;!s7XF7ztY9_k4VhDJeCp^u>z&?aa*v>*Bz`VG2=iH%8)
z$%M&+DTS$qX^aWO^uP?pyoZ^NS%g`K`5Lnya~g9U^B9W=iw=tmOA<>R%M8l_%NHvg
zD;28%s}}1G)*#j_)($o{_IYd$Y%y##Y;$a9>_F^$*qPXs*w3+hv1hQiad2>`aky|~
zaCC8Oa8NkmI1g~jaN2PCaAt9S;1b|6;tJxb;9kW=;@-qf#Vx^Y#qGnL$34J1hsTB|
ziKmNahv$zMhnI)fgx8HXi+6xeiqC;BgKvcIj30)7AHM?s4gNU(CIJBfGl2wwK7kWK
zD8YS#Cj=b?(*%2jWQ3fAiiFn)y$NFp3kjbSju37V5fQNy$rD{ALJ`Fh6%)N8nk4!`
zOis*8tWIo4973EyTu0nTyh?&c!bYM%a-AfAB$cFwq?_d1IlOc1=M>M`oC`jeajx;)
z@VRYLa#8_OT~Y+;J<<}=cccqsIArW(%4BwAx5;wJUXjg^L&;gl70F@bx5)Fz-;mEz
zU{i2Vs8cvoL{pSgbWyBRl2Hm%no#;tW>B_Jem)O9&wgJ0yzBY6^VR2v&hKBKzo2jd
zej)Ng#f82LyHvDP@>FoDD5^@TL8^UfCTeAB7wQD+dg=)pOd3uaeHveyESfho-)Jdl
zrD$QaQMA>xqjVT_oOA|sfpocaAL+K~8R%8%-RU3DzoP%faGpV)!I>eMp_O5gk(^PM
z(TOpcv5j$wiIPcy$(89o(`%;ni*y&&E_z>lbg}E=0W$})G4oC4O6Ca`d=_yQdzNID
zmn`e7jI7$MH&{zqN7?Y$#MvC#(%9PBcG)@DP1z&Z>)97Ls5sO)0ys)J#xD_FlDp(_
z>G7q3%h;F2FS}gMyxhZy!70M&$eF>}&4s}w%H_nB$<@n^#Vx^&zLR4U1EV>xthJ?~uTf
zxFQiE@l0Y*QdANpSuMFH#Ute+RU)+@%^?k!&Xb;zVV1Fxc_i~$mPyuHHb?fe+(kJX
zxyN!}bj>eXoLZh*P1+dR
z8rn(P<2r0QNS%6JKv!KiNq0hzL(fC6Sszb8ZlMT*7jHIZxK
z*M=-PEd4CsU8lM3dcDPp#LC91#v04o)Vjp_&_>thvCX!vy6r>Tb(kV74Yq72Z=W%59Aq4l92Onr98(>?IVn1&J8d|tIcGcXy6CzTx}3P0x>h1^5!Q%C
zBpK2L`Pz-a&CjjZoy$Gk{j-OpM~cU~r>19t7sSibs{us;K1_Y|zT|z!d)i0VC&TB5
zuZeGsAE_VG@1sAbf299>fJ#7qAZ8#e@a2t*H^OdA2Pp(S3WfyR2EPnp3Ar6I8>$*w
z6owz>64rT>|7PONty`wInr_qI4!u2dNA*r=IB~dV_;7?&MD|_GyH0nzB84I!L>@=k
zMZJ&ak4}v~ya&7YK1Lwse#}X%eQZ~pXk1o2c04kEC_yfvD3LTVAaN#1JE<<2DLE>6
zE5$11U8-KB{}n{x~%kJJ%<7F3%*dJzqS(xPYc0y5Oh~S@^lgpy+k6SaER)T}fOi
zMyYq{V%fE_o^qw~h63H@|@?GP5@%Qx~L_gGh6!}=&DbiWnCE8Wj
zE!N%8Bhk~;E8W}LC)f9)U#b7?fcn6PL7l*M>!`FsCf3o|uFycD0G3qmVFcvzF
zH6Ar_ZX$J(Zu0Te<*CZgqMuu)m8L&_G5j(%1Djd?>h<+-_VygnT^=9Fg@Yc(1z3s^z#LmHP
z)DP+(MSJ3V?fa(t^9R0%IEVL-E*~`=YaWlEAixh&{`Mdx4?YW=d5}8u&-5RK7=K^T
zU)OIb(f*P!ei8mt-~{bK3ZjJ}28Dnwm;Ux3r3!kG0v~GuYz)u~(yt2#7aI!`4~h?Z
z(gHn50oYKm{9iE*b|@nMXw0Qw4vzDjl{=FMqbJ)Vydu9#wMmT
z)i3+Le7C!WO1cftt$Ecyb6MPe)0ldbP3WKh%
zs9;JPkE_{5sEtIG?KtqgZY8Op(DE9?y-mb!-#Ha{^D<3x-cz*mtCTbESC>>x?ZtmN
z!1~+!m71BO-`)E~4IQ5ren_aBJNZXGC~kZ=vbe`7sd3dgAS%72>HX-^{y&{m4fMMk
z8W&t?P_?wk=PaRcE-iAPu@5BcNbxz>k
zzRzzUykU;zRj!NA_~9{q-;jB0=EXQ;yyrQO-q3=d*Xk`&>fSFe5IkR66ba{JR;E{a
zd1*+MyGlhmLd)Ya{^LVHCaUW(g$rvamM)q6mtcp;y)C~-ERW-fWyB9p0NHd;YMQ3o
zK_azT6@De@Gsn97f&SD38;imlZ(Hb^o)MjnzsGphq
z%KRvt#JJutKi8RGRtdETH61kkVaj1lspa}ScTg+m))sUZt8%60qG2gpKoDYZSRN2x
z5|wRsS4_C(URIh?lIir!(?la7HGWY=Q0R_Q<9?RR$cVYfzyNAoph`
z-06apTSBay6)#vCFSNufbA-!Rrb=$@Nj{k2GBm$lFtbZmVXXjN47!0Kv!_|Sf~@6y
zxu8_K7khu9+8kGK;3jv9hoV6gpRvqGq(-$^#cRQXfcb46j=^^IqgK_N*+YHS3!3t<
zKD9l=2vf_$%2qjes}ZvERrSW8#Zu0fDQewY83>*7RK%OaVq*UFqiwhdH)?=t!dR%Q
zb0(af=7;ERxRqapfAE!P=Ld+HG_PjD^DLI!xlh%;Vmuex0kG7uZyU5=5q;G&)>v{j
z?SX8{Ey-L*ZY&UGY-HH&gzLH^@zyi%3@3A0)U7|Do8-HkN!x}rer2p2|Iv^;C3(B1
zozsb*XWH>Af-`8kqW9yC!7jS?8iEDd1=b9ud}s-`t}*I51%tQDAV%Tiv9wtEUE=Kk
zSP!xI(w%@?Pn+sz->$6%O@CJmG2NPENU~d_uOFsf$*nK!FzKA24G*uYaha>88kM*h
zo^5X!smWI%E8f{$D>vF4c;}`2+NFD+hF+?ROcpds|B-m}
zioivRo~rMQwB{0y>}|$s=8B7l$}T^O*9VOHBd6myjWbvpFD*Hk*NCKiS76-|GAj3W
z-P#Uu5sp{q<})&Y=}Nt|gg%x{rAelZ^Y^B;_&|Z6QxUeN;sLmdFd+D9McCoDqwbMO
z(vQjS6ji!mx{wzM>tV#J7LL4ZEM$26uvMeedGxhaWsLJYr`$3}y=tm+P(W*y3I_j$
z{tspDNa+cdz01{&xMtD(lwQf+$B8TwrqDy>_BM4liVp?w0lE7JBx?3%_LhRLsX_#H
z7_t}YSC<4iMX%d4rZ_FJIOy9vvSe?H4$*~gB1CO8F|C6~k5Mv0M0cK5=F{c+I~*A=
z4qtBeT7jfT6*zu%WV;h{t=hOQ66V;I@}1jxq0$?vW=%3P>Vjycw?VSn>pkB|d`1*J
z$|$fHo7Fr!kT~@w-O|gDf`V$hLRVhB!*0>eF|V@XqD~@Bi4QAud-k}see@ZexKIW{
zEt_r{Yy4a`NL>QI&e*QQWa-QDuGgpN6F`BZ?v@%JPK3r)DSKr*sA=JcV*a^OGGpWN
zaz=|2Al1lXNgr{I!BSCls%Nw!K=Fcr3_c^>x`vR&_EyTg|u>?F4el~y#T^2
zsH^pXV0D&$!;bxc*6rn_&S`#3rQC}%;hYad?!Hoi-k(Zo)C{m33u>9-pP*tnyU6@Tr_&hfPXo~MDYP}-DA)nDP|>c!4E
z0Zw02Y!sx)6lNn{FSB|od>wk2P(*ORs9isjs}xxxN|?tlAj7g(+7^kwnQtaz&
z7vnf1SCc2UL6;ste5Gk|QLHq3;|@I0M=~%A(;?KAD$uwmC+6l0|
zIn`8-Q+WNDBN6nLaL4!98*~q5C38@z7vSy4_~CupY>RB)9&|Jln)m|Ik}>2qOfiwx
zR+DTbXn!Youkzy;pSLf^#%{KXRpb_S>dy#Lj?e^LCR5Z^VomD9c^0tyWhTJ}X;?eA
z-ITSn(3QHb%z_D_v|MF4hic}ch!wMTycQZTU~{PcjE?lm>-FM8Ic5Me?!IggMc3{g
z<;YQjm0@kQOj^_Cm<3UKhY^k2%md07cAFRuH?QTh;gAT55b4Czv
z_hp#d_EAR;7KI+L2P^FfK<(An+q;k?J4fA
zyRBf)*rlb`s1|Oc()H}VB+KU9Gc5Qj|6Yy$m0V5f<-N}E&Yd=6)YJ!}Y^&7nl|!BJ
z*IR2IcC#f}H-;uBo>t0SZ-2LXIew}9`FvG%x1vJ4tv+iW#mDzroS$aC1urskZJJ63
z!kV%%zJHnBCBT#$c}J>Oum8p>Ty31YPf5`DV<yd%Px
zb0kN+Aa@fQm$3NY9FHB9Y|wSzfZ1lbxvAH4%i1rLvnMR;H+-U5(iit1vcXIp1dU41
zUs_#Bl6+O7-Kmf;RwV*IccrFW3Hqj}S>=<>`-APu+Tb~a^h+_S>8~pXS{KUsIjbJ=
zT4&`QCn?L2e!ys=uhj!y7;;^cM?Dg1AI}}=OFqCAJ`Qr}YSg;LUKL+CeP_+kfLj8S
zpBKNu!xp!JW&fbtOK(69^va>PSbJ&1FZyBJr;z6^Up-|f8`*W4X*%O8Y67kfdQC09
zH~}Pn?B*uu_gC$-rhax0$WUNMwia+Y2P|z~OjG19&U^_)8Y^Yh+w80E#I!MtG&ICc
zilr)!X`BG1hb=r~b<3GAS7|ljvg}Rd8a+269l2c@-=iwXS_b;)OYG6$N4e0GpC5wxKbm!pgGV*}DhqZYX$ALX0f4|*-H
zmY}i8{)l=2uvXYHY@SktGv1OZ6;GPZ1&9O{HYqk4V2>QQ5ElZUzCG?o#X^xfO}Roj
z)>oUUz6purBG)Bwpz@HSO2ejh%ql#r?+Yw>
zO;h`xGcJBBE}v0%Ja=rD6=U%Ii&+Vra_E!Uo)tGiwLw(hUWyNX^4~+8?M&W*w
zyS(?xjH<`Ux2+0mq#PfcP#PNcS`H3=xCnV+TzQVGhq2}$iLJ(yye?3{^vV-{;>V`n
zHSomD9@O#^@&-R1-mxBYh|$HvlObn-iN3h=F)Ea9o&J&(%iMWIpD$L7k)FvOm|+i9
za-Ln{#u*1{q|1N-P1*xPc=ZParwb-|mJ9{**&rgwiA$MTFiazsVhcO_^Fo+|mp^w5
z-1~-iZFuo)LPTnbkKIjOeIo{K0=72x9meGY1`8-v41L_~AerD0-bX|qx!*c{j?o?+
z{?bYDHS8&tybFCdo_>3wFr&vX
zfINS>(QE?oZB!{zKIBeAX|PJlp6TO?>r?ZynJ0kD+@)MWnX*R{a#IOCkt|!QSLEqL
zu)Z&7tU7WEq%>b(i|H8Fj2xGuiLIeG-A$?=SJ1UIct=3Z3G0!V@`aVoRk23-+m$n-rlWGk3@&DCan0(=HnrzHgFJ5J`H
ztBtojh9)H6YIfM_^>?eyCl%nD)_4Le&$oGBCDCa))MYyXr0~}cm$Q#s19z|IY%NzM
zz9Ylgz+VkntO!{MnVWATR~Srjer{McFd`k6?A&3pc$q`aSN~1V=CRkl)dXwBLyw}x
z0~u_aBA-RK4~!c@Hq=`2*dsK{lzk!Z=f7TK{IPhX+Wc@UQzu^jv$Ev!k1Wk&AIhn{
zVY#ClyQ?hA-QtH1JJn&fD>=>OYn4-^wRo^=slX?+>TG?_k{C#DsoS$UZ=vHrShZv^(|
zgtZC3u*Kn%onlP1R4t7}weu2JT?$ZH0}OI_oD4JS{6qwBd`t`4x|(
zX9F2B%s4!OUv$-wUXsnneQ>7laiqojYAzR^w*_J>o1JSbQU=K~Ji5_(?-pm+v|h5M
z0Ah4a(fKEU^k&Ddl52lqzF|QQ>h?$5tB)C*@7AaC8S)%Q$vtiBiF3Jl|8n0riSmWi
z0;LV3UE(A*PQrjI%bp>cB!h0-E8VqB`F=Bxzq9qqb52b!ZX#TdW}BC)V#3hfC{trwT3uYTngcBt$68e+V0p)axl?c
zC+7v*m71-4YY4T#|Ewhyi7`|gB$caaY#x_ubWiOzr`4v-TK4tbwV4P0HJ-+`v4}eH
zA$~gFuyb!n;_U~V=C5s2E;-T9QvwfzZb9>`?DX9%yIX5AqMsPbSws}PvuA&9SK}Ya
zam!CRszg0+d=Rok`CTO8!?7*3{C-5$=Rob+@sUx_x#9DD8_@@MjXLv%7^)PggjXnj
zd}eO;!tGTWdV7ft#WSM$uRdKch!*OPHY3~qy&O9K=+{i$3fNAN~dlk^&1jS
zoIY7q@4j%qaf1btZ0yz!0F_1_v_ei)%lJ;;dAcP-_4l}gLqj5ES6%Y@3-mbdw!J2n
zG*X%?Na)wCkRpgBirCS}?yla!kWj$Ec|_>yP;Xc0xy5K-)pci9p(rC!t`PJlYAuuf
zY5Z0`ZG=3}Cc^!?5hX|7kyw0f9G33P$h{`{q6G<;FXuV^ZuK|6wlx0ssI4TLakqVx
zEr>NL9few7)_V<1<`!@HL!o6}^e%%Efms!0lI+G6fy$jzLaz+fOLQeJL)
z1zvVj?M0>1SW9Dl!=vjWi}jJunhHYg^I_%+RoqX~+8DmQTe)ya;n|2e^=?Mw
zON&y~FY+3c{$!uSO=4K@rI?>@vXGX?(Ijn5eoJ8caf`$ukNtAl*nNS}4&&AP-^n`<71`oDq9RM+<|U6D
z0u`kCQ}k$eO2-IddVQ==GTj!XZSF_}ojT=EL@Z{iF4;x=3QCyvMTn7i*HhDCNmk#n
z=OuMfpZez%I#^aP8SCTn?U_R{%9Fu=$qT&(#g#Z25?a6NjtOd(4??3zg
zLhlAe<2?oqs*)nOyj^SwruU`yB+o2%yT^jo0-NgQ7)+X7c$VI#o!`ZsWi?N%#eXGi
ztww<(kqb-mwHjtz?r6-*_dOuWb_q}cE<+C0$MXz
zqDz){D7R4=?Hp;b#)V_|A9`a^#3iIQ<~TzMBIRA6oQUfSp4o!mS;t5DWV54wr%y7Hw@jStBZu%}tSo!w}xK#K9x3
zPdXkALzC;>n8o4wLfWh%BEs9+!Xg9rzJBac+ISMk5y^7jE&q
z(6Fc|{m?gRGLyzjLxvOqQ2kk*yOo_mF~o27NLyX3%U%7+#&<>^ENxG_w(lDkUMsXLq?5bz!Q7>4Z^oW!upc+
zWUL0YMB`=D>P#5d+%F~~3@$}>m8CKP$uKcA~Nc%3qy{i0jtzr=DQ{g)3vijYCCbdipa=$?xXK
zzm^mhUF_wyc%?w!f#;5s6t{g8;j8Ym!4Mre3(vn4h0V1zf0(H*UF!6aQIBoR6#+}G
zEBET?j=yXvKypz}YGWtsjzGdhVqZ|EP9RdBtI^*!Z%k-_nj28{DScNy0zQ)EhIKzF
z>_=R_oXSN|DEz|dd12+tEGsvI?loSw^j_E1-J@#2EV1fSP11sTqDRsWq29x+JF8EU
zYKxfO8M(%c)K5EqVz}8@vLhnk0l_kR!!42gfFgn^o^J)~s}x<|+u`PG`wV;on
zg{ricyHX>go#(qdMODOJl_H8DheI>Ql7ze1bKi~{lS{8gb+*JOvfKGz2Hhggr0Wf1
zrEmm%O8k0+S6>ro*=C(-bg5Bsvu9?{Nd|LQ$9jH@!;1@%bm1lFAkB%}8Mfd
zc~+Ln`F2On;y}bjMiL{T+m1iD(=bLRDHh&z(
zdW>E!u4S2^{lHpaksMR$Myq;m+hcpDj&|X*-|(#yprnO-%C+CJUCQQB<-9KEK8?o$
zTjk56Fh8WeN__4Cd+|FwaObgzKy0hNrl{gxS>9~iAu6OMtVeL!<$NS_?57tLTfBbw
zH$qwW4f9q_u?%&Uc}!Ac#jMWR%I3e>79!@>Wtb>2dKIor@hcr*JZ%
zaoLo0*xcOPVu&}2e_jf;B^e<7A=}OGNh}GRGfJ^hp;LXd;5_1X{&2)N(>KjCX~ns)
zdxfwO>+2iklok=Bz&C<-vCgyL&d8WzU6yfB*zTL4TQ_b!;2x@)D7@vt2)P*7cr8FS
zJ#;e16mwh3lk3$gBlF~e!?O;P8OP*F#ISI2y+vGqDs;|9@Q#lTjpfr_&Mh*#4Rbo)
zD>rmZQi?q(K2m-;(tm$1#QAZe1y*t1L(kY}q%W#4Au;6>wJXnjeG((;kvJb2MQnbU
z`QJJwev~=vIKCtEe#T*l+~JenLG~(X3H;B6DdokD
z$9Ei(CS|JzbS-i3IvF3}=n9KBJ?wiUINYPEPJZ<gO!f=a%s3rY3Nr)FeFy?}lEcS(w1u
z;}GGSagRm7?ZgO4dM3Y;x;ZD+v*eNKmlT!>tUTo2s?GM-v##8DJD`kzzxGI@BX@vr
zON?P{YATpZ$V5k}0kKdb5aSVE<3|1?`=gF(&WIB4MQ
z_%;1@UGBx>+b*Qbgu7Pk8Ml+xL#jzwL(03`_)`3~3nk&(e1A;rcSbfk|xn;9INuprav$ei{5-*eR5#5F$^ks~$z_HFvF&EN%x(Gcg
zcR6mxIe|gx8$2Ivj+Nbr?&yy+az^dnh&|5FdvB9iV)kU)TR2Hq#)LOjEdGNwmgkI-
zBdeNrnCy4xBvHl8*ioAQt+d>p9&mGkiJHf&>By4uQrYI0ScT+}$m>yGw9y>;;0m1*dTd?$&5G65I*yWcdE7
z`H!Zi=JYv!YTtXWb*+0{3W`#sN%>cz=IM20(l0E7o=j`)e50PrUrW~?c|o$sZnKBr
zNy2D$;~VTE;zXd42&tkKunoXwoDX=h4=RIj**IuYlhJV=CyK@^c|Ug>@-!luyS!=C
z<%lZ7Qm2&hlQOD%PHNvCs}TF@TlcnK-ss7GhO0y7srSrbQ9_Jt&Aw%J4YfI3!X5ex
z50o!B4Nw1Fmk(qA&^MfpW+D^)L{zKZ=uL!)!weY9;&w4ZcwXO#eR5npU@CW0xGr;F
z&C)R}Bo61t_{5t~vc9P-OM%jfYWhs|Pq=ub?tA3`7G9`^Xe)-pFP>Wh6=DqD;>CVHHzX<4z#
z@s-GjVre7m15k@pMl0qbYh_ymoy;RV>swW-9C~~-@>@&Ab>83lyJM(TNRz)rjeUv~
z!DaPIT$cJXnIgO?c}PvxQMDTW;`8|*!25Ox#>8e3NL|u7{hid;GLqhHPG$un+Wm|3
z!N!w!*sRz;ye#E
zJ2aC{H%2fZFU}YW;qkS@XdLQ(B5dkdNP40CcanMJn*i=P`|sg8aw7h>V7>sNlYQ5Y4c5NDw+I~M?$^?
zgLyH@5YVm0*rfZy+hz1dr3acUF~gz3tTDpNuH?sUS9@<&K28`^%RWxtF|P}n{gJuu
zz`0hJM&DYBMEx=^Q?EekrHVaj^aLqhUG!#q#{Pv#6@Z&
z3iz{c6t#d^YLjaY;)`G-w*h7`Qq_Y8=kg{R(z7&M#fm~%ukQpDLh!$2N{|EIZM3BJ
zUQJ8iatbzroLe}!?7+I|{D=l$E5WjxTw7@elhc}l?)UIds805RGB9&`#v$n<_EuhG
zH;gFi>cnvxZfuU^qa(x+s+!;RR6Bm};O}`ZXj^kH#dSgn%;Q`N?qffos{dMgx@32<
zF|}-jA;lYTf#FjFAIO3lWufW4e6NP=lg$E%Uv|=?TTZUUb^A&!PO_=wn2f|{V&B_8O;^>iG$ZY+&*I#wRIN`o>R+C|`M4JzcK5CO*)a$S
z@d()N;lP#Uxyh8=zJ&YGbjVOiXmewWN1;4_{`5bW8z%wJ#XjA#wzv;y{J6s>m^DxQ
zGGlMXFIVCxt_IjIUD675t~x#us!=NS)91%OE3nuz#wx~!Y;|tP(R(r{0%;Lfvk##j
zL2U}maj&!@;(zHVj2%u}Y~^ZxZDaP&97X>FtQ)B0)hEie#d@cI+X|5t*0fM-Ykm$X;1k4oa4_^d`l_xH6OZ#k03aNnsXZvrLuCQFBFcRI8eC?x0Rm
z#gyjVSEI2Md8NLaMIOh4QspohJ+u^4QO@iBm-)IQ&tAKQj#m~i*QA*;pV%@s{1HGK
z+D^1>+Pk^2tfpJ5X0wUDrOTu;%y=YmrQ+IrX~7uuT`jFSVO*#t{AqI2i9?#mktMG`
z)4xf8)jnK_oG-cWvaeRbb*~|5XSV`Al>RF`mwv;;yu5p!0tuYM@Ul7F3rxhca1`DW
z(tuEFCp>%iIdCADS2$WL2M<--4F5J)#{20`{tuvNXvhj7A^Pt;l#gd-tVbV@Ao{&Y
zfQ6l^lX@Z%6PB`P%r&!A+Yl>>Onih?JQJR&8h5m*YfUM`hXN+btSm{(dde3uNZpm>
z9M?IRAcR~}J4!g!M5Z?RNGxHG#Jw)DS$CW90a_-ns-upo$B`sPxf$n9;(2ndao<&+
zGBk(gVZp(mG8lNICY4V(JiR+3kyRAKKb9iZg}?yib(@#xPfAwLs2DN%q-4Yu%9`kI)U34GxIjjHH9%Qlk>>*~V%
zWO4JaVB5U`;}ujssVxW3lOW7(;jGle>4NhHh%~k>+*}WrCCqbv>+gkS
z_PE711eu1LRtn>RLH!HlO5>$MT|F+%Yv%IV+FXQ|
zB6z$-I1=~zVLrYdk)GTlG@
zf7kwV`44(lt}&@^jS*<
zJ*JcMDQIQbaF8f@Aux3HzkdKp0Q}AN(_haF4fTcNay3vn<5!4$8UBUzfuP2uH%$bJ
zX#RS2wq7(}Izq{$j-?kAR-G#(&ZF-e6JdPAQZ21~|87y_Bw(=#NO9wHeHU3_?tVp8
zG_$09Z?njQs*ZC*Q|nZQvT@qA>`cbhd*rf*{wnjU>#6eQJn(qUZ=?Or=*_=b>Uyg6
z_&BVz
z7h%SK5E(p5{827>o`#%RN$|oFo4=I0V5YXv!qL7zY5cYIuHn&Ht6*2*7-Cg5P^$XsPtihNh#8-Pz|LO)RqBp2w}*+h?^L
zVWLYR>ed#+Ko0CJkqjN`vUIhQ>Xtd8HK+G0WqBW5lO!r!C}xJ65{
zBq@>H9+RQViEogN@HLnpI+x=pOO=Goj8>+=ni!Lyj?aF8yCQ=q_77mI?=lVXNFNoV
z{hNlJriZYkR**z^h5&r0vy5{=_lOgSl6h=jyj_Z6ApM;)LxCJVWW_6N`(0;y;1q5o
z!<+1t6he$4Xep#|WYJTm^7*c#3P#pGv*DOgr?-N04JwvRA@wV^M9FopmGZ)*yx#>-
zs4M-Fc?j~ye+ZH3b{1HospHI{UHI?B@!>draQV8K(EDh=XrvSWhR+^oM{9>8&rq8D
zPIC|`;a73FmeyV5`pZ+#>91`w9x{k`tml97z|ZJT(g0*kNwsF`w1|8=B1@zXSYl*S
zaQLW4oBpH|rAQ%yuxJ$pOvS&|`DmyoD`@N=Ah}H_kLoFt@1Q>h5i@BJ8QQHL9Ur{LswviPR7bfR6^
z-I*pTZfKu;isV8S2(<~!FsoplAp)|Y3WvISW4@Dp(0=aH$ha(9Ch2kaPlM?)n2e~}
zH;rg2@FM#+n%zP>MpK!UQkt-27iPpSJ5QnnF_v0+5vhc4I{TX($kz>4rI=ZI*raeV
z1PkH0SUFnG5G}vT+$J=ai)OMGn_4L
zTbiZ6inI<68#h^Ap4{T9B@%c5o0@v8FF%%7@1DtpoYCDXXD)%wcbH%TAL@W|5VU(&
z2pNQ0bu39aswi9o8?|Vh-UeUogQBe~d&(=0aC4V0{eBEX7AtAKo^19pe(0ZVyVxTc
zWaazB0LG6TobyFIpZfCF1Bt_0VF0E0)iUx5KaC3e=d#D7dxXPf-uU$VV<>p#S?1M+EHySt
z_$GD`^-8L94NfS17fM-25=ZO`qMS3%h)?4wAQ>q2E<>zhyQ&tsJ
z^6#dEd@r#C6MdUq!Qs@XHD@hbe$uIIl@IQNiiC_BZYB;dtS7&<3J4BF
ztb?nNkNCp3%Q3%wX@hKEiYy^LwOU#{xc}@2Y9j;})GGVDyP82UdrvPIq~;BzICipj
z_&w|_N+?9($bZJOIdF$CCjt5guz$k_it=USyMzs0EY!p(@k)nqNpi9s}d4#lQm7+0+-CQ-am|Ozn9X~SqVpyWR0bIt$NeH^|
zw@7>_v@~o4hr=6TPnJycKdN*g2CuWx+T+P_;iXYa&s*0*Qpe9s)*^#xic$Ih0ScAi2nSgsAsS
zO)Jip-le~mN$V$*+%`4$RsQw$A=$}!)&Cssa9{PCSMq?R9-atzex)3dYey`R%(I@J
zSW;O&g=?*l=x`&IB{3Sc`5$KLq(`D)+Cl$8zfZGuP9!-K(vlG?uzsxkZhDlSJ~bH+
z#&01B;LaKra|i)JK_H`$=qvTDXCmip^f{wZhrRSwA#8zYa=W7?(Nw)e__nrUDcP_>
zHBP}u0<~i&CYJ@07@8=eP!Ul$6yuszO7&TE`iy&rp)JYiB;|}<%)DX!bV9KbgDwHc
z0c_b!;Y<&&a${aSmL!JG`r0U{e$qh%xe{tDacxSl|4tO`cJDiAc94rKot(>RU#YOM
zRSwNp)s~NqmCNI@(=^6U(hTS>zBvR_1(lQUVy$zh4;iB%`=~QjAVtm`ieoIz6?9M4
za;Y^C(fx-^5$@cRO}_mcsZG^o+}KgHm2h)>2045=?70#!%YOO?XujK!aKDvcXlZ%9
z=~ofS6!_fi>TFWSt<#zC)iy@%?}|Z=f|qmxCnMueF@YYg%b>PwUf!$ETzbn69FRG9
zf4eb~kAtN%>fB0MG}QFbYKJKjQd}1qFw_HrGL!f^xfuO=zaYds;JPNFLrBW_7J9Y0W&5qD_*j`W;muJJ7f*f_o{_h7fT(HzIDGxe7zD#eT=f-dUiT6tJLsQH;njk(kS^%7?_J;dyn@zf>)=K
zL!tRz5G&pe7kEuA1^J%dIjc7KZnlywNnPecJ~(Z=dBKboS&`d7@!@MiBv|a0$mYMV
z6-03hTLVX!IMbA2w;y~{<`!vP$eLVg(qmHNH6&GPlL)kB)c4CdenQC$<~&}=XNo2r
z5f$775QzjShN;TH=DLeFX;XEo`!?n`r0GLh4c=R|5dxEOUZ5l{sb2P0xW$^@dokMh
zhopmV^X{K%d0t6^5!mYo@(FCYal~qRGEN_~1lHiI_nn~`=-ag}0Tny^T3hUSAK$hi
z=|>F*6cMB`eQG}Vdh)1ye2MQJrHi2|O3V0*n5TT?t=CVXxQGtMxEv~m<<*c}(m`^R^)F2xsjq}fQnHlDU@%8WaQSO$_P4KG@CI@
zi%0sC2BvoKD(wz)7|QCX6|1Ndh99$^X!qWIy^U?9;xCN`i;B$gI{yRs&=n*?#yk^T
zSDns2_cyjB*m==K9)CgGV1=UR7FPj7Exa4Z_~%aJ4rC5W;hpZ>v79=nId+r4xx7FFQe`yWK8-J)d*@
z-{R;0-GQojlT@!RgP_>TZ~uLB;LBp7@xS?nu`=ZJnsM#+riMNO2N2`Qgd=T`%4?R)
z{JH?q2MwQ_S*ZwVG_mshwu`;2>5
z#egM^(xtnJ%tZ1o|C)^GD80q|n29R$CXYZcE}9l>FX)m|5dzut)1jtir;=P1vPqn{
zj$`guwXZc)ymPC-zn&=gRdNq`!ufmmr`Sb^V|CfHOa%o)2d=C#CqvMeYRic8xC6`k
zdl!VSE*Ez_3ZXnQbH9F3mm!5J3XwXo2qI{Kiip9V03T?A3hUo(Qk37$m=VWcxEB?zF%xS7hNF#d=e6*RK0^J>m#}
za2%6!7rreVCm*Bj|8^VovxEOoh3AQa#nI1fKh5N8HpT29$X&~{pS^FY;2uehBg)e@C9a{9izGYVTxH?{`-sLrC28{Fxj){GDov*4F64
zmPHOPf$Pvh>Lt;Sg3NSQ8AKG%DgCvVp{v;7X*47*4Ht{IXG^b36}
zz;;T($)O_4hCAp!VSG;HV%3x
zO}cBX^npVij189L>G@7X1$be4s|rnGDLOn<@d4)MZ$$T}$`h@tZ*!S|$p0Wfmj7
zNCZqS4~?b=j1VkMM-J;zFa^?0R!DLf;;0!&c;9e%b?tU3V5Q|{3q)g#`-zk9YkSHU
z;!d&m%DcVlCB)zYb8e&rQdv5_Lz%U?{ko@oOP(PC+GT^^+eM`Xqw_LY>RYD6s>5^_
zgkO9JiYnOi)Ho6SBA%bR7s%2kGFl)FNJ*b4!|p3>iXRj6#4EBi=N7Lt8RXOf&WM-C
z2G~!_H$ti~*tqqA42>|q?>@XkPrTuv3PzFua@A?0vJvl$NZSEt$;TGgHRQ^4NBjBo
z3{O61H=N_v*_FhJIkRi#0`Npd|CA$p+-M=0mr?i3DY&)aD44%UBqnV#9M1?g5-HYB
z-#p@&8-DR`Z7lmR$r5o}UhjZWZqr&rHqmgcGRw
zde?$3Q-8~3ED^7u?}Q@;quvK~`N_X8>C6(}yms)zhsC!vT$}d47=KMs83ej=X%YjO
zs}V5lm8mxV3^x*y=}HW%O3b*fGnge8wiV&wWT@KKRcTA9^S4(wH89rjZRv`A{w^i5
zqIz?CtzF7cC@w(ia!K~3V@2z1YEP3=9{2Y{-IYj&g1hW2Bw|18;o0r}t=9C^e}`>p
zmL`--aT9tYYz>nb*a^3!q!@nOUUfIEF!a7Q=dxFmEvEh9KB+`Zgptx~XZ%;Dxj}WY
z3s>JRLaq4^v@+~S3nC)_4-nZ>v?(M1m86xO=x~&Z8^8K*B&tY3>MuddPdw#}#m)z01?fKKrV>VVywF&kP%-keZ|>>JQ85Wk9y;ti`zxP0c%Q1RJ;L#LvhtGXB0nZ)
zId9JiG)Ri#@Q`6rFhaWN0&YK{v}ZVJI&{9G*50UlObb^M-LA2~RS`=0z_CUQBsmdSreo-^bE4iAduJok`%A-4f!)
z5utf*m!?fPq9ES5XyaB3UTCGTKQH%U(3+rrB6X&6rBq0R_Eky%;f|rxqA~56|%5l4+
z__(XK*{Or$9jB~*Qwicgnwhkb9K)ZvSow|`BYdrs{$Qv$!UJ*M?t+DqvPAJGT6oL2RxX3RnUBRxl;p3yoKuBzK#H0&
zo;CZ7y*~3CMp}6x7-VJn4-lM2Z7^+vDWPzp^LStP=!dU1!4Ng{NBkcEZxd5H5)|u%
zL$$^-*cve6qo8Z4(K+7(~U6d~H8A+FGmY{ErM0Rb`=KQdkaWGdVs!LvJP%ma^0`
zzh4{;G9_Dl$}n*TZ^)rv;oipx&lOXw7?mVwuXGJ`{Fp=;wLa=3_!XD~dtn@_7O_#C
z6Y($ISTPG@W4#~Y;wMWXuk)s8+j(~Q*>y;F
zuCk`@F;~!oC=)1)0KJKF?y3{8^xYQ6`cqO^{dfzU>}wU9N;M7&NTo@sCb>$J5eGZ3
zvQDRhJrz$&g1~>iQV6IqcCPucQP;BMR|%6ka*gwBE2$q-!tI$r^Z*Cp3^6;P&*2?;0SY>o#$VRNUhfVOx#PoO475YUEppo=oC;H2GsAMV9dH__HQt!yZ04b
z?WxITNEw7V-eLVHF>WStjsT9DAW-NMUwqRKCprgQf9LU>ayShsf|J;>%Bz1;nZ(z#
zC@C+fDDZd>#3W?R3|_#W<&R}!Jnkd!^V0v(;urVi0VlRfNJFVST9?^9DkhEj;QyG(
za~eADx!XZ*x$5MvPXZQiKkzn08{4tU1QA3Kaaz>Nzc;9DzT=JW9{4MfYc5HnhS%vzNpd0G=s6^P%NYj$9^6Y0
zBNOcDMY_RK1?t%P_#=L5#xpD-^m<2s^T|a?`)?8aZ{7RJmc@qyx@4JnF;NSu_JeQU
zVXgDT$H$8(LJuo#*&fUJi?=*;ytBuyyAH`s-o#Tn`s4!@%v6j_Qe+U*~Eo!=tV#%m4DP4!~zTGXg>nFpf}
zG0VoVX+LXn{+R^IP=-9CBv@_+uBPT4;pXqO-SWx=8UR{hu68Reoinf{zoEWLSu&mWebD4gh;mMuKeq;_T9`()MQDko-+>n<
zbdL%xdvRC{XpI7Yk|AZLHHH>z6zKURvia8!`24)K^YFk=2aC*W9UE%)Q-sI66FlqE
ziP1F?f5&GD7>|pN;1$3K0jm8pRR2J*nq^8O^}%VpS#az~xLvd6`c`1)I96EmOIE2gL`jUbp?){*#S~ZbOzOC~CCLqPs?R;Y3CwwQ1n5bLkMi-rZ-~SVR|7{Hl&{lE2hqo+;nUPv3!FrV1k=
ztBuW1icGHq;XNJLDBXEGhZz9b@SxadEU{PYjU+>D|vUFt1niiYC5OYpr>F@8%f@{
zpeff~*~ofIRk4E>e*QX~cX-;Tb!IL$Z~^16Y4`@cp%a6v>@*&9kbE(~(3ITG95gytJ`Y4q-L_4+-Hja5F^j#-t;A
zQ}MIqbpp=r{(#(T6P$$8-Lm3#-|`yqyxNL{d!DEzzl2E>a>#3>Rpg@ahf^0sL|K$H
zeQR~n5cPTXlY`N<(WU3U-^wDAIZ~x9Zlapihnz4!_Mtp6AE~6{KvfPC5
z?uhE#{DIA7>T(=SfTKEfT(B&9I@-@=ybr<3^=Gj2o+kZ-eTah={M<1+e)JtXhy1%w
z+{F&e<@Li~%6^{|7Gr4&Q3T$t{vzznDC!BJ3^A5V0*E+C-YQ_bIY5`pf?`KyxI&Do
zoGfjQtT`G+X4YASv0E7Ze|TWMMAf3HDBc
zlDi*@!<0XGCTtSQ*5XP${2i|GZAy&v7CN8pkpI});H{Sl^U?&hGTol96pU}1xzb6U?GBJe9Yl9Z^lSgB58nj-t*oWy>Qm3gD4RcVaD8u|3brNo=T?8e1KpljI5
zz~S;UT`)D&K7z4GU487h#@vfhYS~ZdsmpaoYhLcF7Tvn%?pH#9p;mxuQtbeB9DkU2
zpIxtqqhZ2gz%5g>lpy^|21V3uVnZ6xXPXQ)Z@j;~LPmN-Qk9H@uv8PByw^FgfbB*5^jabL2rurbEk47kKZvwd8uN$SaD)!Y`g^LO3QG(?bsc;`6uAmyVqm1k?4lu?d%KDDYg;!oY>)}q))XKAz4!L6Uh^`bJ#uIa-%@P~LNxnH&
zeG5l)LD#6egQE9tyJ$BDQL6jT6mwdUrMTRHcZIbBR{6li`ahP+d~&5(*v|ldVAFuLI)t7hzk%s3UaltKqqoB@&5p`LhP5ZmZxKICuNW>osqWXo;Ty}G7KD2!R2uBlTm)WEtlxZQ;yO$K
z4*%itx6JV88CIAvULd7^6@PJ{_#G-d(eaJ}xErYFa|_GEIjY&m9B0t`!9J36A&ZE+
z1z0frk`@|6*rCTwGOV^uy&KAO;Qg0-5{r$0X?jO_IHBNBdi)=ts5Zk;ccb~6;DRQB
zBOPM%o%GZw)MQXkb=wP@%AjiJZ<}6B#rtM07F+Y0qIkg;FsDl{Y05O|;G`AudH@9O
z`QyL9pYcGqAz4F7p-+6&{yR+t8at+05DIbYv9w_)dLfms5zr76@FK%)}I?*3XDu;XM%F)48
zeNH!!?Z==2KiMO0jJRtz%)H`WQiM6j(wf4oO++h{;ltrZ`U31!y7Byl1(*~&rOypa
zgGjenMJ&x;!(e$8jyv6j778c}7SjEtFmah*#P(+{P#wtqB{WyZ0M~B$O%rbJ%B0Pt
zqRjgm=9)>_f%!T5aO{^Rn5`GVWqRxO8MdmUQuV=?t1}h!!Gs|prN$5Lb~5o6=S9hF
zuPmw_j2#l&e+;U-6Ma`lT{g-g+56}rKp_}Rn5G*|EMDG0acp{Xo3+QyTEFnMng!
zx48ZR0A%bjjFo(!{uAG30*Y&kGv6Os#f}--{R8}0&PJxBw@&!lVt$x%dJ9{tl&p_H
ze6h$9k|ZV3i8*3kxCb)8C!lP@LkUX3J4=iDKRJw)zrEJ<(`LoRR!G=j!ARvW-O#OHhJ9@=H!fKUM#{KiN>>w1;A>m1Lau(j
z?P(!ij;CY~r}5WyLox_#nLOjEGfveK=^#H@`POl`KZ5YR;P93e(skAFqjRI<6!``0
z`cJ%kZ~%e#h|E@6c@a=WJzw{Z_Ur*S>vV0c6GB?IYq6pq#*L&C!zh`bj@1SNr4F7}
zFg7%V#ln)*)Cddz9#()=s;4FfLB&tPqo+;&d3Bd-dC-;Ed(*{N+7?bu>8on%_ktx^
zWjb@8isV96F5hSND|o-*~#jTYg^#6EXR3@;I9K6xq#8Ps-Ae?S$8LKPS$_*P|r|W-%k@9;6TXD05T;q?C
zl-o$GnoI6r+j(MbKAObK{MP_tiapMm`crg%uO7Cij_+1)@n2+arHwT%f%oObN2j$d
zyyUBcW|b94-beAfoyOd$6XsPVHg=dZZ~vh^Q4zLsopQZD9Kz}L{mW#tkreiFg&U&5
zMu0)Xpg?@p%_tH?B5da9qjIL`??;0DZO{J@)JB^
znl+O@dRfFQQ`s$P3)`3IUJ^>jgPgp~sMkDSO&UB?BXL2t7f8w4p!^8+ddPeICBp01^T9twDz)<(7TkH
zU-_+3-b-o~@y8Wd2#Ecbm5tx;Th)rne17kn-nrh{wZoi1O{=HHx|_3qrVziGZx;uH
zNKG&o(12OOzK9WgY&TZ1HoMk`mgrdWr=$Dnh7e;c1vCAD-F1gHV`hg}S3Gm9S62bN
zfLdNfKeRlrCNH@pS^#=dm_iH!2jb2e^j@zEKk!%)e7$@bc#6b%sLi8vvZz)SZ&+GV
zU1EB_O~dq3tO2|(hk0CRYxQvprV);v1h~JAF-O@LTR1Dc^~0#j1vo`zB0yzPQV%`V
zRt>9{30||lj*h3PYXat&ZTDhs_YTIEz{Vm`P2GIJW8C$-zkB~=<4~LqbB45Pw1x(b
zIyC3b?K;o0PiW(&;1{*Ya;wm10*^7%tO9I~r57d|md+;IBz8&&MhV%*SNzCj`=Z^J
zzw7VVq+<9tQ(93fWW7Xgu}<*Zrnmn-tAE!r-)*zUd}b$Zn(~8#xCu|?ok}t^%roi9
zr_D*A?8m%o$v8T%0HI42zQeWylcxN*uhu01$!qcPzwk`NYZsm#_r9-q*;p#AXT8fQ
zrj>`6B3>94)*L#9eM&ZrhfK5Po++OV#Z)A!-Vrl=qev3lGSrt1Y*;zHlT1~?UX+p0
zi^+@DF-c^f`K?%Mvqptg@0hgVIyqPYoqSAI#P0`b=f6(rz$g>Ldn~4kGZ5O#d27Y?
z_A16^?fnBqxEFn@tLwYJ3(Y?;j#@k8YZBm2GeIqVkC~K~Z7QXcnc3%fl0DmnT|q>i
zXZElpDei%U6vhN-hZN>s>Td&S$v*$+BzLcqrWw8I3E}xG(n&W_
z7)MZ0hEon!_6R*jtS8`|oo>$lQsd3KA0L4O9T@UH)&KSDFIELULDB`K=7S>8OM9nV
zYf^xkb1M{~y6a>nV^ky>PuYysJOyVdY6CR(=
zVoHu3Qn?uYn|_8p#N805F#^;48%E4E%`_k=8*fHN{q0T-aluTOw!+G=8*qGEX^0KI
zEJKZPkx%6vf=3793p37>d60v+E=dB8+}8oud}QhR+{o2D+|nVaxMMSZ`A(>uffPbR
z%1C|C&GbIp?^6%n5igN8_5Q$3@@K1$4$X$5whh0r(aU~Egq_sF_IxvBbhqQfNCNp@
z!>)oZ=r(d0e`0qLOyWcT9^$OnkUlwDR~rw~3b#n~&NGhh(BJJ8M!xGmJ~>X;O>vj0
zVI-4Tkqb)6*KV7Y5n)n9Hxwf#(%<(8xri%rHL7d)eU;
zOoZq5osr6!n!y;!37P$o!NeyMhjURd>GSmE(Ns+BJMEYXx+T?$x8qD_a%Dsi@AS17
ze(~DBevdO=I;kg)%{ctaS(Z9U;C*me;*mGgYdwYg=E8VFm+7}8%$AcjIp$sLb^-o;fY&lC+`0UGM{f`
z3A46;Wg|rwsVW_Lpxvyb5Hm5Xa*U8f4%)iFoZJAO_
zv*bhDNn&DymPE~Ag+Q($`>Rc9iuAs$U{saCcGJX<1RN!5qGdYN>a6cJj0bB^CoMK}
zvv7kE5#}`=Ni7Jy)g|+e=7UB0AKlIlX~S~KxPiiy<$cEq3+wtFl@KaNp;}FD8VCG>
z(s230k+J;G7OmQsl=ZFrP}mA8o4tTBCrr__T|Y8D93j7a_^~@c+<#o4md(*sy4c!M
z4XuD*L$l&)3w@O6>cG0sfBwu~={t2RJ+G%$j_YNnoeOk@I7`k!Zg-L16wfAsK9SfS
zt4FvO(mn1P_{SV8HCs|M-kP)!ER$O(4Z3+XU_^DUi`MYO3dG9oGF(`LAr;69uvON{q-=YB3otUL1>~@sl
z;(r%jSJTznLF9j?%x?t(cQMY5Hr!a&!cUNMxgw-L;Ek6ooMvlwpnq=eACkc4BeT8W
z)yIF-fATSn5f!bGBfcoXPA4zSPyYQ=tbIKj(bix1EIzxNedTfL{2n5fTQ(*Zkv}2v
zauaFy9k=r08vkL*2{NuZE;IDp(B;%u~4DW9xoEuT4OeEe2Pzm)mkpBKSmOvaPCKE
zH$yoieWiKeCa45~@{O5mf%UwJb+4R5;s61=fs88vLfG+e<@&+YueA`hX#`PYAx$O7Sf9%g*Gtt9pYl*fNn#b&qG0bVmXJ{W5s>9
z*eJq7uxs2>fiDv3V0@K%l#ITl&|d{ws=y7qM?;JsU6b)O3Ei6k@RnT1W#F2pjWGC3
zTnCG|IYv`1PF+|g5xda4maZ}9^vsM(Y=GkLbKfO3>f8ROy~G6}n{JMz=i&Q55{6l;
zPg&OHXIYiR%S?=(#cA&KYFiMLi~e3P*R=9eQBMj?Oss{hsER
zv}&=kb>0l3sSU4&)9z2#`vdx^DrnwUD$sHd6Pz?L%24!!T=x%Lu2rf!Nus^A#38$?
z589Cmd6S5LUeE4#m-OvgJf)r3GY-j~TA>aT>n_Ew)X^nXmob9CHu{xvH=OrPc71Zm
z#K}~jh{vWoaN5&JA13bLB@@{i&R5|j6}Hk%F`HkAOEtHNN<6-5uoKH7VW$l`|3F~;
zmB{$Ff{S3fHY4*W)0If5J~j_AcO}&a)6h~_IQywD3T~FdEw7wYS|)3Ix!hSKhVSv8
z!hC%?zwduqRTK;il^W=DowJbUwXfT7p3^hy-H>8o%%r3EELvc-6B$U??01GV1INrP
z$nwzICsoeSJ;pMGa{lxe;CfSd>Qn4&5FPKQKZlsV1R(rR=Fi~!jfI8fiMO?E%lgAu
zVo4{bX^=)vQXYLb5A2I-jjFx{v%^bijRgMiGTkP*gFfX>m%VkV`c=NM-y0c_*sjPN
zULkoq4@CTyEXfzC&@=A)AK_dJX`N8DE-j|aWCgc;_F$65ssVzs6+Pb=cd2e=4^({*
z;k)1S?s<+2cV24e@kLTSJUn+JCnve%le60#9t+UghC53v_Y_sFbBCYT@_H>3NE!fc&x
zbFd=2G(a6IMT^{?9)}n^=9p+ZP~JIcIcQL5B&^?Zlb_by1^=hATs6h&!U$faWkN4*
zIsEFCV(xXURx%l5sG00Nk2Iv+q;UihbLWOcM2bwXq{u@V7&u>I=!_#;?AK##^~!R3BL1-s!b3LLE-O
zxEa@I;r=AipCpeFdo6;)DQzf}9hpNckP=e5+Q9JH27V%
zd{rr+VnA2eH#MBo<8XtweX>bmlrNG&(T_oi9pdfy;bdKEm#SP*mq`|+zLEN9f6+f5
zu)JQqrG^|%mko{?y{}_laj&9BKc60mw4rPwy1$?%8xUApRSp=Wb)LD>iP9ey=Pa!5wu;uP@jj_W8@|Ty0~A@72pj!+d3hlIwxqs(@HE=!EDJbM!RTc~{y!!oxhdz*8=
z^6`D*d#gG`G3O8e0P!5jD>@PRCk%FWYs#Mtg(!j&1w3Ab|Ij?}eue4?@RpTE`ko5)
zbY5J02$V=%K3+7@XQ=-xPE8JZqX
zY;9RO34qpqf45rqa@f5tZRdgYvx#X_3@@eath#h@u1DUHruFaU!Q71D@|sf2q0iLU
zg8k%>N55~0y%>m^Zy+)5Y2>jI2rN1E+G-ILJ{`tWX_lKjr-JL6JGjy^&m1Q)u)lgXMoQ_Eu4Gbz!$ABv>FoAh-t!4#C|*
zaCdhKcXt8=hr-?6T?%)1FRXBPclb{I-Q)C4k2AXO_r<?K4FPubb
zUtODosp@Nk*kt3-kumJ;CaeGG;@ekCipNIq$7l}jw%|N+dHYQfIv#wY<-hrbF|em{
zzpto-{wARw*!G*CgwGk;lfxQq2)EE;@l6e#s1cSqdJe{gtU{-tqhZ>U)6(A
zh_%V%l}qcW&hnuBXt)ntoZez(MH>h6+`Zqbt4Yi>=Dr95)XKedL41uP`J1+y$vIe;
z3M*J%sp!HJEyugo09Njf1)=^Mr(K+8i>>ed5@Kpf@*8<@0Tq~%_lW$lxqm%g(2Z;#
z!1f(Y+0EimXAw4}p1+p{_YFd0VYl*G&aJ5D4GHG02}cN2F%8MUeTS;GbYBh3hc~k-
z+RUG3*Rk+zFBb_T?OhpCljvy>00-%|?eWR@VWhm@*P-#QNfuqP%@`L*#TC
zSPzi7wWRmU_l3+f6MLp}i=}G_CCI2KR3UO(uyvI#RE_?R%I{uODL9RB79CyvmGPDf
z5S@|X$ZxTqfgMOv7yHD>3tu;S1Z+mYFLyV(QE0MdJ-o
z|FROm!NViRxFtweoiDo0L-*RE2psng4ePw|j>y;!r#{>;F|pIy|ARrY+=D-1xsLn?8vmpu}f^~nDgv1
ztX^?#?NBq~hxQ#@Sk!mx8r06;AyGFfcKoKbr3P3~FDsyQ*c79~NqKZXP@L@Psk@2g
z+`B6Hy?%8JwIiNx6eXhI^H8Y+p5p?0Eoo%_8qj>7@zr?!5W=`@$N4Z#)wwPqJ6jK&
zNnm!2J}n#?l3dRm7WQI@Gryr#qFhuNe{_u+Je?iTcXym{X0XEfau><;_gjFGy%Wm$
z!z5;*QVcv|?@;0`_74A~6aUL_sb`F{Od552i+yEg`UG0mpP1ml2zo_D%lw>v`JB3E
z-G8o{Z$+rOjVoR-ff2=?QMIF(()HsJHJHg-jt#Y~>TN^Aik~y`(pS;Zygl>P8=ci*
zkb|VZ;N(R44~3vhH0xw=D}sY1rdjewyPlr;54OU~(3{;_{jaQW~!1qjz!>
zK%=J497!)Ocu#kW2O`S?$-4r?81>05X_q+WB~K9I+w^{OZ->?n2LJQ+nOkG9mPR8*
zDFEpDr23qoN?N@pvUgxbR~xyGW{Tl&$8UG0v?Y3J8_p|fChGSe$Hh3in!v=jjb4N{Ipn3LOY3QKZ>i%;kK^uWn)MR!
zvL*^daRT>FnFazCnm$f0Bx!HO^b+O3kVpZ+_4S3MKVD{$D2ZOk%Rzd|k|@iV3bg>>
z>RNF1du_{t`M=Z;4V<2F43dyP*BE63zlac3sDIrO@prf0;0mpQLW!ey!jI(L&L@*s
zk4ZAcIyT^rwSz6j4dcCd|AV3MlEGc~wc)~92}yaK4zb+J&Sp*aI0(uK2Beyy}jwt?u!m{V-5RXC+P;e=$FJaek1K^Mhqtz|n{SN%
zi-jA|c7a}sm-RJ>n$J70lLP?XdMM}QL+>XjDDC0FNQ(L`L6b7$R6D|#GG_$lm31XW
zeAiUsdj273#w3e$ct7hL#XlE(ibzio71}C(_z%X&-i^eUvoGDUCnBcy>a-RG8}|*?`AH4;1sb1-
z@!Hn!pxYPoGl+%iC;H|LXS;zqilD6z^p}q}C|S!hvoV@%N6zau@>~C1%RB)!40Ds9#)|SJ^!-
ztQ=VyRgfpnFDS?hK1f&o#9*a)C9tG(>W!!D+jqfDW{Jb$iASCi7byO>3D(^STNRM{
zmu=2E08Z5nph}4x4^!5Ie@hMHnH{j~5Y-?>iM73z`kZ&IEymJlk}N9Xumv;)>)!7?HClQGnjpPj<)PYVKgi
z4cb^3wDavvh(u9#Y&WNnUu4rz6cMK%ltb0tIVsVm&LC7hrxcx2ty3f8=o}*29M!Qp
z;~N}WYyRVo1?IOEuGE2Yj4;G=i7l!I3<*q16XrB=_wd#l>U;b++%ru4sKUl26Z&G
zhSM@i$NCbfyk|%Fdp}61-apE^61~H=I((N)@3FH@=yuCwsl0D@77*2(>BR=X#wtXW
zR`gFl-lb8I(5C(}WApNz^hp?l!p2!{)n7VpjU1TBllf8)jfIoY)0@~w@`gjMKdWKo
ze#!n<*H86UfyH`_9XB1j9dT{4hln0~QhHFreUhLj!yjluF}4pWyyY+K2vIljlu72I
zPPn9nU>-1kQY9gV!OG1gqCJMLSUh_|G)U_jig)Q1JVp=fTu=n*RoS_q$#jA1H2}28
z`LV_$5V!JFDNgaTF)`yKV8d#;`5nR7D}pXlU%?z8$MN&<-|yT-kAE$PfrjxoTm}v?
z?TR<+893WU6Z{|sFs*u1Eb31QVLV=4Ds3FZtuo9IJt=vC-{4o`RwaYv2vplREA}OFcLicKDuhA_1AspuCvVUvCM00XX3u89n?NWm@Err%63D9
z$Wr3p__ZlCy+ud^$sCP-F?tgdN1qP$4Lv}9zEVDy`{)q@r@xzi@cMz23sx6A}fWqBD!
zJo!mqB2!f1Nw3AdQrSG$1LpY+Pq@y}P^=UW-1YNZ2fAwaIz*(^@9SIka#oXAbV}Vz
z3S)c4>-Q4(C6g~Q2#B?`7N`8L8TLye+m$`l$15vDks=xr=Fxc;R&d6&$tz`}x+@^>
z+4JVhF)(W+ni&YBBlGI9XX~IGNUIc%ncg%ts&xSSrS7WTko8U*tW+>C!`9?h
zX=$vCzLfvcI~V36;kmF%~OX3-zWxp1ixq
zK8~GKb)xq;LCx7%gwYM+W#+9E{Bf1>-%xt=_Jr#Tumk^m{oZHxTXkLvyI*XVl{|}H
z{y?elkyv6XJI2Il0Gw&3O
zPRt`eJn7H0nJ^NE?(*)BI-8%9)v_72hl`v9r{#03W)^x0^{R5w)y6+@@8w)S7@!~(
z8byHf;1|qJHn`vdN~WW|Y>Qzk6K_}}2_yf5QMs!Vl6h65C98};7jqk{tY4GvQ4wHY
z#0^nUp!Yv@{McMwc4<3|`SWNdNp&ue1qjKlt@{tg`|Y(b4#shQMi&QOLD6b^sj|lx
zw)@FH2DI(Y2$8g
zxh@baC~ZR6dE`QjIrX!-c0rWHZCS6>)n&JP5RaRMtvdrzyAum~o`%0Q0?%mdU4pF_R>U{ZYB
z_i2*l0T6Q?izZBp@M1FjVnehr!x+{1PFf<#GY53x^HZ0`HHK;saqqE-`FF{?9m`&v
zmZgKRpvg1|nV2{}Z;r*$tP!=A2AP@
zgkxKuseNCy#DVly(`U$=wfq5@5@x@>xdi4_apnn2MJp+3vnbCDD5|bM{&M|5Aq2Tq
zi#ATF=i1nntb3ez8EjoCC?#$R1jGWRHP-Lr#daf!n;dRsX|9X&7SyDEsKQ4BB0bCJ
zjXLXE_ezog4<$CqDG8N<6y0{g_`z8jab@=d@wIW-Q)`U{`mB)^MGi+`b;VwBQ)6=LR~Ym3FcgATY;#mcNhE#19>@vl6AvO)tovcQYmH!Xf`Y7%)aC6=Xo&}
zE5?73Dl4y&Y40rXdI5MYFQ|3aR7-iT9sn-vcCRV)^1xHGeaMitpJuZs>c_zxI>~|0
zyKscl?=)+J@=k%||B-BAPBp>AmvKGYytmb*4eiW?3axCdvIjS%!zR`n5rLo|jHpbk
zUvBWgmF3f@?IKv|G>wfI3zG4}gbYf@7LR~66_djY&jV7I=i=S8&ap4V-4{38d21tP
zh8aogu;g4;ebJ4}Guq@sDQtavYo7%=Htd$vGyH${a8@hDY`4v7nHdb&?8G-lhZ3IV
zPErs%7QC5K^qYbNznff+$-V<{RwEsZEG>QvDFE}6K~D&xRC2^xp>_lL?buTIe>AC0
zfj5#R8V~Pbp8au;kW;ia6Vw8$=G1X}i?i7=Jc%DsYqqt><#CGB+yQ#^p{ioRbkyrqym}3ZVdJ7tIEhC%9DHtxmL)?=dzq#0Z&miI
zVwHZ%k)-A*em3)vfrJM&Ccb+*S6>1(4u#*f+k}ad~_Hsp+*v
zk65^4Q;2w5BIC7ry@}Zi$&SF)z?9exO?3T{AS!wDYfb{;@j?A{s8hOKE`7*421Ok`
z#nZ{3)k~k#jO+`D{!xfW8hj(Bv$Z)V$M4`(ak+(EWhoiu^Pc_fe8R@-kc(*0RoqMcpkDJUSAHC4x=R>V3oYOv#b*%$|4&&d&x&DAfW{D29n
zf`wVi#Zjg!?xZ}jd^l&PTdQ5LU0wMhoi6L9{WJ0@i7n_u1y~a%B@tKDwi5Bnl^^TD
zo_pOuX$>ix%tD+0SP&Qz6@9EEx(AWQVIHqdO-M^RV@3|qJjA-jo!m*XCK^30JRw3x
z3m|G7yMLx0w2UEUG>w7(YGrwB9>PVAua0R8cY#n5Nl#izGNf)H)>#i(SpSD@k_>l^
zefv4I^hV%!#=!B<-PW#Iu{9DWWiod06Lf3%H#840Y|J-ad-aHXRvQv7cs(M`KyHgY
zgYq_LP={6}-WKw=hSh_lADcQ?&Ab;40_&-mVg@9*v_|
zKp4w1&ETcoTS{5!(RVxa!&U3VQmrHlG^9Iwh1zSEe!ymOi-T9
zIzU>=x$AY=``JUK))#+8p^MKoBxuL3%&imimte3^>lTw
zH}6>AW7NkahVg4ouOi2NtmTQ{LZ{8OH(-|XShwTdxGuMOp}F-cUwmSn{p}d}Lc6g7
z6=3yR-N>bqR={Xk6ehO1WJ&fnf2f6!9`@byVQLaR)D>Y*J+7$F{EUorPtHqgO2c2J
z$-rrj$MT>yQ)T2+_33{w1d77<)9TpQC9RKQ;@>}+iN;DfJhjq&v2|P$QxR2oMB!(W
zw3VHxM-%CMAAXx}9)^e9(Fbc~`HiuZ+-xsgp2z%>X*uUZJ*~AgZ4CVF5yD7-10AWz
zO4_f@x}R+nXKV4f`=3YPMt%ogUQ7>|UI#7Qac?aXbqc6T3MtPhq{LAnbB9&rOJI@@&kpSwk>iwh0$|DjX33;b00K?j6$L4Z6H$c&RZdZw8FOE8~YD|8-Wdx7g^
z9V=M+nUpNdorgN8AeLU8O2;)5H#kg|bAo>Nzz9^9JfUlAQj3nZo>L)%V}X9Eu~qgD
zFt=w*%cm{|baxo*e?w>urb4n%R1>pNJHucc{*hUq?9D1AE3ac1Lwj(R#VtGJhOWTW
zLf6G7hRqGXoR%i?yhVyU4*P()IgPvEQspG3Rlk(ocOwV%S3+-JSY-O)LN;s$={KRh
z?Kx+M+kj~n*uY)tTK;HhrCw+y$+*9njvMpC73!Bn7yJWl61{^@V#1B~t4L%Ye_RYc
zW0Xdo8mdC0m)lhzl$b)xudyRr=xBAVy3sb>_;BdcF^3<1^$Rf7Q0f$3V$9L+^_a&T
zS5YYh#1HwXwNt0>uVi
z`dWqjeI{w$xGwjXaDx_PU@LtruWgjY`{@~|nMl6l(GJ*E&Y#Y;{}JGOvD3o);nU77T8NrH&|7)STo+Nr%4#(fP=moM5T>0`DzVaqj#
zhwPoEVBcCA?adlmad`o8SS}>d6R&I2-8EZ>&p<%{6)U5HsN~6J1ui|Do)IfojJDC_
zLVVWTy?L|v9wg;Hh8xG=9ucq?zp}$yRg)JkX|g3q^(`qjZ>h-zn+&oOH#nxt=L>hz
zXean}{OZ@;R|miOL;2|LN$3E|V>8ARHD?`S9bd2^MA#mL8#uAs@CA(-x0N@g_O|hh
zK?`q8om&yM-XA`DTCrG9q0mw3+ualP@vrhe)~R5}X5i`@-XoTqY^;x(K3bu1n;+SNp-AcQpD!R{
zGZ}k3-b!pg30xq)e|<%P^oiTRB6vxHyojHBNJWvlJU|w>_0Rq1?);g1i`L5IM7s~J
z+po@nte#+d3^mCMR`o=L)E)kody;8_3!*jtpBQ)v4zNjIc96>ME8rJ~@u@>a2=5o#
z{C28qhr;pW3KnqxSTVn(_bS4UQm+Ty!>2M2q^tg&NRBtnPZUclgYW6GUf>@6%W?Za(lUS*VZ>@hqU}P1B5onQk}nZ;P?tW76taXCNrI&mzIY
zPW?u}4RIX{dRqIPens91xnjcn9S3tk;Um{bpdlI?-V
z)qA!RdLG@%`5~N)RkAj}kgUuy`7GuCKK#b53F|9F_KB&P018+q-Fz;#e9>tEHBXeJ
zoAvD8urArWyu|3~i&~nRdPOm)v38eBPgtNzvBpbxaM@`e3JEkHN*qo}F1H367C6KU
z;L`l6a3-}Wl87G-$GZAa%I(Tvn?m7G3u<%wB%)@>pXK1VNBC7xBDdzrNlgXehC=gG
zuOu^`&~N`fD9qDD51%*dK{IX>RLo&Z7wWtVjm@yGMY6pov2HmAXLNlB8!8H)ZLi&{+I8UNHnM12>hAcUZ0m-54gt@|qLS=L^NXk~
z{#o_8y}S}tZsPXL^eWSUW1lM%D^eBeh!=;FIDx(cG)V|y(sT33C=-O&L*wfNrH6^b
zSnELi04$bq6}usgtxIVW7tA%K3b7J~u)P0j?JbG$hkj@|{L|y=vO*FuVoTSjM_XSs
z6gQ;{B=Qr~^VFJu^Jl>6HRr{hUun-jjgOT?^mY^QQ>1-=mS^W>TG11?p-&Sv`JdLy^RP0^O!B(=ul|E6
z{>@GF>mKe7(TC$4vU0_@KP6frC~iYbxvJGs3DMiolWVu2DDB&S_&87>P_G46kt8>K
zNzX0E#!{;b1zN07O4PN)3ykbT5;mCa8J|C>F3nanRtrom%^aeNwMfrcH
z`oGbMtivIh4Tl7hRV67hKmmK
zE9+@~?D&59eB@u!6uZHc91RxK!`iT!_lR2;rM7$uRkX64zUvH~e-r4&$|7rPE`sN8
z3kl2n+oI%6+C#EwM(Gm5y{KzmVnIvp>oqcS9gD=S6KsSe^K{Bk_d?i9P!e1wbyi`cD5_G4P9K5pzh=XsBy?y?Of;BBCD7j
z+)|UJ6FMU
zs9uCl5Vf#%f!%JZ^Qnb`<-5iV6FUGs0fEJh&KgCtlZQ4@^4VIa!9O!;ud05Q8Xp@q
zY$l3e{cC9`OZb*c}Au_mQCjXKifplZo5AUEPs
zZsPH4o-Sp3k2)%r4bn$C_xJt-d{;tFxoH6YVjM5+JXND
zjlB<@&mLAYK36=>-6P0X*k1_LHJSFQsqHwsByzMKE@x~ubEz&U?$)i@!(uL}%}1=z
z99Ni`xk%BDw-n#DyfW8rocSGX=<67)*s$rI>863knp!s8e=m);Uj*6dDeI(Hw
zh45tSv~r#YYxbHu#I4)D7M}Vyag0W~bZK6=dp8*hCT~hvdrO-BtWzc%Cu}01N%rm-
zr}WBfyEUBMJ@Q8MtiD~-B1$pn6x0yPoKSl