diff --git a/ci/github/unit-testing/catalog.bash b/ci/github/unit-testing/catalog.bash index 72c3530c91d..c1004dea7dd 100755 --- a/ci/github/unit-testing/catalog.bash +++ b/ci/github/unit-testing/catalog.bash @@ -12,7 +12,7 @@ install() { test() { pytest --cov=simcore_service_catalog --durations=10 --cov-append \ --color=yes --cov-report=term-missing --cov-report=xml \ - -v -m "not travis" services/catalog/tests + -v -m "not travis" services/catalog/tests/unit } # Check if the function exists (bash specific) diff --git a/ci/helpers/ensure_python_pip.bash b/ci/helpers/ensure_python_pip.bash index ea5c9111411..13f7f9bd5ff 100755 --- a/ci/helpers/ensure_python_pip.bash +++ b/ci/helpers/ensure_python_pip.bash @@ -9,7 +9,7 @@ set -euo pipefail IFS=$'\n\t' # Pin pip version to a compatible release https://www.python.org/dev/peps/pep-0440/#compatible-release -PIP_VERSION=19.3.1 +PIP_VERSION=20.1.1 echo "INFO:" "$(python --version)" "@" "$(command -v python)" diff --git a/ci/travis/unit-testing/catalog.bash b/ci/travis/unit-testing/catalog.bash index e4d91aa3f61..0f8cf8c9ca0 100755 --- a/ci/travis/unit-testing/catalog.bash +++ b/ci/travis/unit-testing/catalog.bash @@ -34,7 +34,7 @@ script() { then pytest --cov=simcore_service_catalog --durations=10 --cov-append \ --color=yes --cov-report=term-missing --cov-report=xml \ - -v -m "not travis" services/catalog/tests + -v -m "not travis" services/catalog/tests/unit else echo "No changes detected. Skipping unit-testing of catalog." 
fi diff --git a/packages/postgres-database/docker/Dockerfile b/packages/postgres-database/docker/Dockerfile index 9bcd9b37935..a03ad77c13e 100644 --- a/packages/postgres-database/docker/Dockerfile +++ b/packages/postgres-database/docker/Dockerfile @@ -23,7 +23,7 @@ RUN apt-get update &&\ RUN python -m venv ${VIRTUAL_ENV} RUN pip --no-cache-dir install --upgrade \ - pip~=20.0.2 \ + pip~=20.1.1 \ wheel \ setuptools diff --git a/packages/pytest-simcore/src/pytest_simcore/postgres_service2.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service2.py new file mode 100644 index 00000000000..fc4b593b7b5 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service2.py @@ -0,0 +1,148 @@ +""" + sets up a docker-compose + +IMPORTANT: incompatible with pytest_simcore.docker_compose and pytest_simcore.postgres_service + +""" +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import os +import shutil +import subprocess +import sys +from pathlib import Path +from typing import Callable, Coroutine, Dict, Union + +import aiopg.sa +import pytest +import sqlalchemy as sa +import yaml +from dotenv import dotenv_values + +import simcore_postgres_database.cli as pg_cli +from simcore_postgres_database.models.base import metadata + +current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + + +@pytest.fixture(scope="session") +def env_devel_file(project_slug_dir: Path) -> Path: + # takes as a bas + env_devel_path = project_slug_dir / ".env-devel" + assert env_devel_path.exists() + return env_devel_path + + +@pytest.fixture(scope="session") +def test_environment(env_devel_file: Path) -> Dict[str, str]: + env = dotenv_values(env_devel_file, verbose=True, interpolate=True) + return env + + +@pytest.fixture(scope="session") +def test_docker_compose_file(pytestconfig) -> Path: + """Get an absolute path to the `docker-compose.yml` file. 
+ Override this fixture in your tests if you need a custom location. + """ + return os.path.join(str(pytestconfig.rootdir), "tests", "docker-compose.yml") + + +@pytest.fixture(scope="session") +def docker_compose_file(test_environment: Dict[str, str], tmpdir_factory, test_docker_compose_file) -> Path: + # Overrides fixture in https://github.com/avast/pytest-docker + + environ = dict( + os.environ + ) # NOTE: do not forget to add the current environ here, otherwise docker-compose fails + environ.update(test_environment) + + # assumes prototype in cwd + src_path = test_docker_compose_file + assert src_path.exists, f"Expected prototype at cwd, i.e. {src_path.resolve()}" + + dst_path = Path( + str( + tmpdir_factory.mktemp("docker_compose_file_fixture").join( + "docker-compose.yml" + ) + ) + ) + + shutil.copy(src_path, dst_path.parent) + assert dst_path.exists() + + # configs + subprocess.run( + f'docker-compose --file "{src_path}" config > "{dst_path}"', + shell=True, + check=True, + env=environ, + ) + + return dst_path + + +@pytest.fixture(scope="session") +def postgres_service2(docker_services, docker_ip, docker_compose_file: Path) -> Dict: + + # check docker-compose's environ is resolved properly + config = yaml.safe_load(docker_compose_file.read_text()) + environ = config["services"]["postgres"]["environment"] + + # builds DSN + config = dict( + user=environ["POSTGRES_USER"], + password=environ["POSTGRES_PASSWORD"], + host=docker_ip, + port=docker_services.port_for("postgres", 5432), + database=environ["POSTGRES_DB"], + ) + + dsn = "postgresql://{user}:{password}@{host}:{port}/{database}".format(**config) + + def _create_checker() -> Callable: + def is_postgres_responsive() -> bool: + try: + engine = sa.create_engine(dsn) + conn = engine.connect() + conn.close() + except sa.exc.OperationalError: + return False + return True + + return is_postgres_responsive + + # Wait until service is responsive. 
+ docker_services.wait_until_responsive( + check=_create_checker(), timeout=30.0, pause=0.1, + ) + + config["dsn"] = dsn + return config + + +@pytest.fixture(scope="session") +def make_engine(postgres_service2: Dict) -> Callable: + dsn = postgres_service2["dsn"] # session scope freezes dsn + + def maker(is_async=True) -> Union[Coroutine, Callable]: + return aiopg.sa.create_engine(dsn) if is_async else sa.create_engine(dsn) + + return maker + + +@pytest.fixture +def apply_migration(postgres_service2: Dict, make_engine) -> None: + kwargs = postgres_service2.copy() + kwargs.pop("dsn") + pg_cli.discover.callback(**kwargs) + pg_cli.upgrade.callback("head") + yield + pg_cli.downgrade.callback("base") + pg_cli.clean.callback() + + # FIXME: deletes all because downgrade is not reliable! + engine = make_engine(False) + metadata.drop_all(engine) diff --git a/requirements.txt b/requirements.txt index 26e833fe8f0..305907057c6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,6 +15,7 @@ # formatter black +isort # dependency manager pip-tools # version manager diff --git a/scripts/openapi/oas_resolver/Dockerfile b/scripts/openapi/oas_resolver/Dockerfile index 58420fbffed..6c0bda718ee 100644 --- a/scripts/openapi/oas_resolver/Dockerfile +++ b/scripts/openapi/oas_resolver/Dockerfile @@ -12,7 +12,7 @@ WORKDIR /src # update pip RUN pip install --no-cache-dir --upgrade \ - pip~=20.0.2 \ + pip~=20.1.1 \ wheel \ setuptools diff --git a/services/api-server/Dockerfile b/services/api-server/Dockerfile index 172ae927177..9cbe8cf8873 100644 --- a/services/api-server/Dockerfile +++ b/services/api-server/Dockerfile @@ -1,5 +1,5 @@ ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim as base +FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: # cd sercices/api-server @@ -43,7 +43,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \ # those from our virtualenv. 
ENV PATH="${VIRTUAL_ENV}/bin:$PATH" -EXPOSE 8001 +EXPOSE 8000 EXPOSE 3000 # -------------------------- Build stage ------------------- @@ -64,7 +64,7 @@ RUN apt-get update &&\ RUN python -m venv ${VIRTUAL_ENV} RUN pip install --upgrade --no-cache-dir \ - pip~=20.0.2 \ + pip~=20.1.1 \ wheel \ setuptools @@ -111,7 +111,10 @@ ENV PYTHONOPTIMIZE=TRUE WORKDIR /home/scu +# Starting from clean base image, copies pre-installed virtualenv from cache COPY --chown=scu:scu --from=cache ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +# Copies booting scripts COPY --chown=scu:scu services/api-server/docker services/api-server/docker RUN chmod +x services/api-server/docker/*.sh diff --git a/services/api-server/Makefile b/services/api-server/Makefile index d85c03a936f..88e1ecd1891 100644 --- a/services/api-server/Makefile +++ b/services/api-server/Makefile @@ -4,10 +4,13 @@ include ../../scripts/common.Makefile # Custom variables -APP_NAME := $(notdir $(CURDIR)) -APP_CLI_NAME := simcore-service-$(APP_NAME) -export APP_VERSION = $(shell cat VERSION) -SRC_DIR := $(abspath $(CURDIR)/src/$(subst -,_,$(APP_CLI_NAME))) +APP_NAME := $(notdir $(CURDIR)) +APP_CLI_NAME := simcore-service-$(APP_NAME) +APP_PACKAGE_NAME := $(subst -,_,$(APP_CLI_NAME)) +APP_VERSION := $(shell cat VERSION) +SRC_DIR := $(abspath $(CURDIR)/src/$(APP_PACKAGE_NAME)) + +export APP_VERSION .PHONY: reqs reqs: ## compiles pip requirements (.in -> .txt) @@ -71,18 +74,32 @@ down: docker-compose.yml ## stops pg fixture # killing any process using port 8000 -@fuser --kill --verbose --namespace tcp 8000 -###################### -.PHONY: build -build: ## builds docker image (using main services/docker-compose-build.yml) - @$(MAKE_C) ${REPO_BASE_DIR} target=${APP_NAME} $@ +# BUILD ######## + + +.PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc +build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## builds docker image (using main services/docker-compose-build.yml) + @$(MAKE_C) ${REPO_BASE_DIR} $@ 
target=${APP_NAME} + +.PHONY: openapi-specs openapi.json +openapi-specs: openapi.json +openapi.json: .env + # generating openapi specs file + python3 -c "import json; from $(APP_PACKAGE_NAME).__main__ import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ -# GENERATION python client ------------------------------------------------- + +# GENERATION python client ######## .PHONY: python-client generator-help + # SEE https://openapi-generator.tech/docs/usage#generate # SEE https://openapi-generator.tech/docs/generators/python +# +# TODO: put instead to additional-props.yaml and --config=openapi-generator/python-config.yaml +# TODO: copy this code to https://github.com/ITISFoundation/osparc-simcore-python-client/blob/master/Makefile +# # NOTE: assumes this repo exists GIT_USER_ID := ITISFoundation @@ -91,7 +108,6 @@ GIT_REPO_ID := osparc-simcore-python-client SCRIPTS_DIR := $(abspath $(CURDIR)/../../scripts) GENERATOR_NAME := python -# TODO: put instead to additional-props.yaml and --config=openapi-generator/python-config.yaml ADDITIONAL_PROPS := \ generateSourceCodeOnly=false\ hideGenerationTimestamp=true\ @@ -106,21 +122,13 @@ null := space := $(null) # comma := , -# TODO: fix this, shall be generated upon start when flag is provided - - - -# TODO: code_samples still added by hand! 
client: # cloning $(GIT_USER_ID)/$(GIT_REPO_ID) -> $@ git clone git@github.com:$(GIT_USER_ID)/$(GIT_REPO_ID).git $@ cd client; git checkout -b "upgrade-${APP_VERSION}" -python-client: client ## runs python client generator - # download openapi.json - curl -O http://localhost:8000/api/v0/openapi.json - +python-client: client openapi.json ## runs python client generator cd $(CURDIR); \ $(SCRIPTS_DIR)/openapi-generator-cli.bash generate \ --generator-name=$(GENERATOR_NAME) \ @@ -134,8 +142,6 @@ python-client: client ## runs python client generator --release-note="Updated to $(APP_VERSION)" - - generator-help: ## help on client-api generator # generate help @$(SCRIPTS_DIR)/openapi-generator-cli.bash help generate diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index cc38013fa9e..c22bbf0a345 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -2,7 +2,7 @@ "openapi": "3.0.2", "info": { "title": "Public API Server", - "description": "**osparc-simcore Public RESTful API Specifications**\n## Python Client\n- Github [repo](https://github.com/ITISFoundation/osparc-simcore-python-client)\n- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``\n", + "description": "**osparc-simcore Public RESTful API Specifications**\n## Python Library\n- Check the [documentation](https://itisfoundation.github.io/osparc-simcore-python-client)\n- Quick install: ``pip install git+https://github.com/ITISFoundation/osparc-simcore-python-client.git``\n", "version": "0.3.0", "x-logo": { "url": "https://raw.githubusercontent.com/ITISFoundation/osparc-manual/b809d93619512eb60c827b7e769c6145758378d0/_media/osparc-logo.svg", diff --git a/services/catalog/.cookiecutterrc b/services/catalog/.cookiecutterrc deleted file mode 100644 index 4a1cb9d42fa..00000000000 --- a/services/catalog/.cookiecutterrc +++ /dev/null @@ -1,23 +0,0 @@ -# This file exists so you can easily regenerate your 
project. -# -# cookiecutter --overwrite-if-exists --config-file=.cookiecutterrc ../cookiecutter-simcore-py-fastapi/ -# - -default_context: - - _extensions: ['jinja2_time.TimeExtension'] - _template: '../cookiecutter-simcore-py-fastapi/' - command_line_interface_bin_name: 'simcore-service-catalog' - distribution_name: 'simcore-service-catalog' - dockercompose_service_api_port: '8000' - dockercompose_service_name: 'catalog' - enable_aiohttp_swagger: 'false' - full_name: 'Pedro Crespo' - github_username: 'pcrespov' - openapi_specs_version: 'v0' - package_name: 'simcore_service_catalog' - project_name: 'Components Catalog Service' - project_short_description: 'Manages and maintains a catalog of all published components (e.g. macro-algorithms, scripts, etc)' - project_slug: 'catalog' - version: '0.3.2' - year: '2020' diff --git a/services/catalog/.env-devel b/services/catalog/.env-devel new file mode 100644 index 00000000000..d77e2e84be5 --- /dev/null +++ b/services/catalog/.env-devel @@ -0,0 +1,13 @@ +# +# Environment variables used to configure this service +# + +LOG_LEVEL=DEBUG + +POSTGRES_USER=test +POSTGRES_PASSWORD=test +POSTGRES_DB=test +POSTGRES_HOST=localhost + +# Enables debug +SC_BOOT_MODE=debug-ptvsd diff --git a/services/catalog/.gitignore b/services/catalog/.gitignore new file mode 100644 index 00000000000..2bb202b8906 --- /dev/null +++ b/services/catalog/.gitignore @@ -0,0 +1,2 @@ +docker-compose.yml +.env diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile index 48868012a0b..53b48c4180b 100644 --- a/services/catalog/Dockerfile +++ b/services/catalog/Dockerfile @@ -1,4 +1,6 @@ -FROM python:3.6.10-alpine3.11 as base +ARG PYTHON_VERSION="3.6.10" +FROM python:${PYTHON_VERSION}-slim-buster as base +# # # USAGE: # cd sercices/catalog @@ -9,20 +11,42 @@ FROM python:3.6.10-alpine3.11 as base LABEL maintainer=pcrespov -RUN adduser -D -u 8004 -s /bin/sh -h /home/scu scu +RUN set -eux; \ + apt-get update; \ + apt-get install -y gosu; \ + rm -rf 
/var/lib/apt/lists/*; \ +# verify that the binary works + gosu nobody true + +# simcore-user uid=8004(scu) gid=8004(scu) groups=8004(scu) +ENV SC_USER_ID=8004 \ + SC_USER_NAME=scu \ + SC_BUILD_TARGET=base \ + SC_BOOT_MODE=default + +RUN adduser \ + --uid ${SC_USER_ID} \ + --disabled-password \ + --gecos "" \ + --shell /bin/sh \ + --home /home/${SC_USER_NAME} \ + ${SC_USER_NAME} + -RUN apk add --no-cache \ - su-exec +# Sets utf-8 encoding for Python et al +ENV LANG=C.UTF-8 -ENV PATH "/home/scu/.local/bin:$PATH" +# Turns off writing .pyc files; superfluous on an ephemeral container. +ENV PYTHONDONTWRITEBYTECODE=1 \ + VIRTUAL_ENV=/home/scu/.venv -# NOTE: All SC_ variables are customized -ENV SC_BUILD_TARGET base +# Ensures that the python and pip executables used in the image will be +# those from our virtualenv. +ENV PATH="${VIRTUAL_ENV}/bin:$PATH" EXPOSE 8000 EXPOSE 3000 - # -------------------------- Build stage ------------------- # Installs build/package management tools and third party dependencies # @@ -30,36 +54,27 @@ EXPOSE 3000 # FROM base as build -ENV SC_BUILD_TARGET build +ENV SC_BUILD_TARGET=build -# Installing client libraries and any other package you need -# -# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html -# libstdc++: needed in ujson https://github.com/kohlschutter/junixsocket/issues/33 -# -RUN apk update && \ - apk add --no-cache \ - libpq \ - libstdc++ - -RUN apk add --no-cache \ - alpine-sdk \ - python3-dev \ - musl-dev \ - postgresql-dev - -RUN pip3 --no-cache-dir install --upgrade \ - pip~=20.0.2 \ - wheel \ - setuptools +RUN apt-get update &&\ + apt-get install -y --no-install-recommends \ + build-essential + +# NOTE: python virtualenv is used here such that installed +# packages may be moved to production image easily by copying the venv +RUN python -m venv ${VIRTUAL_ENV} + +RUN pip install --upgrade --no-cache-dir \ + pip~=20.1.1 \ + wheel \ + setuptools WORKDIR /build # install base 3rd party 
dependencies -COPY services/catalog/requirements/*.txt \ - services/catalog/requirements/ - -RUN pip3 --no-cache-dir install -r services/catalog/requirements/_base.txt +# NOTE: copies to /build to avoid overwriting later which would invalidate this layer +COPY --chown=scu:scu services/catalog/requirements/_base.txt . +RUN pip --no-cache-dir install -r _base.txt # --------------------------Cache stage ------------------- @@ -88,28 +103,28 @@ RUN pip3 --no-cache-dir install -r requirements/prod.txt &&\ # + /home/scu $HOME = WORKDIR # + services/catalog [scu:scu] # -FROM cache as production +FROM base as production -ENV SC_BUILD_TARGET production -ENV SC_BOOT_MODE production +ENV SC_BUILD_TARGET=production \ + SC_BOOT_MODE=production + +ENV PYTHONOPTIMIZE=TRUE WORKDIR /home/scu -RUN mkdir -p services/catalog &&\ - chown scu:scu services/catalog &&\ - mv /build/services/catalog/docker services/catalog/docker &&\ - rm -rf /build +# Starting from clean base image, copies pre-installed virtualenv from cache +COPY --chown=scu:scu --from=cache ${VIRTUAL_ENV} ${VIRTUAL_ENV} + +# Copies booting scripts +COPY --chown=scu:scu services/catalog/docker services/catalog/docker +RUN chmod +x services/catalog/docker/*.sh -RUN apk del --no-cache\ - alpine-sdk \ - python3-dev \ - musl-dev HEALTHCHECK --interval=30s \ - --timeout=20s \ - --start-period=30s \ - --retries=3 \ - CMD ["python3", "services/catalog/docker/healthcheck.py", "http://localhost:8000/"] + --timeout=20s \ + --start-period=30s \ + --retries=3 \ + CMD ["python3", "services/catalog/docker/healthcheck.py", "http://localhost:8000/"] ENTRYPOINT [ "/bin/sh", "services/catalog/docker/entrypoint.sh" ] CMD ["/bin/sh", "services/catalog/docker/boot.sh"] @@ -125,12 +140,11 @@ CMD ["/bin/sh", "services/catalog/docker/boot.sh"] # FROM build as development -ENV SC_BUILD_TARGET development -ENV SC_BOOT_MODE development +ENV SC_BUILD_TARGET=development WORKDIR /devel -VOLUME /devel/packages -VOLUME /devel/services/catalog/ + +RUN 
chown -R scu:scu ${VIRTUAL_ENV} ENTRYPOINT ["/bin/sh", "services/catalog/docker/entrypoint.sh"] CMD ["/bin/sh", "services/catalog/docker/boot.sh"] diff --git a/services/catalog/Makefile b/services/catalog/Makefile index 268b99fe573..df3401cae86 100644 --- a/services/catalog/Makefile +++ b/services/catalog/Makefile @@ -4,13 +4,16 @@ include ../../scripts/common.Makefile # Custom variables -APP_NAME := $(notdir $(CURDIR)) -APP_CLI_NAME := simcore-service-catalog -export APP_VERSION = $(shell cat VERSION) +APP_NAME := $(notdir $(CURDIR)) +APP_CLI_NAME := simcore-service-catalog +APP_PACKAGE_NAME := $(subst -,_,$(APP_CLI_NAME)) +APP_VERSION := $(shell cat VERSION) +SRC_DIR := $(abspath $(CURDIR)/src/$(APP_PACKAGE_NAME)) +export APP_VERSION -.PHONY: requirements -requirements: ## compiles pip requirements (.in -> .txt) +.PHONY: requirements reqs +requirements reqs: ## (or reqs) compiles pip requirements (.in -> .txt) @$(MAKE_C) requirements reqs @@ -34,12 +37,35 @@ tests-integration: ## runs integration tests against local+production images pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests/integration -.PHONY: run-devel down-pg up-pg -run-devel run-prod: up-pg ## runs app with pg service +# DEVELOPMENT ######## + + +.env: + cp .env-devel $@ + +docker-compose.yml: + cp $(CURDIR)/tests/unit/with_dbs/docker-compose.yml $@ + + +.PHONY: run-devel down up-pg + +up-pg: docker-compose.yml down-pg + # starting pg database ... + docker-compose -f $< up --detach + +down-pg: docker-compose.yml ## stops pg fixture + # stopping extra services + -@docker-compose -f $< down + + +run-devel run-prod: .env up-pg ## runs app with pg service # starting service ... 
ifeq ($(subst run-,,$@),devel) # development mode (with reload upon change) - uvicorn simcore_service_catalog.main:app --reload + # start app (under $<) + uvicorn simcore_service_catalog.__main__:the_app \ + --reload --reload-dir $(SRC_DIR) \ + --port=8000 --host=0.0.0.0 else # production mode simcore-service-catalog @@ -47,13 +73,7 @@ endif # stop -up-pg: down-pg - # starting pg database ... - docker-compose -f $(CURDIR)/tests/unit/with_dbs/docker-compose.yml up --detach - -down-pg: ## stops pg fixture - docker-compose -f $(CURDIR)/tests/unit/with_dbs/docker-compose.yml down - +# BUILD ##################### .PHONY: build build-nc build-devel build-devel-nc build-cache build-cache-nc build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker image build in many flavours @@ -61,15 +81,8 @@ build build-nc build-devel build-devel-nc build-cache build-cache-nc: ## docker @$(MAKE_C) ${REPO_BASE_DIR} $@ target=${APP_NAME} -.PHONY: openapi-specs -openapi-specs: install-dev ## TODO: implementing a way to serialize openapi - python3 -c "from simcore_service_catalog.main import *; dump_openapi()" - - -.PHONY: replay -replay: .cookiecutterrc ## re-applies cookiecutter - # Replaying ../cookiecutter-simcore-py-fastapi/ ... - @cookiecutter --no-input --overwrite-if-exists \ - --config-file=$< \ - --output-dir="$(abspath $(CURDIR)/..)" \ - "../cookiecutter-simcore-py-fastapi/" +.PHONY: openapi-specs openapi.json +openapi-specs: openapi.json +openapi.json: + # generating openapi specs file + python3 -c "import json; from $(APP_PACKAGE_NAME).__main__ import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ diff --git a/services/catalog/README.md b/services/catalog/README.md index 6f1e91dba5b..29bbe27fb58 100644 --- a/services/catalog/README.md +++ b/services/catalog/README.md @@ -7,42 +7,33 @@ Manages and maintains a catalog of all published components (e.g. 
macro-algorithms, scripts, etc) +## Development + Typical development workflow: ```cmd +make devenv +source .venv/bin/activate + +cd services/api-service +make install-dev +``` -$ cd services/catalog -$ make help -Recipes for 'catalog': - -devenv build development environment (using main services/docker-compose-build.yml) -requirements compiles pip requirements (.in -> .txt) -install-dev install-prod install-ci install app in development/production or CI mode -tests-unit runs unit tests -tests-integration runs integration tests against local+production images -run-devel runs app with pg fixture for development -down stops pg fixture -build builds docker image (using main services/docker-compose-build.yml) -autoformat runs black python formatter on this service's code [https://black.readthedocs.io/en/stable/] -version-patch commits version with bug fixes not affecting the cookiecuter config -version-minor commits version with backwards-compatible API addition or changes (i.e. can replay) -version-major commits version with backwards-INcompatible addition or changes -replay re-applies cookiecutter -info displays information -clean cleans all unversioned files in project and temp files create by this makefile -help this colorful help - - -$ make devenv -$ make install-dev -$ make run-devel - - -$ make tests -$ make build +Then +```cmd +make run-devel ``` +will start the service in development-mode together with a postgres db initialized with test data. 
The API can be query using +- http://127.0.0.1:8000/dev/docs: swagger-UI API doc +Finally +```cmd +make tests +make build-devel +make build +``` + diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh index ef94369f3ff..6c011f49bed 100755 --- a/services/catalog/docker/boot.sh +++ b/services/catalog/docker/boot.sh @@ -1,37 +1,36 @@ #!/bin/sh -# +set -o errexit +set -o nounset + +IFS=$(printf '\n\t') + INFO="INFO: [$(basename "$0")] " # BOOTING application --------------------------------------------- echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." -echo " User :$(id "$(whoami)")" -echo " Workdir :$(pwd)" - -if [ "${SC_BUILD_TARGET}" = "development" ] -then - echo " Environment :" - printenv | sed 's/=/: /' | sed 's/^/ /' | sort - #-------------------- +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" - cd /devel/services/catalog || exit - pip3 --no-cache-dir install --user -r requirements/dev.txt - cd /devel || exit - - #-------------------- - echo "$INFO" " Python :" +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "Environment :" + printenv | sed 's/=/: /' | sed 's/^/ /' | sort + echo "$INFO" "Python :" python --version | sed 's/^/ /' command -v python | sed 's/^/ /' - echo "$INFO" " PIP :" - pip3 --no-cache-dir list | sed 's/^/ /' -fi + cd services/catalog || exit 1 + pip --quiet --no-cache-dir install -r requirements/dev.txt + cd - || exit 1 + echo "$INFO" "PIP :" + pip list | sed 's/^/ /' +fi # RUNNING application ---------------------------------------- if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ] then # NOTE: ptvsd is programmatically enabled inside of the service # this way we can have reload in place as well - exec uvicorn simcore_service_catalog.main:app --reload --host 0.0.0.0 + exec uvicorn simcore_service_catalog.__main__:the_app --reload --host 0.0.0.0 else exec simcore-service-catalog fi diff --git a/services/catalog/docker/entrypoint.sh b/services/catalog/docker/entrypoint.sh index 
1aa3bc90a9e..9e734b5db40 100755 --- a/services/catalog/docker/entrypoint.sh +++ b/services/catalog/docker/entrypoint.sh @@ -1,6 +1,11 @@ #!/bin/sh -# +set -o errexit +set -o nounset + +IFS=$(printf '\n\t') + INFO="INFO: [$(basename "$0")] " +WARNING="WARNING: [$(basename "$0")] " ERROR="ERROR: [$(basename "$0")] " # This entrypoint script: @@ -10,45 +15,61 @@ ERROR="ERROR: [$(basename "$0")] " # *runs* as non-root user [scu] # echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..." -echo " User :$(id "$(whoami)")" -echo " Workdir :$(pwd)" - +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +echo "$INFO" "User : $(id scu)" +echo "$INFO" "python : $(command -v python)" +echo "$INFO" "pip : $(command -v pip)" -if [ "${SC_BUILD_TARGET}" = "development" ] -then - # NOTE: expects docker run ... -v $(pwd):/devel/services/catalog - DEVEL_MOUNT=/devel/services/catalog +USERNAME=scu +GROUPNAME=scu - stat $DEVEL_MOUNT > /dev/null 2>&1 || \ - (echo "$ERROR" ": You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "development mode detected..." + # NOTE: expects docker run ... -v $(pwd):$DEVEL_MOUNT + DEVEL_MOUNT=/devel/services/catalog + stat $DEVEL_MOUNT >/dev/null 2>&1 || + (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) - USERID=$(stat -c %u $DEVEL_MOUNT) - GROUPID=$(stat -c %g $DEVEL_MOUNT) - GROUPNAME=$(getent group "${GROUPID}" | cut -d: -f1) - - if [ "$USERID" -eq 0 ] - then - addgroup scu root + echo "$INFO" "setting correct user id/group id..." + HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}") + HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}") + CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1) + if [ "$HOST_USERID" -eq 0 ]; then + echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..." 
+ adduser "$SC_USER_NAME" root + else + echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..." + # take host's credentials in $SC_USER_NAME + if [ -z "$CONT_GROUPNAME" ]; then + echo "$WARNING" "Creating new group grp$SC_USER_NAME" + CONT_GROUPNAME=grp$SC_USER_NAME + addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME" else - # take host's credentials in host_group - if [ -z "$GROUPNAME" ] - then - GROUPNAME=host_group - addgroup -g "$GROUPID" $GROUPNAME - else - addgroup scu $GROUPNAME - fi - - deluser scu > /dev/null 2>&1 - adduser -u "$USERID" -G $GROUPNAME -D -s /bin/sh scu + echo "$INFO" "group already exists" fi + echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..." + adduser "$SC_USER_NAME" "$CONT_GROUPNAME" + + echo "$WARNING" "Changing ownership [this could take some time]" + echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)" + usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" + + echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; + # change user property of files already around + echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" + find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fi fi -if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ] -then +if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then # NOTE: production does NOT pre-installs ptvsd - python3 -m pip install ptvsd + pip install --no-cache-dir ptvsd fi -echo "$INFO" "Starting boot ..." -exec su-exec scu "$@" +echo "$INFO Starting $* ..." 
+echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")" +echo " local dir : $(ls -al)" + +exec gosu "$SC_USER_NAME" "$@" diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json new file mode 100644 index 00000000000..1f9fa770653 --- /dev/null +++ b/services/catalog/openapi.json @@ -0,0 +1,715 @@ +{ + "openapi": "3.0.2", + "info": { + "title": "Components Catalog Service", + "description": "Manages and maintains a **catalog** of all published components (e.g. macro-algorithms, scripts, etc)", + "version": "0.3.2" + }, + "paths": { + "/v0/meta": { + "get": { + "tags": [ + "meta" + ], + "summary": "Get Service Metadata", + "operationId": "get_service_metadata_v0_meta_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Meta" + } + } + } + } + } + } + }, + "/v0/dags": { + "get": { + "tags": [ + "DAG" + ], + "summary": "List Dags", + "operationId": "list_dags_v0_dags_get", + "parameters": [ + { + "description": "Requests a specific page of the list results", + "required": false, + "schema": { + "title": "Page Token", + "type": "string", + "description": "Requests a specific page of the list results" + }, + "name": "page_token", + "in": "query" + }, + { + "description": "Maximum number of results to be returned by the server", + "required": false, + "schema": { + "title": "Page Size", + "minimum": 0.0, + "type": "integer", + "description": "Maximum number of results to be returned by the server", + "default": 0 + }, + "name": "page_size", + "in": "query" + }, + { + "description": "Sorts in ascending order comma-separated fields", + "required": false, + "schema": { + "title": "Order By", + "type": "string", + "description": "Sorts in ascending order comma-separated fields" + }, + "name": "order_by", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + 
"title": "Response List Dags V0 Dags Get", + "type": "array", + "items": { + "$ref": "#/components/schemas/DAGOut" + } + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "post": { + "tags": [ + "DAG" + ], + "summary": "Create Dag", + "operationId": "create_dag_v0_dags_post", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DAGIn" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Successfully created", + "content": { + "application/json": { + "schema": { + "title": "Response Create Dag V0 Dags Post", + "type": "integer" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/dags:batchGet": { + "get": { + "tags": [ + "DAG" + ], + "summary": "Batch Get Dags", + "operationId": "batch_get_dags_v0_dags_batchGet_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + } + } + } + }, + "/v0/dags:search": { + "get": { + "tags": [ + "DAG" + ], + "summary": "Search Dags", + "operationId": "search_dags_v0_dags_search_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + } + } + } + }, + "/v0/dags/{dag_id}": { + "get": { + "tags": [ + "DAG" + ], + "summary": "Get Dag", + "operationId": "get_dag_v0_dags__dag_id__get", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dag Id", + "type": "integer" + }, + "name": "dag_id", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DAGOut" + } + } + } + }, + 
"422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "put": { + "tags": [ + "DAG" + ], + "summary": "Replace Dag", + "operationId": "replace_dag_v0_dags__dag_id__put", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dag Id", + "type": "integer" + }, + "name": "dag_id", + "in": "path" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DAGIn" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DAGOut" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "delete": { + "tags": [ + "DAG" + ], + "summary": "Delete Dag", + "operationId": "delete_dag_v0_dags__dag_id__delete", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dag Id", + "type": "integer" + }, + "name": "dag_id", + "in": "path" + } + ], + "responses": { + "204": { + "description": "Successfully deleted" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, + "patch": { + "tags": [ + "DAG" + ], + "summary": "Udpate Dag", + "operationId": "udpate_dag_v0_dags__dag_id__patch", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dag Id", + "type": "integer" + }, + "name": "dag_id", + "in": "path" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DAGIn" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/DAGOut" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "Connection": { + "title": "Connection", + "type": "object", + "properties": { + "nodeUuid": { + "title": "Nodeuuid", + "type": "string" + }, + "output": { + "title": "Output", + "type": "string" + } + } + }, + "DAGIn": { + "title": "DAGIn", + "required": [ + "key", + "version", + "name" + ], + "type": "object", + "properties": { + "key": { + "title": "Key", + "pattern": "^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\\s]+)+$", + "type": "string", + "example": "simcore/services/frontend/nodes-group/macros/1" + }, + "version": { + "title": "Version", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "type": "string", + "example": "1.0.0" + }, + "name": { + "title": "Name", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + }, + "contact": { + "title": "Contact", + "type": "string", + "format": "email" + }, + "workbench": { + "title": "Workbench", + "type": "object", + "additionalProperties": { + "$ref": "#/components/schemas/Node" + } + } + } + }, + "DAGOut": { + "title": "DAGOut", + "required": [ + "key", + "version", + "name", + "id" + ], + "type": "object", + "properties": { + "key": { + "title": "Key", + "pattern": "^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\\s]+)+$", + "type": "string", + "example": "simcore/services/frontend/nodes-group/macros/1" + }, + "version": { + "title": "Version", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "type": "string", + 
"example": "1.0.0" + }, + "name": { + "title": "Name", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + }, + "contact": { + "title": "Contact", + "type": "string", + "format": "email" + }, + "id": { + "title": "Id", + "type": "integer" + }, + "workbench": { + "title": "Workbench", + "type": "object", + "additionalProperties": { + "$ref": "#/components/schemas/Node" + } + } + } + }, + "FilePickerOutput": { + "title": "FilePickerOutput", + "required": [ + "store", + "path", + "label" + ], + "type": "object", + "properties": { + "store": { + "title": "Store", + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "dataset": { + "title": "Dataset", + "type": "string" + }, + "path": { + "title": "Path", + "type": "string" + }, + "label": { + "title": "Label", + "type": "string" + } + } + }, + "HTTPValidationError": { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "detail": { + "title": "Detail", + "type": "array", + "items": { + "$ref": "#/components/schemas/ValidationError" + } + } + } + }, + "Meta": { + "title": "Meta", + "required": [ + "name", + "version" + ], + "type": "object", + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "version": { + "title": "Version", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "type": "string" + }, + "released": { + "title": "Released", + "type": "object", + "additionalProperties": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$" + }, + "description": "Maps every route's path tag with a released version" + } + }, + "example": { + "name": "simcore_service_foo", + "version": "2.4.45", + "released": { + "v1": "1.3.4", + "v2": "2.4.45" 
+ } + } + }, + "Node": { + "title": "Node", + "required": [ + "key", + "version", + "label", + "position" + ], + "type": "object", + "properties": { + "key": { + "title": "Key", + "pattern": "^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\\s]+)+$", + "type": "string", + "example": "simcore/services/comp/sleeper" + }, + "version": { + "title": "Version", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "type": "string", + "example": "6.2.0" + }, + "label": { + "title": "Label", + "type": "string" + }, + "progress": { + "title": "Progress", + "maximum": 100.0, + "minimum": 0.0, + "type": "number", + "default": 0 + }, + "thumbnail": { + "title": "Thumbnail", + "type": "string" + }, + "inputs": { + "title": "Inputs", + "type": "object", + "additionalProperties": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "string" + }, + { + "type": "number" + }, + { + "$ref": "#/components/schemas/Connection" + }, + { + "$ref": "#/components/schemas/FilePickerOutput" + } + ] + } + }, + "inputAccess": { + "title": "Inputaccess", + "type": "object", + "additionalProperties": { + "enum": [ + "ReadAndWrite", + "Invisible", + "ReadOnly" + ], + "type": "string" + } + }, + "inputNodes": { + "title": "Inputnodes", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "outputs": { + "title": "Outputs", + "type": "object", + "additionalProperties": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "string" + }, + { + "type": "number" + }, + { + "$ref": "#/components/schemas/FilePickerOutput" + } + ] + } + }, + "outputNode": { + "title": "Outputnode", + "type": "boolean", + "deprecated": true + }, + "outputNodes": { + "title": "Outputnodes", + "type": "array", + "items": { + "type": "string" + }, + "default": [] + }, + "parent": { + "title": "Parent", + "type": "string", + "description": "Parent's 
(group-nodes') node ID s.", + "example": "nodeUUid1" + }, + "position": { + "$ref": "#/components/schemas/Position" + } + } + }, + "Position": { + "title": "Position", + "required": [ + "x", + "y" + ], + "type": "object", + "properties": { + "x": { + "title": "X", + "type": "integer" + }, + "y": { + "title": "Y", + "type": "integer" + } + } + }, + "ValidationError": { + "title": "ValidationError", + "required": [ + "loc", + "msg", + "type" + ], + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": { + "type": "string" + } + }, + "msg": { + "title": "Message", + "type": "string" + }, + "type": { + "title": "Error Type", + "type": "string" + } + } + } + } + } +} diff --git a/services/catalog/requirements/_base.in b/services/catalog/requirements/_base.in index aa4eb284375..12465f1409c 100644 --- a/services/catalog/requirements/_base.in +++ b/services/catalog/requirements/_base.in @@ -6,9 +6,16 @@ pyyaml>=5.3 # Vulnerable +# fastapi and extensions fastapi[all] -aiopg[sa] -tenacity - async-exit-stack # not needed when python>=3.7 async-generator # not needed when python>=3.7 + +# data models +pydantic[dotenv] + +# database +aiopg[sa] + +# other +tenacity diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index c6a2bafafa0..e5f6e2414fd 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -4,42 +4,44 @@ # # pip-compile --output-file=requirements/_base.txt requirements/_base.in # -aiofiles==0.4.0 # via fastapi +aiofiles==0.5.0 # via fastapi aiopg[sa]==1.0.0 # via -r requirements/_base.in aniso8601==7.0.0 # via graphene async-exit-stack==1.0.1 # via -r requirements/_base.in, fastapi async-generator==1.10 # via -r requirements/_base.in, fastapi -certifi==2019.11.28 # via requests +certifi==2020.6.20 # via requests chardet==3.0.4 # via requests -click==7.0 # via uvicorn +click==7.1.2 # via uvicorn dataclasses==0.7 # via pydantic 
dnspython==1.16.0 # via email-validator -email-validator==1.0.5 # via fastapi -fastapi[all]==0.48.0 # via -r requirements/_base.in +email-validator==1.1.1 # via fastapi +fastapi[all]==0.58.0 # via -r requirements/_base.in graphene==2.1.8 # via fastapi -graphql-core==2.3.1 # via graphene, graphql-relay +graphql-core==2.3.2 # via graphene, graphql-relay graphql-relay==2.0.1 # via graphene h11==0.9.0 # via uvicorn -httptools==0.0.13 # via uvicorn -idna==2.8 # via email-validator, requests, yarl +httptools==0.1.1 # via uvicorn +idna==2.9 # via email-validator, requests, yarl itsdangerous==1.1.0 # via fastapi -jinja2==2.11.1 # via fastapi +jinja2==2.11.2 # via fastapi markupsafe==1.1.1 # via jinja2 -multidict==4.7.4 # via yarl +multidict==4.7.6 # via yarl +orjson==3.1.2 # via fastapi promise==2.3 # via graphql-core, graphql-relay -psycopg2-binary==2.8.4 # via aiopg, sqlalchemy -pydantic==1.4 # via fastapi +psycopg2-binary==2.8.5 # via aiopg, sqlalchemy +pydantic[dotenv]==1.5.1 # via -r requirements/_base.in, fastapi +python-dotenv==0.13.0 # via pydantic python-multipart==0.0.5 # via fastapi -pyyaml==5.3 # via -r requirements/_base.in, fastapi -requests==2.22.0 # via fastapi +pyyaml==5.3.1 # via -r requirements/_base.in, fastapi +requests==2.24.0 # via fastapi rx==1.6.1 # via graphql-core -six==1.14.0 # via graphene, graphql-core, graphql-relay, python-multipart, tenacity -sqlalchemy[postgresql_psycopg2binary]==1.3.13 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiopg -starlette==0.12.9 # via fastapi -tenacity==6.0.0 # via -r requirements/_base.in -ujson==1.35 # via fastapi -urllib3==1.25.8 # via requests -uvicorn==0.11.2 # via fastapi +six==1.15.0 # via graphene, graphql-core, graphql-relay, python-multipart, tenacity +sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiopg +starlette==0.13.4 # via fastapi +tenacity==6.2.0 # via -r requirements/_base.in 
+ujson==3.0.0 # via fastapi +urllib3==1.25.9 # via requests +uvicorn==0.11.5 # via fastapi uvloop==0.14.0 # via uvicorn websockets==8.1 # via uvicorn yarl==1.4.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/services/catalog/requirements/_test.in b/services/catalog/requirements/_test.in index b75eddfd81a..e9fd578a097 100644 --- a/services/catalog/requirements/_test.in +++ b/services/catalog/requirements/_test.in @@ -8,6 +8,7 @@ # 'services/catalog/tests/unit' dependencies + # testing pytest pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76 @@ -19,6 +20,10 @@ pytest-docker # fixtures Faker +# migration due to pytest_simcore.postgres_service2 +alembic +docker + # tools pylint coveralls diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index f2b0284c413..4ef4eaebc95 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -4,76 +4,84 @@ # # pip-compile --output-file=requirements/_test.txt requirements/_test.in # -aiofiles==0.4.0 # via -r requirements/_base.txt, fastapi +aiofiles==0.5.0 # via -r requirements/_base.txt, fastapi aiohttp==3.6.2 # via pytest-aiohttp aiopg[sa]==1.0.0 # via -r requirements/_base.txt +alembic==1.4.2 # via -r requirements/_test.in aniso8601==7.0.0 # via -r requirements/_base.txt, graphene -astroid==2.3.3 # via pylint +astroid==2.4.2 # via pylint async-exit-stack==1.0.1 # via -r requirements/_base.txt, fastapi async-generator==1.10 # via -r requirements/_base.txt, fastapi async-timeout==3.0.1 # via aiohttp attrs==19.3.0 # via aiohttp, pytest, pytest-docker -certifi==2019.11.28 # via -r requirements/_base.txt, requests +certifi==2020.6.20 # via -r requirements/_base.txt, requests chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests -click==7.0 # via -r requirements/_base.txt, uvicorn -codecov==2.0.16 # via -r requirements/_test.in 
-coverage==5.0.3 # via codecov, coveralls, pytest-cov -coveralls==1.11.1 # via -r requirements/_test.in +click==7.1.2 # via -r requirements/_base.txt, uvicorn +codecov==2.1.7 # via -r requirements/_test.in +coverage==5.1 # via codecov, coveralls, pytest-cov +coveralls==2.0.0 # via -r requirements/_test.in dataclasses==0.7 # via -r requirements/_base.txt, pydantic dnspython==1.16.0 # via -r requirements/_base.txt, email-validator +docker==4.2.1 # via -r requirements/_test.in docopt==0.6.2 # via coveralls -email-validator==1.0.5 # via -r requirements/_base.txt, fastapi -faker==4.0.2 # via -r requirements/_test.in -fastapi[all]==0.48.0 # via -r requirements/_base.txt +email-validator==1.1.1 # via -r requirements/_base.txt, fastapi +faker==4.1.1 # via -r requirements/_test.in +fastapi[all]==0.58.0 # via -r requirements/_base.txt graphene==2.1.8 # via -r requirements/_base.txt, fastapi -graphql-core==2.3.1 # via -r requirements/_base.txt, graphene, graphql-relay +graphql-core==2.3.2 # via -r requirements/_base.txt, graphene, graphql-relay graphql-relay==2.0.1 # via -r requirements/_base.txt, graphene h11==0.9.0 # via -r requirements/_base.txt, uvicorn -httptools==0.0.13 # via -r requirements/_base.txt, uvicorn +httptools==0.1.1 # via -r requirements/_base.txt, uvicorn idna-ssl==1.1.0 # via aiohttp -idna==2.8 # via -r requirements/_base.txt, email-validator, requests, yarl -importlib-metadata==1.5.0 # via pluggy, pytest +idna==2.9 # via -r requirements/_base.txt, email-validator, requests, yarl +importlib-metadata==1.6.1 # via pluggy, pytest isort==4.3.21 # via pylint itsdangerous==1.1.0 # via -r requirements/_base.txt, fastapi -jinja2==2.11.1 # via -r requirements/_base.txt, fastapi +jinja2==2.11.2 # via -r requirements/_base.txt, fastapi lazy-object-proxy==1.4.3 # via astroid -markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2 +mako==1.1.3 # via alembic +markupsafe==1.1.1 # via -r requirements/_base.txt, jinja2, mako mccabe==0.6.1 # via pylint 
-more-itertools==8.2.0 # via pytest -multidict==4.7.4 # via -r requirements/_base.txt, aiohttp, yarl -packaging==20.3 # via pytest +more-itertools==8.4.0 # via pytest +multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl +orjson==3.1.2 # via -r requirements/_base.txt, fastapi +packaging==20.4 # via pytest pluggy==0.13.1 # via pytest promise==2.3 # via -r requirements/_base.txt, graphql-core, graphql-relay -psycopg2-binary==2.8.4 # via -r requirements/_base.txt, aiopg, sqlalchemy +psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy ptvsd==4.3.2 # via -r requirements/_test.in -py==1.8.1 # via pytest -pydantic==1.4 # via -r requirements/_base.txt, fastapi -pylint==2.4.4 # via -r requirements/_test.in -pyparsing==2.4.6 # via packaging +py==1.9.0 # via pytest +pydantic[dotenv]==1.5.1 # via -r requirements/_base.txt, fastapi +pylint==2.5.3 # via -r requirements/_test.in +pyparsing==2.4.7 # via packaging pytest-aiohttp==0.3.0 # via -r requirements/_test.in -pytest-cov==2.8.1 # via -r requirements/_test.in +pytest-cov==2.10.0 # via -r requirements/_test.in pytest-docker==0.7.2 # via -r requirements/_test.in -pytest-mock==2.0.0 # via -r requirements/_test.in +pytest-mock==3.1.1 # via -r requirements/_test.in pytest-runner==5.2 # via -r requirements/_test.in pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-mock -python-dateutil==2.8.1 # via faker +python-dateutil==2.8.1 # via alembic, faker +python-dotenv==0.13.0 # via -r requirements/_base.txt, pydantic +python-editor==1.0.4 # via alembic python-multipart==0.0.5 # via -r requirements/_base.txt, fastapi -pyyaml==5.3 # via -r requirements/_base.txt, fastapi -requests==2.22.0 # via -r requirements/_base.txt, codecov, coveralls, fastapi +pyyaml==5.3.1 # via -r requirements/_base.txt, fastapi +requests==2.24.0 # via -r requirements/_base.txt, codecov, coveralls, docker, fastapi rx==1.6.1 # via -r requirements/_base.txt, graphql-core -six==1.14.0 # via -r 
requirements/_base.txt, astroid, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity -sqlalchemy[postgresql_psycopg2binary]==1.3.13 # via -r requirements/_base.txt, aiopg -starlette==0.12.9 # via -r requirements/_base.txt, fastapi -tenacity==6.0.0 # via -r requirements/_base.txt +six==1.15.0 # via -r requirements/_base.txt, astroid, docker, graphene, graphql-core, graphql-relay, packaging, promise, python-dateutil, python-multipart, tenacity, websocket-client +sqlalchemy[postgresql_psycopg2binary]==1.3.17 # via -r requirements/_base.txt, aiopg, alembic +starlette==0.13.4 # via -r requirements/_base.txt, fastapi +tenacity==6.2.0 # via -r requirements/_base.txt text-unidecode==1.3 # via faker +toml==0.10.1 # via pylint typed-ast==1.4.1 # via astroid -typing-extensions==3.7.4.1 # via aiohttp -ujson==1.35 # via -r requirements/_base.txt, fastapi -urllib3==1.25.8 # via -r requirements/_base.txt, requests -uvicorn==0.11.2 # via -r requirements/_base.txt, fastapi +typing-extensions==3.7.4.2 # via aiohttp +ujson==3.0.0 # via -r requirements/_base.txt, fastapi +urllib3==1.25.9 # via -r requirements/_base.txt, requests +uvicorn==0.11.5 # via -r requirements/_base.txt, fastapi uvloop==0.14.0 # via -r requirements/_base.txt, uvicorn -wcwidth==0.1.8 # via pytest +wcwidth==0.2.5 # via pytest +websocket-client==0.57.0 # via docker websockets==8.1 # via -r requirements/_base.txt, uvicorn -wrapt==1.11.2 # via astroid +wrapt==1.12.1 # via astroid yarl==1.4.2 # via -r requirements/_base.txt, aiohttp zipp==3.1.0 # via importlib-metadata diff --git a/services/catalog/requirements/dev.txt b/services/catalog/requirements/dev.txt index ebdfa55d837..99cc51a1cef 100644 --- a/services/catalog/requirements/dev.txt +++ b/services/catalog/requirements/dev.txt @@ -10,7 +10,7 @@ -r _test.txt # installs this repo's packages --e ../../packages/postgres-database/ +-e ../../packages/postgres-database/[migration] -e ../../packages/pytest-simcore/ # 
installs current package diff --git a/services/catalog/setup.cfg b/services/catalog/setup.cfg index d993000b975..004829b443e 100644 --- a/services/catalog/setup.cfg +++ b/services/catalog/setup.cfg @@ -7,7 +7,3 @@ tag = False [bumpversion:file:VERSION] [bumpversion:file:src/simcore_service_catalog/api/v0/openapi.yaml] - -[bumpversion:file:.cookiecutterrc] -search = '{current_version}' -replace = '{new_version}' diff --git a/services/catalog/src/simcore_service_catalog/__main__.py b/services/catalog/src/simcore_service_catalog/__main__.py index 10fa4f0890e..0228973c881 100644 --- a/services/catalog/src/simcore_service_catalog/__main__.py +++ b/services/catalog/src/simcore_service_catalog/__main__.py @@ -3,14 +3,32 @@ `python -m simcore_service_catalog ...` """ +import sys +from pathlib import Path + import uvicorn +from fastapi import FastAPI + +from simcore_service_catalog.core.application import init_app +from simcore_service_catalog.core.settings import AppSettings, BootModeEnum + +current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + -from simcore_service_catalog.config import uvicorn_settings -from simcore_service_catalog.main import app +# SINGLETON FastAPI app +the_app: FastAPI = init_app() def main(): - uvicorn.run(app, **uvicorn_settings) + cfg: AppSettings = the_app.state.settings + uvicorn.run( + "simcore_service_catalog.__main__:the_app", + host=cfg.host, + port=cfg.port, + reload=cfg.boot_mode == BootModeEnum.development, + reload_dirs=[current_dir,], + log_level=cfg.log_level_name.lower(), + ) if __name__ == "__main__": diff --git a/services/catalog/src/simcore_service_catalog/__version__.py b/services/catalog/src/simcore_service_catalog/__version__.py index 3e72d0de708..ac32db2ddd2 100644 --- a/services/catalog/src/simcore_service_catalog/__version__.py +++ b/services/catalog/src/simcore_service_catalog/__version__.py @@ -5,4 +5,4 @@ major, minor, patch = __version__.split(".") api_version = __version__ 
-api_version_prefix: str = f"v{major}" +api_vtag: str = f"v{major}" diff --git a/services/catalog/src/simcore_service_catalog/endpoints/__init__.py b/services/catalog/src/simcore_service_catalog/api/__init__.py similarity index 100% rename from services/catalog/src/simcore_service_catalog/endpoints/__init__.py rename to services/catalog/src/simcore_service_catalog/api/__init__.py diff --git a/services/catalog/src/simcore_service_catalog/schemas/__init__.py b/services/catalog/src/simcore_service_catalog/api/dependencies/__init__.py similarity index 100% rename from services/catalog/src/simcore_service_catalog/schemas/__init__.py rename to services/catalog/src/simcore_service_catalog/api/dependencies/__init__.py diff --git a/services/catalog/src/simcore_service_catalog/api/dependencies/database.py b/services/catalog/src/simcore_service_catalog/api/dependencies/database.py new file mode 100644 index 00000000000..ef94b71e26c --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/api/dependencies/database.py @@ -0,0 +1,21 @@ +from typing import AsyncGenerator, Callable, Type + +from aiopg.sa import Engine +from fastapi import Depends +from fastapi.requests import Request + +from ...db.repositories import BaseRepository + + +def _get_db_engine(request: Request) -> Engine: + return request.app.state.engine + + +def get_repository(repo_type: Type[BaseRepository]) -> Callable: + async def _get_repo( + engine: Engine = Depends(_get_db_engine), + ) -> AsyncGenerator[BaseRepository, None]: + async with engine.acquire() as conn: + yield repo_type(conn) + + return _get_repo diff --git a/services/catalog/src/simcore_service_catalog/api/root.py b/services/catalog/src/simcore_service_catalog/api/root.py new file mode 100644 index 00000000000..a17002b1378 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/api/root.py @@ -0,0 +1,10 @@ +from fastapi import APIRouter + +from .routes import dags, health, meta + +router = APIRouter() 
+router.include_router(health.router) + +# API +router.include_router(meta.router, tags=["meta"], prefix="/meta") +router.include_router(dags.router, tags=["DAG"], prefix="/dags") diff --git a/services/catalog/src/simcore_service_catalog/utils/__init__.py b/services/catalog/src/simcore_service_catalog/api/routes/__init__.py similarity index 100% rename from services/catalog/src/simcore_service_catalog/utils/__init__.py rename to services/catalog/src/simcore_service_catalog/api/routes/__init__.py diff --git a/services/catalog/src/simcore_service_catalog/endpoints/dags.py b/services/catalog/src/simcore_service_catalog/api/routes/dags.py similarity index 68% rename from services/catalog/src/simcore_service_catalog/endpoints/dags.py rename to services/catalog/src/simcore_service_catalog/api/routes/dags.py index a75c2ed6eb1..a373a987f2f 100644 --- a/services/catalog/src/simcore_service_catalog/endpoints/dags.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/dags.py @@ -9,15 +9,15 @@ HTTP_501_NOT_IMPLEMENTED, ) -from .. 
import db -from ..schemas import schemas_dags as schemas -from ..store import crud_dags as crud +from ...db.repositories.dags import DAGsRepository +from ...models.schemas.dag import DAGIn, DAGOut +from ..dependencies.database import get_repository router = APIRouter() log = logging.getLogger(__name__) -@router.get("/dags", response_model=List[schemas.DAGOut]) +@router.get("", response_model=List[DAGOut]) async def list_dags( page_token: Optional[str] = Query( None, description="Requests a specific page of the list results" @@ -28,7 +28,7 @@ async def list_dags( order_by: Optional[str] = Query( None, description="Sorts in ascending order comma-separated fields" ), - conn: db.SAConnection = Depends(db.get_cnx), + dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)), ): # List is suited to data from a single collection that is bounded in size and not cached @@ -41,18 +41,18 @@ async def list_dags( # TODO: filter: https://cloud.google.com/apis/design/naming_convention#list_filter_field # SEE response: https://cloud.google.com/apis/design/naming_convention#list_response log.debug("%s %s %s", page_token, page_size, order_by) - dags = await crud.list_dags(conn) + dags = await dags_repo.list_dags() return dags -@router.get("/dags:batchGet") +@router.get(":batchGet") async def batch_get_dags(): raise HTTPException( status_code=HTTP_501_NOT_IMPLEMENTED, detail="Still not implemented" ) -@router.get("/dags:search") +@router.get(":search") async def search_dags(): # A method that takes multiple resource IDs and returns an object for each of those IDs # Alternative to List for fetching data that does not adhere to List semantics, such as services.search. 
@@ -62,20 +62,23 @@ async def search_dags(): ) -@router.get("/dags/{dag_id}", response_model=schemas.DAGOut) -async def get_dag(dag_id: int, conn: db.SAConnection = Depends(db.get_cnx)): - dag = await crud.get_dag(conn, dag_id) +@router.get("/{dag_id}", response_model=DAGOut) +async def get_dag( + dag_id: int, dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)), +): + dag = await dags_repo.get_dag(dag_id) return dag @router.post( - "/dags", + "", response_model=int, status_code=HTTP_201_CREATED, response_description="Successfully created", ) async def create_dag( - dag: schemas.DAGIn = Body(...), conn: db.SAConnection = Depends(db.get_cnx) + dag: DAGIn = Body(...), + dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)), ): assert dag # nosec @@ -87,40 +90,42 @@ async def create_dag( ) # FIXME: conversion DAG (issue with workbench being json in orm and dict in schema) - dag_id = await crud.create_dag(conn, dag) + dag_id = await dags_repo.create_dag(dag) # TODO: no need to return since there is not extra info?, perhaps return return dag_id -@router.patch("/dags/{dag_id}", response_model=schemas.DAGOut) +@router.patch("/{dag_id}", response_model=DAGOut) async def udpate_dag( dag_id: int, - dag: schemas.DAGIn = Body(None), - conn: db.SAConnection = Depends(db.get_cnx), + dag: DAGIn = Body(None), + dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)), ): - async with conn.begin(): - await crud.update_dag(conn, dag_id, dag) - updated_dag = await crud.get_dag(conn, dag_id) + async with dags_repo.connection.begin(): + await dags_repo.update_dag(dag_id, dag) + updated_dag = await dags_repo.get_dag(dag_id) return updated_dag -@router.put("/dags/{dag_id}", response_model=Optional[schemas.DAGOut]) +@router.put("/{dag_id}", response_model=Optional[DAGOut]) async def replace_dag( dag_id: int, - dag: schemas.DAGIn = Body(...), - conn: db.SAConnection = Depends(db.get_cnx), + dag: DAGIn = Body(...), + dags_repo: DAGsRepository = 
Depends(get_repository(DAGsRepository)), ): - await crud.replace_dag(conn, dag_id, dag) + await dags_repo.replace_dag(dag_id, dag) @router.delete( - "/dags/{dag_id}", + "/{dag_id}", status_code=HTTP_204_NO_CONTENT, response_description="Successfully deleted", ) -async def delete_dag(dag_id: int, conn: db.SAConnection = Depends(db.get_cnx)): +async def delete_dag( + dag_id: int, dags_repo: DAGsRepository = Depends(get_repository(DAGsRepository)), +): # If the Delete method immediately removes the resource, it should return an empty response. # If the Delete method initiates a long-running operation, it should return the long-running operation. # If the Delete method only marks the resource as being deleted, it should return the updated resource. - await crud.delete_dag(conn, dag_id) + await dags_repo.delete_dag(dag_id) diff --git a/services/catalog/src/simcore_service_catalog/api/routes/health.py b/services/catalog/src/simcore_service_catalog/api/routes/health.py new file mode 100644 index 00000000000..d8b50c5f504 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/api/routes/health.py @@ -0,0 +1,8 @@ +from fastapi import APIRouter + +router = APIRouter() + + +@router.get("/", include_in_schema=False) +async def check_service_health(): + return ":-)" diff --git a/services/catalog/src/simcore_service_catalog/api/routes/meta.py b/services/catalog/src/simcore_service_catalog/api/routes/meta.py new file mode 100644 index 00000000000..903ce9666b1 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/api/routes/meta.py @@ -0,0 +1,15 @@ +from fastapi import APIRouter + +from ...__version__ import __version__, api_version, api_vtag +from ...models.schemas.meta import Meta + +router = APIRouter() + + +@router.get("", response_model=Meta) +async def get_service_metadata(): + return Meta( + name=__name__.split(".")[0], + version=api_version, + released={api_vtag: api_version}, + ) diff --git 
a/services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml b/services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml deleted file mode 100644 index 41fd31693ca..00000000000 --- a/services/catalog/src/simcore_service_catalog/api/v0/openapi.yaml +++ /dev/null @@ -1,452 +0,0 @@ -components: - schemas: - Connection: - properties: - nodeUuid: - title: Nodeuuid - type: string - output: - title: Output - type: string - title: Connection - type: object - DAGIn: - properties: - contact: - format: email - title: Contact - type: string - description: - title: Description - type: string - key: - example: simcore/services/frontend/nodes-group/macros/1 - pattern: ^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\s]+)+$ - title: Key - type: string - name: - title: Name - type: string - version: - example: 1.0.0 - pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ - title: Version - type: string - workbench: - additionalProperties: - $ref: '#/components/schemas/Node' - title: Workbench - type: object - required: - - key - - version - - name - title: DAGIn - type: object - DAGOut: - properties: - contact: - format: email - title: Contact - type: string - description: - title: Description - type: string - id: - title: Id - type: integer - key: - example: simcore/services/frontend/nodes-group/macros/1 - pattern: ^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\s]+)+$ - title: Key - type: string - name: - title: Name - type: string - version: - example: 1.0.0 - pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ - title: Version - type: string - workbench: - additionalProperties: - $ref: '#/components/schemas/Node' - title: Workbench - type: object - required: - - key - - version - - name - - id - title: DAGOut - type: object - 
FilePickerOutput: - properties: - dataset: - title: Dataset - type: string - label: - title: Label - type: string - path: - title: Path - type: string - store: - anyOf: - - type: string - - type: integer - title: Store - required: - - store - - path - - label - title: FilePickerOutput - type: object - HTTPValidationError: - properties: - detail: - items: - $ref: '#/components/schemas/ValidationError' - title: Detail - type: array - title: HTTPValidationError - type: object - Node: - properties: - inputAccess: - additionalProperties: - enum: - - ReadAndWrite - - Invisible - - ReadOnly - type: string - title: Inputaccess - type: object - inputNodes: - default: [] - items: - type: string - title: Inputnodes - type: array - inputs: - additionalProperties: - anyOf: - - type: integer - - type: string - - type: number - - $ref: '#/components/schemas/Connection' - - $ref: '#/components/schemas/FilePickerOutput' - title: Inputs - type: object - key: - example: simcore/services/comp/sleeper - pattern: ^(simcore)/(services)(/demodec)?/(comp|dynamic|frontend)(/[^\s]+)+$ - title: Key - type: string - label: - title: Label - type: string - outputNode: - deprecated: true - title: Outputnode - type: boolean - outputNodes: - default: [] - items: - type: string - title: Outputnodes - type: array - outputs: - additionalProperties: - anyOf: - - type: integer - - type: string - - type: number - - $ref: '#/components/schemas/FilePickerOutput' - title: Outputs - type: object - parent: - description: Parent's (group-nodes') node ID s. 
- example: nodeUUid1 - title: Parent - type: string - position: - $ref: '#/components/schemas/Position' - progress: - default: 0 - maximum: 100.0 - minimum: 0.0 - title: Progress - type: number - thumbnail: - title: Thumbnail - type: string - version: - example: 6.2.0 - pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ - title: Version - type: string - required: - - key - - version - - label - - position - title: Node - type: object - Position: - properties: - x: - title: X - type: integer - y: - title: Y - type: integer - required: - - x - - y - title: Position - type: object - ValidationError: - properties: - loc: - items: - type: string - title: Location - type: array - msg: - title: Message - type: string - type: - title: Error Type - type: string - required: - - loc - - msg - - type - title: ValidationError - type: object -info: - description: Manages and maintains a **catalog** of all published components (e.g. 
- macro-algorithms, scripts, etc) - title: Components Catalog Service - version: 0.3.2 -openapi: 3.0.2 -paths: - /: - get: - operationId: healthcheck__get - responses: - '200': - content: - application/json: - schema: {} - description: Successful Response - summary: Healthcheck - tags: - - diagnostics - /v0/dags: - get: - operationId: list_dags_v0_dags_get - parameters: - - description: Requests a specific page of the list results - in: query - name: page_token - required: false - schema: - description: Requests a specific page of the list results - title: Page Token - type: string - - description: Maximum number of results to be returned by the server - in: query - name: page_size - required: false - schema: - default: 0 - description: Maximum number of results to be returned by the server - minimum: 0.0 - title: Page Size - type: integer - - description: Sorts in ascending order comma-separated fields - in: query - name: order_by - required: false - schema: - description: Sorts in ascending order comma-separated fields - title: Order By - type: string - responses: - '200': - content: - application/json: - schema: - items: - $ref: '#/components/schemas/DAGOut' - title: Response List Dags V0 Dags Get - type: array - description: Successful Response - '422': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - description: Validation Error - summary: List Dags - tags: - - dags - post: - operationId: create_dag_v0_dags_post - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/DAGIn' - required: true - responses: - '201': - content: - application/json: - schema: - title: Response Create Dag V0 Dags Post - type: integer - description: Successfully created - '422': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - description: Validation Error - summary: Create Dag - tags: - - dags - /v0/dags/{dag_id}: - delete: - operationId: 
delete_dag_v0_dags__dag_id__delete - parameters: - - in: path - name: dag_id - required: true - schema: - title: Dag Id - type: integer - responses: - '204': - description: Successfully deleted - '422': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - description: Validation Error - summary: Delete Dag - tags: - - dags - get: - operationId: get_dag_v0_dags__dag_id__get - parameters: - - in: path - name: dag_id - required: true - schema: - title: Dag Id - type: integer - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/DAGOut' - description: Successful Response - '422': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - description: Validation Error - summary: Get Dag - tags: - - dags - patch: - operationId: udpate_dag_v0_dags__dag_id__patch - parameters: - - in: path - name: dag_id - required: true - schema: - title: Dag Id - type: integer - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/DAGIn' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/DAGOut' - description: Successful Response - '422': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - description: Validation Error - summary: Udpate Dag - tags: - - dags - put: - operationId: replace_dag_v0_dags__dag_id__put - parameters: - - in: path - name: dag_id - required: true - schema: - title: Dag Id - type: integer - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/DAGIn' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/DAGOut' - description: Successful Response - '422': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPValidationError' - description: Validation Error - summary: Replace Dag - tags: - - dags - /v0/dags:batchGet: - get: - 
operationId: batch_get_dags_v0_dags_batchGet_get - responses: - '200': - content: - application/json: - schema: {} - description: Successful Response - summary: Batch Get Dags - tags: - - dags - /v0/dags:search: - get: - operationId: search_dags_v0_dags_search_get - responses: - '200': - content: - application/json: - schema: {} - description: Successful Response - summary: Search Dags - tags: - - dags diff --git a/services/catalog/src/simcore_service_catalog/config.py b/services/catalog/src/simcore_service_catalog/config.py deleted file mode 100644 index a04b5d84611..00000000000 --- a/services/catalog/src/simcore_service_catalog/config.py +++ /dev/null @@ -1,56 +0,0 @@ -""" - - NOTE: CONS of programmatic config - - not testing-friendly since variables set upon import. Must reload when fixture is setup -""" -import logging -import os - -from .utils.helpers import cast_to_bool - -# DOCKER -is_container_environ: bool = "SC_BOOT_MODE" in os.environ -is_devel = os.environ.get("SC_BUILD_TARGET") == "development" -is_prod = os.environ.get("SC_BUILD_TARGET") == "production" - - -# LOGGING -log_level_name = os.environ.get("LOGLEVEL", "debug").upper() -log_level = getattr(logging, log_level_name.upper()) -log_formatter = logging.Formatter("%(levelname)s: %(message)s [%(name)s:%(lineno)d]") - -logging.root.setLevel(log_level) -if logging.root.handlers: - logging.root.handlers[0].setFormatter(log_formatter) - - -# TEST MODE -is_testing_enabled: bool = cast_to_bool(os.environ.get("TESTING", "true")) - - -# POSGRESS API -postgres_cfg: dict = { - "user": os.environ.get("POSTGRES_USER", "test"), - "password": os.environ.get("POSTGRES_PASSWORD", "test"), - "database": os.environ.get("POSTGRES_DB", "test"), - "host": os.environ.get("POSTGRES_HOST", "localhost"), - "port": int(os.environ.get("POSTGRES_PORT", "5432")), -} -postgres_dsn: str = "postgresql://{user}:{password}@{host}:{port}/{database}".format( - **postgres_cfg -) -postgres_cfg: dict = {**postgres_cfg, "uri": 
postgres_dsn} -init_tables: bool = cast_to_bool( - os.environ.get("POSTGRES_INIT_TABLES", "true" if is_devel else "false") -) - -# SERVER -# NOTE: https://www.uvicorn.org/settings/ -uvicorn_settings: dict = { - "host": "0.0.0.0" if is_container_environ else "127.0.0.1", # nosec - "port": 8000, - "log_level": log_level_name.lower(), -} - -# APPLICATION -app_context: dict = {} # FIXME: hate globals! diff --git a/services/catalog/src/simcore_service_catalog/core/__init__.py b/services/catalog/src/simcore_service_catalog/core/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py new file mode 100644 index 00000000000..3fe1f837297 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -0,0 +1,57 @@ +import logging +from typing import Optional + +from fastapi import FastAPI + +from ..__version__ import api_version, api_vtag +from ..api.root import router as api_router +from ..api.routes.health import router as health_router +from .events import create_start_app_handler, create_stop_app_handler +from .settings import AppSettings + +# from fastapi.exceptions import RequestValidationError +# from starlette.exceptions import HTTPException + +# from ..api.errors.http_error import http_error_handler +# from ..api.errors.validation_error import http422_error_handler + + +logger = logging.getLogger(__name__) + + +def init_app(settings: Optional[AppSettings] = None) -> FastAPI: + if settings is None: + settings = AppSettings.create_default() + + logging.basicConfig(level=settings.loglevel) + logging.root.setLevel(settings.loglevel) + + app = FastAPI( + debug=settings.debug, + title="Components Catalog Service", + # TODO: get here extended description from setup or the other way around + description="Manages and maintains a **catalog** of all published components (e.g. 
macro-algorithms, scripts, etc)", + version=api_version, + openapi_url=f"/api/{api_vtag}/openapi.json", + docs_url="/dev/docs", + redoc_url=None, # default disabled + ) + + logger.debug(settings) + app.state.settings = settings + + app.add_event_handler("startup", create_start_app_handler(app)) + app.add_event_handler("shutdown", create_stop_app_handler(app)) + + # app.add_exception_handler(HTTPException, http_error_handler) + # app.add_exception_handler(RequestValidationError, http422_error_handler) + + # Routing + + # healthcheck at / and at /v0/ + app.include_router(health_router) + + # api under /v* + app.include_router(api_router, prefix=f"/{api_vtag}") + + return app diff --git a/services/catalog/src/simcore_service_catalog/core/errors.py b/services/catalog/src/simcore_service_catalog/core/errors.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py new file mode 100644 index 00000000000..58d91eb81ab --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/core/events.py @@ -0,0 +1,38 @@ +import logging +from typing import Callable + +from fastapi import FastAPI + +from ..db.events import close_db_connection, connect_to_db +from ..services.remote_debug import setup_remote_debugging +from .settings import BootModeEnum + +logger = logging.getLogger(__name__) + + +def create_start_app_handler(app: FastAPI) -> Callable: + async def start_app() -> None: + logger.info("Application started") + + # setup connection to remote debugger (if applies) + setup_remote_debugging( + force_enabled=app.state.settings.boot_mode == BootModeEnum.debug + ) + + # setup connection to pg db + if app.state.settings.postgres.enabled: + await connect_to_db(app) + + return start_app + + +def create_stop_app_handler(app: FastAPI) -> Callable: + async def stop_app() -> None: + try: + logger.info("Application stopping") + if 
app.state.settings.postgres.enabled: + await close_db_connection(app) + except Exception: # pylint: disable=broad-except + logger.exception("Stopping application") + + return stop_app diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py new file mode 100644 index 00000000000..cf684359a5b --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -0,0 +1,84 @@ +import logging +from enum import Enum +from typing import Optional + +from pydantic import BaseSettings, Field, SecretStr, validator +from yarl import URL + + +class BootModeEnum(str, Enum): + debug = "debug-ptvsd" + production = "production" + development = "development" + + +class _CommonConfig: + case_sensitive = False + env_file = ".env" # SEE https://pydantic-docs.helpmanual.io/usage/settings/#dotenv-env-support + + +class PostgresSettings(BaseSettings): + enabled: bool = Field( + True, description="Enables/Disables connection with postgres service" + ) + user: str + password: SecretStr + db: str + host: str + port: int = 5432 + + minsize: int = 10 + maxsize: int = 10 + + @property + def dsn(self) -> URL: + return URL.build( + scheme="postgresql", + user=self.user, + password=self.password.get_secret_value(), + host=self.host, + port=self.port, + path=f"/{self.db}", + ) + + class Config(_CommonConfig): + env_prefix = "POSTGRES_" + + +class AppSettings(BaseSettings): + @classmethod + def create_default(cls) -> "AppSettings": + # This call triggers parsers + return cls(postgres=PostgresSettings()) + + # pylint: disable=no-self-use + # pylint: disable=no-self-argument + + # DOCKER + boot_mode: Optional[BootModeEnum] = Field(..., env="SC_BOOT_MODE") + + # LOGGING + log_level_name: str = Field("DEBUG", env="LOG_LEVEL") + + @validator("log_level_name") + def match_logging_level(cls, value) -> str: + try: + getattr(logging, value.upper()) + except AttributeError: + raise ValueError(f"{value.upper()} 
is not a valid level") + return value.upper() + + @property + def loglevel(self) -> int: + return getattr(logging, self.log_level_name) + + # POSTGRES + postgres: PostgresSettings + + # SERVICE SERVER (see : https://www.uvicorn.org/settings/) + host: str = "0.0.0.0" # nosec + port: int = 8000 + debug: bool = False # If True, debug tracebacks should be returned on errors. + + class Config(_CommonConfig): + env_prefix = "" diff --git a/services/catalog/src/simcore_service_catalog/db.py b/services/catalog/src/simcore_service_catalog/db.py deleted file mode 100644 index 675ec192395..00000000000 --- a/services/catalog/src/simcore_service_catalog/db.py +++ /dev/null @@ -1,60 +0,0 @@ -""" Access to postgres service - -""" -from typing import Optional - -import aiopg.sa -from aiopg.sa import Engine -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import ResultProxy, RowProxy -from fastapi import Depends -from sqlalchemy.sql.ddl import CreateTable - -from .config import app_context, postgres_dsn -from .orm import DAG, dags - - -# TODO: idealy context cleanup. This concept here? app-context Dependency? 
-async def setup_engine() -> Engine: - engine = await aiopg.sa.create_engine( - postgres_dsn, - # unique identifier per app - application_name=f"{__name__}_{id(app_context)}", - minsize=5, - maxsize=10, - ) - app_context["engine"] = engine - - return engine - - -async def teardown_engine() -> None: - engine = app_context["engine"] - engine.close() - await engine.wait_closed() - - -async def create_tables(conn: SAConnection): - # FIXME: this is dangerous since it enforces an empty table - await conn.execute(f"DROP TABLE IF EXISTS {DAG.__tablename__}") - await conn.execute(CreateTable(dags)) - - -def info(engine: Optional[Engine] = None): - engine = engine or get_engine() - props = "closed driver dsn freesize maxsize minsize name size timeout".split() - for p in props: - print(f"{p} = {getattr(engine, p)}") - - -def get_engine() -> Engine: - return app_context["engine"] - - -async def get_cnx(engine: Engine = Depends(get_engine)): - # TODO: problem here is retries?? - async with engine.acquire() as conn: - yield conn - - -__all__ = ("Engine", "ResultProxy", "RowProxy", "SAConnection") diff --git a/services/catalog/src/simcore_service_catalog/db/__init__.py b/services/catalog/src/simcore_service_catalog/db/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/db/errors.py b/services/catalog/src/simcore_service_catalog/db/errors.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/db/events.py b/services/catalog/src/simcore_service_catalog/db/events.py new file mode 100644 index 00000000000..eebdcb7c9d8 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/db/events.py @@ -0,0 +1,56 @@ +import logging +from io import StringIO + +from aiopg.sa import Engine, create_engine +from fastapi import FastAPI +from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed + +from ..core.settings import PostgresSettings + +logger 
= logging.getLogger(__name__) + + +ENGINE_ATTRS = "closed driver dsn freesize maxsize minsize name size timeout".split() + + +pg_retry_policy = dict( + wait=wait_fixed(5), + stop=stop_after_attempt(20), + before_sleep=before_sleep_log(logger, logging.WARNING), + reraise=True, +) + + +def _compose_info_on_engine(app: FastAPI) -> str: + engine = app.state.engine + stm = StringIO() + print("Setup engine:", end=" ", file=stm) + for attr in ENGINE_ATTRS: + print(f"{attr}={getattr(engine, attr)}", end="; ", file=stm) + return stm.getvalue() + + +@retry(**pg_retry_policy) +async def connect_to_db(app: FastAPI) -> None: + logger.debug("Connecting db ...") + + cfg: PostgresSettings = app.state.settings.postgres + engine: Engine = await create_engine( + str(cfg.dsn), + application_name=f"{__name__}_{id(app)}", # unique identifier per app + minsize=cfg.minsize, + maxsize=cfg.maxsize, + ) + logger.debug("Connected to %s", engine.dsn) + app.state.engine = engine + + logger.debug(_compose_info_on_engine(app)) + + +async def close_db_connection(app: FastAPI) -> None: + logger.debug("Disconnecting db ...") + + engine: Engine = app.state.engine + engine.close() + await engine.wait_closed() + logger.debug("Disconnected from %s", engine.dsn) diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/__init__.py b/services/catalog/src/simcore_service_catalog/db/repositories/__init__.py new file mode 100644 index 00000000000..a5eeffe1ff5 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/db/repositories/__init__.py @@ -0,0 +1 @@ +from ._base import BaseRepository diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/_base.py b/services/catalog/src/simcore_service_catalog/db/repositories/_base.py new file mode 100644 index 00000000000..81f04c0f7b5 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/db/repositories/_base.py @@ -0,0 +1,15 @@ +from aiopg.sa.connection import SAConnection + + +class BaseRepository: + """ + 
Repositories are instantiated on every request; +    all queries to the db within that request use the same connection
json.dumps(patch["workbench"]) + + stmt = sa.update(dags).values(**patch).where(dags.c.id == dag_id) + res = await self.connection.execute(stmt) + + # TODO: dev asserts + assert res.returns_rows == False # nosec + + async def delete_dag(self, dag_id: int): + stmt = sa.delete(dags).where(dags.c.id == dag_id) + await self.connection.execute(stmt) diff --git a/services/catalog/src/simcore_service_catalog/orm.py b/services/catalog/src/simcore_service_catalog/db/tables.py similarity index 62% rename from services/catalog/src/simcore_service_catalog/orm.py rename to services/catalog/src/simcore_service_catalog/db/tables.py index 01557d31052..be6fb11dfda 100644 --- a/services/catalog/src/simcore_service_catalog/orm.py +++ b/services/catalog/src/simcore_service_catalog/db/tables.py @@ -1,3 +1,3 @@ -from simcore_postgres_database.models.direct_acyclic_graphs import DAG, dags +from simcore_postgres_database.models.direct_acyclic_graphs import dags -__all__ = ["dags", "DAG"] +__all__ = ["dags"] diff --git a/services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py b/services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py deleted file mode 100644 index 15411e27b83..00000000000 --- a/services/catalog/src/simcore_service_catalog/endpoints/diagnostics.py +++ /dev/null @@ -1,17 +0,0 @@ -from fastapi import APIRouter - -from ..__version__ import __version__, api_version - -router = APIRouter() - - -@router.get("/") -async def healthcheck(): - # TODO: this is the entrypoint that docker uses to determin whether the service is starting, failed, etc... - # TODO: Reaching this point, what does it means? How is the health of this service? when shall it respond non-succesful? 
- return { - "name": __name__.split(".")[0], - "version": __version__, - "status": "SERVICE_RUNNING", - "api_version": api_version, - } diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py deleted file mode 100644 index 63a1ad27460..00000000000 --- a/services/catalog/src/simcore_service_catalog/main.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging -import os -import sys -from pathlib import Path - -import yaml -from fastapi import FastAPI -from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed - -from . import config as cfg -from .__version__ import api_version, api_version_prefix -from .db import create_tables, setup_engine, teardown_engine -from .endpoints import dags, diagnostics -from .utils.remote_debug import setup_remote_debugging - -current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent - -log = logging.getLogger(__name__) -pid = os.getpid() - -app = FastAPI( - debug=cfg.is_testing_enabled, - title="Components Catalog Service", - # TODO: get here extended description from setup - description="Manages and maintains a **catalog** of all published components (e.g. 
macro-algorithms, scripts, etc)", - version=api_version, - openapi_url=f"/{api_version_prefix}/openapi.json", -) - -# projects -app.include_router(diagnostics.router, tags=["diagnostics"]) -app.include_router(dags.router, tags=["dags"], prefix=f"/{api_version_prefix}") - - -def dump_openapi(): - oas_path: Path = current_dir / f"api/{api_version_prefix}/openapi.yaml" - log.info("Saving openapi schema to %s", oas_path) - with open(oas_path, "wt") as fh: - yaml.safe_dump(app.openapi(), fh) - - -@app.on_event("startup") -def startup_event(): - log.info("Starting app '%d' [%d]...", id(app), pid) - setup_remote_debugging() - - -@app.on_event("startup") -async def start_db(): - log.info("Initializing db") - - @retry( - wait=wait_fixed(5), - stop=stop_after_attempt(20), - before_sleep=before_sleep_log(log, logging.WARNING), - reraise=True, - ) - async def go(): - engine = await setup_engine() - assert engine # nosec - - if cfg.init_tables: - log.info("Creating db tables (testing mode)") - async with engine.acquire() as conn: - await create_tables(conn) - - await go() # NOTE: non-blocking this way - - -@app.on_event("shutdown") -def shutdown_event(): - log.info("Closing app '%d' [%d]...", id(app), pid) - - -@app.on_event("shutdown") -async def shutdown_db(): - log.info("Closing db") - await teardown_engine() diff --git a/services/catalog/src/simcore_service_catalog/models/__init__.py b/services/catalog/src/simcore_service_catalog/models/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/models/domain/__init__.py b/services/catalog/src/simcore_service_catalog/models/domain/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py b/services/catalog/src/simcore_service_catalog/models/domain/dag.py similarity index 71% rename from services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py rename to 
services/catalog/src/simcore_service_catalog/models/domain/dag.py index cfea6816e4b..0bef8016217 100644 --- a/services/catalog/src/simcore_service_catalog/schemas/schemas_dags.py +++ b/services/catalog/src/simcore_service_catalog/models/domain/dag.py @@ -16,18 +16,6 @@ class DAGBase(BaseModel): contact: Optional[EmailStr] -class DAGIn(DAGBase): - workbench: Optional[Dict[str, project.Node]] - - -class DAGInPath(DAGBase): - version: str - name: str - description: Optional[str] - contact: Optional[str] - workbench: Optional[Dict[str, project.Node]] - - class DAGAtDB(DAGBase): id: int workbench: Json[Dict[str, project.Node]] # pylint: disable=unsubscriptable-object @@ -36,5 +24,5 @@ class Config: orm_mode = True -class DAGOut(DAGAtDB): +class DAGData(DAGAtDB): workbench: Optional[Dict[str, project.Node]] diff --git a/services/catalog/src/simcore_service_catalog/schemas/project.py b/services/catalog/src/simcore_service_catalog/models/domain/project.py similarity index 100% rename from services/catalog/src/simcore_service_catalog/schemas/project.py rename to services/catalog/src/simcore_service_catalog/models/domain/project.py diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/__init__.py b/services/catalog/src/simcore_service_catalog/models/schemas/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/dag.py b/services/catalog/src/simcore_service_catalog/models/schemas/dag.py new file mode 100644 index 00000000000..392f3d3cdb3 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/models/schemas/dag.py @@ -0,0 +1,20 @@ +from typing import Dict, Optional + +from ..domain import project +from ..domain.dag import DAGBase, DAGData + + +class DAGIn(DAGBase): + workbench: Optional[Dict[str, project.Node]] + + +class DAGInPath(DAGBase): + version: str + name: str + description: Optional[str] + contact: Optional[str] + workbench: Optional[Dict[str, project.Node]] 
+ + +class DAGOut(DAGData): + pass diff --git a/services/catalog/src/simcore_service_catalog/models/schemas/meta.py b/services/catalog/src/simcore_service_catalog/models/schemas/meta.py new file mode 100644 index 00000000000..dd23eced796 --- /dev/null +++ b/services/catalog/src/simcore_service_catalog/models/schemas/meta.py @@ -0,0 +1,27 @@ +from typing import Dict, Optional + +from pydantic import BaseModel, Field, constr + +# TODO: review this RE +# use https://www.python.org/dev/peps/pep-0440/#version-scheme +# or https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions +# +VERSION_RE = r"^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$" +VersionStr = constr(regex=VERSION_RE) + + +class Meta(BaseModel): + name: str + version: VersionStr + released: Optional[Dict[str, VersionStr]] = Field( + None, description="Maps every route's path tag with a released version" + ) + + class Config: + schema_extra = { + "example": { + "name": "simcore_service_foo", + "version": "2.4.45", + "released": {"v1": "1.3.4", "v2": "2.4.45"}, + } + } diff --git a/services/catalog/src/simcore_service_catalog/services/__init__.py b/services/catalog/src/simcore_service_catalog/services/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/services/catalog/src/simcore_service_catalog/utils/remote_debug.py b/services/catalog/src/simcore_service_catalog/services/remote_debug.py similarity index 58% rename from services/catalog/src/simcore_service_catalog/utils/remote_debug.py rename to services/catalog/src/simcore_service_catalog/services/remote_debug.py index b29069c9fd8..d1ba21d5016 100644 --- a/services/catalog/src/simcore_service_catalog/utils/remote_debug.py +++ b/services/catalog/src/simcore_service_catalog/services/remote_debug.py @@ -1,38 +1,41 @@ """ Setup remote debugger with Python Tools for Visual Studio (PTVSD) 
""" + import logging import os -REMOTE_DEBUG_PORT = 3000 +logger = logging.getLogger(__name__) -log = logging.getLogger(__name__) +REMOTE_DEBUG_PORT = 3000 -def setup_remote_debugging(force_enabled=False): +def setup_remote_debugging(force_enabled=False, *, boot_mode=None): """ Programaticaly enables remote debugging if SC_BOOT_MODE==debug-ptvsd """ - boot_mode = os.environ.get("SC_BOOT_MODE") + boot_mode = boot_mode or os.environ.get("SC_BOOT_MODE") if boot_mode == "debug-ptvsd" or force_enabled: try: - log.debug("Enabling attach ptvsd ...") + logger.debug("Enabling attach ptvsd ...") # # SEE https://github.com/microsoft/ptvsd#enabling-debugging # import ptvsd ptvsd.enable_attach( - address=("0.0.0.0", REMOTE_DEBUG_PORT), redirect_output=True + address=("0.0.0.0", REMOTE_DEBUG_PORT), # nosec ) # nosec except ImportError: raise ValueError( "Cannot enable remote debugging. Please install ptvsd first" ) - log.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT) + logger.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT) else: - log.debug("Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode) + logger.debug( + "Booting without remote debugging since SC_BOOT_MODE=%s", boot_mode + ) __all__ = ["setup_remote_debugging"] diff --git a/services/catalog/src/simcore_service_catalog/store/__init__.py b/services/catalog/src/simcore_service_catalog/store/__init__.py deleted file mode 100644 index 848941974fd..00000000000 --- a/services/catalog/src/simcore_service_catalog/store/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" - Access to data stored in database -""" diff --git a/services/catalog/src/simcore_service_catalog/store/crud_dags.py b/services/catalog/src/simcore_service_catalog/store/crud_dags.py deleted file mode 100644 index a10815bb853..00000000000 --- a/services/catalog/src/simcore_service_catalog/store/crud_dags.py +++ /dev/null @@ -1,60 +0,0 @@ -import json -from typing import List, Optional - -import sqlalchemy as 
sa - -from .. import db, orm -from ..schemas import schemas_dags as schemas - - -async def list_dags(conn: db.SAConnection) -> List[schemas.DAGAtDB]: - dags = [] - async for row in conn.execute(orm.dags.select()): - if row: - dags.append(schemas.DAGAtDB(**row)) - return dags - - -async def get_dag(conn: db.SAConnection, dag_id: int) -> Optional[schemas.DAGAtDB]: - stmt = orm.dags.select().where(orm.dags.c.id == dag_id) - row: db.RowProxy = await (await conn.execute(stmt)).first() - if row: - return schemas.DAGAtDB(**row) - return None - - -async def create_dag(conn: db.SAConnection, dag: schemas.DAGIn): - stmt = orm.dags.insert().values( - workbench=json.dumps(dag.dict()["workbench"]), **dag.dict(exclude={"workbench"}) - ) - new_id: int = await (await conn.execute(stmt)).scalar() - return new_id - - -async def replace_dag(conn: db.SAConnection, dag_id: int, dag: schemas.DAGIn): - stmt = ( - orm.dags.update() - .values( - workbench=json.dumps(dag.dict()["workbench"]), - **dag.dict(exclude={"workbench"}) - ) - .where(orm.dags.c.id == dag_id) - ) - await conn.execute(stmt) - - -async def update_dag(conn: db.SAConnection, dag_id: int, dag: schemas.DAGIn): - patch = dag.dict(exclude_unset=True, exclude={"workbench"}) - if "workbench" in dag.__fields_set__: - patch["workbench"] = json.dumps(patch["workbench"]) - - stmt = sa.update(orm.dags).values(**patch).where(orm.dags.c.id == dag_id) - res = await conn.execute(stmt) - - # TODO: dev asserts - assert res.returns_rows == False # nosec - - -async def delete_dag(conn: db.SAConnection, dag_id: int): - stmt = sa.delete(orm.dags).where(orm.dags.c.id == dag_id) - await conn.execute(stmt) diff --git a/services/catalog/src/simcore_service_catalog/utils/helpers.py b/services/catalog/src/simcore_service_catalog/utils/helpers.py deleted file mode 100644 index 4d95766d28b..00000000000 --- a/services/catalog/src/simcore_service_catalog/utils/helpers.py +++ /dev/null @@ -1,2 +0,0 @@ -def cast_to_bool(value: str) -> bool: - return 
value.lower() in ["true", "1", "yes"] diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py index 3c222784c79..c934ce49f28 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -11,11 +11,16 @@ import simcore_service_catalog +pytest_plugins = ["pytest_simcore.postgres_service2"] + current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent +## FOLDER LAYOUT ------ + + @pytest.fixture(scope="session") -def project_slug_dir(): +def project_slug_dir() -> Path: folder = current_dir.parent.parent assert folder.exists() assert any(folder.glob("src/simcore_service_catalog")) @@ -23,7 +28,7 @@ def project_slug_dir(): @pytest.fixture(scope="session") -def package_dir(): +def installed_package_dir(): dirpath = Path(simcore_service_catalog.__file__).resolve().parent assert dirpath.exists() return dirpath @@ -49,6 +54,9 @@ def api_specs_dir(osparc_simcore_root_dir): return specs_dir +# FAKE DATA ------ + + @pytest.fixture() def fake_data_dag_in() -> Dict: DAG_DATA_IN_DICT = { diff --git a/services/catalog/tests/unit/test_package.py b/services/catalog/tests/unit/test_package.py index 3c4e17d5671..11121fc8d4d 100644 --- a/services/catalog/tests/unit/test_package.py +++ b/services/catalog/tests/unit/test_package.py @@ -10,8 +10,6 @@ from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing -# from simcore_service_catalog.__main__ import main - @pytest.fixture def pylintrc(project_slug_dir, osparc_simcore_root_dir): @@ -22,25 +20,14 @@ def pylintrc(project_slug_dir, osparc_simcore_root_dir): return pylintrc -def test_run_pylint(pylintrc, package_dir): - assert_pylint_is_passing(pylintrc=pylintrc, package_dir=package_dir) - - -# FIXME: main entrypoint -# def test_main(here): # pylint: disable=unused-variable -# """ -# Checks cli in place -# """ -# with pytest.raises(SystemExit) as excinfo: -# main("--help".split()) -# -# assert excinfo.value.code == 0 
+def test_run_pylint(pylintrc, installed_package_dir): + assert_pylint_is_passing(pylintrc=pylintrc, package_dir=installed_package_dir) -def test_no_pdbs_in_place(package_dir): +def test_no_pdbs_in_place(installed_package_dir): MATCH = re.compile(r"pdb.set_trace()") EXCLUDE = ["__pycache__", ".git"] - for root, dirs, files in os.walk(package_dir): + for root, dirs, files in os.walk(installed_package_dir): for name in files: if name.endswith(".py"): pypth = Path(root) / name diff --git a/services/catalog/tests/unit/test_schemas.py b/services/catalog/tests/unit/test_schemas.py index be56b2503be..0cfe82f8042 100644 --- a/services/catalog/tests/unit/test_schemas.py +++ b/services/catalog/tests/unit/test_schemas.py @@ -7,28 +7,16 @@ import pytest -from simcore_service_catalog.orm import DAG -from simcore_service_catalog.schemas import schemas_dags - -# from typing import Optional, TypeVar, Generic -# from pydantic import GenericModel, BaseModel - -# DataT = TypeVar('DataT') - -# class Error(BaseModel): -# code: int -# message: str - - -# class Envelope(GenericModel, Generic[DataT]): -# data: Optional[DataT] -# error: Optional[Error] +import simcore_postgres_database.models.direct_acyclic_graphs as orm +from simcore_service_catalog.db import tables +from simcore_service_catalog.models.domain.dag import DAGAtDB +from simcore_service_catalog.models.schemas.dag import DAGIn, DAGOut @pytest.mark.skip(reason="DEV") def test_dev(): - dag_in = schemas_dags.DAGIn( + dag_in = DAGIn( key="simcore/services/frontend/nodes-group/macros/", version="1.0.0", name="foo" ) assert "key" in dag_in.__fields_set__ @@ -44,22 +32,22 @@ def test_dev(): print(dag_in.dict(exclude_none=True)) -def test_api_in_2_orm(fake_data_dag_in): +def test_api_in_to_orm(fake_data_dag_in): # dag in to db - dag_in = schemas_dags.DAGIn(**fake_data_dag_in) + dag_in = DAGIn(**fake_data_dag_in) # TODO: create DAG.from_api( :DAGIn) # SEE crud_dags.create_dag - selection = 
set(DAG.__table__.columns.keys()).remove("workbench") - dag_orm = DAG( + selection = set(tables.dags.columns.keys()).remove("workbench") + dag_orm = orm.DAG( id=1, workbench=json.dumps(fake_data_dag_in["workbench"]), **dag_in.dict(include=selection, exclude={"workbench"}), ) -def test_orm_2_api_out(fake_data_dag_in): - dag_orm = DAG( +def test_orm_to_api_out(fake_data_dag_in): + dag_orm = orm.DAG( id=1, key="simcore/services/comp/foo", version="1.0.0", @@ -69,8 +57,8 @@ def test_orm_2_api_out(fake_data_dag_in): workbench=json.dumps(fake_data_dag_in["workbench"]), ) - dag_db = schemas_dags.DAGAtDB.from_orm(dag_orm) + dag_db = DAGAtDB.from_orm(dag_orm) assert type(dag_db.workbench) == dict - dag_out = schemas_dags.DAGOut(**dag_db.dict()) - assert dag_out.id == 1 + dag_out = DAGOut(**dag_db.dict()) + assert dag_out.id == 1 # pylint: disable=no-member diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 58fa505758c..6c17c38c60a 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -2,73 +2,41 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import importlib -import os import sys from pathlib import Path +from typing import Dict import pytest -import sqlalchemy as sa +from fastapi import FastAPI +from starlette.testclient import TestClient -import simcore_service_catalog.config +from simcore_service_catalog.core.application import init_app current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @pytest.fixture(scope="session") -def environ_context(): - keep = os.environ.copy() +def test_docker_compose_file() -> Path: + # OVERRIDES pytest_simcore.postgres_service2.test_docker_compose_file + return current_dir / "docker-compose.yml" - # config database - os.environ["POSTGRES_DB"] = "test" - os.environ["POSTGRES_USER"] = "test" - os.environ["POSTGRES_PASSWORD"] = "test" - 
os.environ["POSTGRES_HOST"] = "127.0.0.1" - os.environ["POSTGRES_PORT"] = "5432" - os.environ["POSTGRES_INIT_TABLES"] = "True" - os.environ["TESTING"] = "True" +@pytest.fixture +def app( + monkeypatch, + test_environment: Dict[str, str], # pytest_simcore.postgres_service2 + apply_migration, # pytest_simcore.postgres_service2 +) -> FastAPI: - # FIXME: dirty trick to update configuration with these environs! WARNING: might have side effects - importlib.reload(simcore_service_catalog.config) + # Emulates environ so settings can get config + for key, value in test_environment.items(): + monkeypatch.setenv(key, value) - yield + app = init_app() + yield app - os.environ = keep - -@pytest.fixture(scope="session") -def docker_compose_file(environ_context): - """ Overrides pytest-docker fixture """ - - # docker-compose reads these environs - file_path = current_dir / "docker-compose.yml" - assert file_path.exists() - - yield str(file_path) - - -def is_postgres_responsive(url: str): - """Check if something responds to ``url`` """ - try: - engine = sa.create_engine(url) - conn = engine.connect() - conn.close() - except sa.exc.OperationalError: - return False - return True - - -@pytest.fixture(scope="session") -def postgres_service(docker_services, docker_ip, environ_context): - - url = "postgresql://{e[POSTGRES_USER]}:{e[POSTGRES_PASSWORD]}@{e[POSTGRES_HOST]}:{e[POSTGRES_PORT]}/{e[POSTGRES_DB]}".format( - e=os.environ - ) - - # Wait until service is responsive. 
- docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(url), timeout=30.0, pause=0.1, - ) - - return url +@pytest.fixture +def client(app) -> TestClient: + with TestClient(app) as cli: + yield cli diff --git a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py index 9a70345b589..948c14f0b58 100644 --- a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py +++ b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py @@ -4,25 +4,22 @@ from typing import List -import pytest -from starlette.testclient import TestClient - -# TODO: app is init globally ... which is bad! -from simcore_service_catalog.main import api_version, app - - -@pytest.fixture -def client(environ_context, postgres_service): - # TODO: create new web-app everyt - with TestClient(app) as cli: - yield cli +from simcore_service_catalog.__version__ import api_version +from simcore_service_catalog.models.schemas.meta import Meta def test_read_healthcheck(client): response = client.get("/") assert response.status_code == 200 - assert "api_version" in response.json() - assert response.json()["api_version"] == api_version + assert response.text == '":-)"' + + +def test_read_meta(client): + response = client.get("/v0/meta") + assert response.status_code == 200 + meta = Meta(**response.json()) + assert meta.version == api_version + assert meta.name == "simcore_service_catalog" def test_list_dags(client): diff --git a/services/director/Dockerfile b/services/director/Dockerfile index a0acda52b7e..cbc0627f1d7 100644 --- a/services/director/Dockerfile +++ b/services/director/Dockerfile @@ -1,5 +1,5 @@ ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim as base +FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: # cd sercices/director @@ -72,7 +72,7 @@ RUN apt-get update &&\ RUN python -m venv ${VIRTUAL_ENV} RUN pip --no-cache-dir install --upgrade \ - pip~=20.0.2 \ + pip~=20.1.1 \ 
wheel \ setuptools diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 7d5492cdb78..c4876264f0f 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -9,7 +9,8 @@ services: api-server: environment: - SC_BOOT_MODE=debug-ptvsd - - LOGLEVEL=debug + - LOG_LEVEL=debug + - DEBUG=true volumes: - ./api-server:/devel/services/api-server - ../packages:/devel/packages @@ -17,8 +18,8 @@ services: catalog: environment: - SC_BOOT_MODE=debug-ptvsd - - TESTING=true - - LOGLEVEL=debug + - LOG_LEVEL=debug + - DEBUG=true volumes: - ./catalog:/devel/services/catalog - ../packages:/devel/packages diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index b0b813ad346..33e07664f0c 100644 --- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -18,8 +18,6 @@ services: - "3006:3000" catalog: - environment: - - SC_BOOT_MODE=${SC_BOOT_MODE:-default} ports: - "8005:8000" - "3005:3000" diff --git a/services/docker-compose.yml b/services/docker-compose.yml index f2da6db23c5..5ef9124a6d1 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -36,8 +36,7 @@ services: - POSTGRES_DB=${POSTGRES_DB} - POSTGRES_HOST=${POSTGRES_HOST} - POSTGRES_PORT=${POSTGRES_PORT} - - TESTING=false - - LOGLEVEL=${LOG_LEVEL:-WARNING} + - LOG_LEVEL=${LOG_LEVEL:-WARNING} depends_on: - postgres networks: diff --git a/services/sidecar/Dockerfile b/services/sidecar/Dockerfile index 50aec0e82bc..b74a30d7759 100644 --- a/services/sidecar/Dockerfile +++ b/services/sidecar/Dockerfile @@ -1,5 +1,5 @@ ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim as base +FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: # cd sercices/sidecar @@ -61,7 +61,7 @@ RUN apt-get update &&\ RUN python -m venv ${VIRTUAL_ENV} RUN pip --no-cache-dir install --upgrade \ - pip~=20.0.2 \ + pip~=20.1.1 \ wheel \ setuptools diff --git a/services/storage/Dockerfile 
b/services/storage/Dockerfile index 512ff02b9e6..99dfd140d6c 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -56,7 +56,7 @@ RUN apk add --no-cache \ linux-headers RUN $SC_PIP install --upgrade \ - pip~=20.0.2 \ + pip~=20.1.1 \ wheel \ setuptools diff --git a/services/web/Dockerfile b/services/web/Dockerfile index 2ecae9ea643..e38b0474338 100644 --- a/services/web/Dockerfile +++ b/services/web/Dockerfile @@ -1,5 +1,5 @@ ARG PYTHON_VERSION="3.6.10" -FROM python:${PYTHON_VERSION}-slim as base +FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: # cd sercices/web @@ -79,7 +79,7 @@ RUN apt-get update &&\ RUN python -m venv ${VIRTUAL_ENV} RUN pip --no-cache-dir install --upgrade \ - pip~=20.0.2 \ + pip~=20.1.1 \ wheel \ setuptools