diff --git a/api/specs/webserver/v0/components/schemas/my.yaml b/api/specs/webserver/v0/components/schemas/my.yaml index e92a41b2bf9..b9753cdbe9e 100644 --- a/api/specs/webserver/v0/components/schemas/my.yaml +++ b/api/specs/webserver/v0/components/schemas/my.yaml @@ -32,6 +32,13 @@ Token: service: 'github-api-v1' token_key: N1BP5ZSpB + +TokenId: + description: toke identifier + type: string + # format: uuid + + # enveloped and array versions -------------------------- ProfileEnveloped: @@ -70,3 +77,14 @@ TokensArrayEnveloped: error: nullable: true default: null + +TokenIdEnveloped: + type: object + required: + - data + properties: + data: + $ref: '#/TokenId' + error: + nullable: true + default: null diff --git a/api/specs/webserver/v0/openapi-user.yaml b/api/specs/webserver/v0/openapi-user.yaml index 62a0e36de14..99145fc0f80 100644 --- a/api/specs/webserver/v0/openapi-user.yaml +++ b/api/specs/webserver/v0/openapi-user.yaml @@ -44,6 +44,11 @@ paths: responses: '201': description: token created + content: + application/json: + schema: + $ref: './components/schemas/my.yaml#/TokenIdEnveloped' + default: $ref: './openapi.yaml#/components/responses/DefaultErrorResponse' diff --git a/packages/simcore-sdk/setup.py b/packages/simcore-sdk/setup.py index d2c7ab49432..0cd5d67f9d7 100644 --- a/packages/simcore-sdk/setup.py +++ b/packages/simcore-sdk/setup.py @@ -18,12 +18,14 @@ 'mock~=2.0', 'pylint~=2.0', 'pytest~=3.6', - 'pytest-asyncio~=0.9', 'pytest-cov~=2.5', 'pytest-docker~=0.6', 'requests~=2.19', 'docker~=3.5' ] +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 + setup( name='simcore-sdk', diff --git a/services/director/tests/requirements.txt b/services/director/tests/requirements.txt index 2b3a3752844..c1a7e71f1ca 100644 --- a/services/director/tests/requirements.txt +++ b/services/director/tests/requirements.txt @@ -5,4 +5,6 @@ pytest-aiohttp pytest~=3.6 pytest-cov~=2.5 pytest-docker~=0.6 
-pytest-asyncio~=0.9.0 + +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index 7e71c3edf4c..78f15fb10da 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -9,11 +9,11 @@ API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) -@pytest.mark.asyncio + async def test_root_get(): fake_request = "fake request" web_response = await rest.handlers.root_get(fake_request) - assert web_response.content_type == "application/json" + assert web_response.content_type == "application/json" assert web_response.status == 200 healthcheck_enveloped = json.loads(web_response.text) assert "data" in healthcheck_enveloped @@ -30,18 +30,18 @@ def _check_services(created_services, services, schema_version="v1"): assert len(created_services) == len(services) created_service_descriptions = [x["service_description"] for x in created_services] - + json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) assert json_schema_path.exists() == True with json_schema_path.open() as file_pt: service_schema = json.load(file_pt) - for service in services: + for service in services: if schema_version == "v1": assert created_service_descriptions.count(service) == 1 json_schema_validator.validate_instance_object(service, service_schema) -@pytest.mark.asyncio + async def test_services_get(docker_registry, push_services): fake_request = "fake request" # no registry defined @@ -49,10 +49,10 @@ async def test_services_get(docker_registry, push_services): services_enveloped = await rest.handlers.services_get(fake_request) # wrong registry defined - config.REGISTRY_URL = "blahblah" + config.REGISTRY_URL = "blahblah" with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting HTTP Internal Error as SSL is enabled by default"): services_enveloped = await 
rest.handlers.services_get(fake_request) - + # right registry defined config.REGISTRY_URL = docker_registry with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting HTTP Internal Error as SSL is enabled by default"): @@ -103,7 +103,7 @@ async def test_services_get(docker_registry, push_services): services = services_enveloped["data"] assert len(services) == 2 -@pytest.mark.asyncio + async def test_v0_services_conversion_to_new(configure_registry_access, push_v0_schema_services): #pylint: disable=W0613, W0621 fake_request = "fake request" created_services = push_v0_schema_services(3,2) @@ -117,7 +117,7 @@ async def test_v0_services_conversion_to_new(configure_registry_access, push_v0_ # ensure old style services are not retrieved assert len(services) == 0 -@pytest.mark.asyncio + async def test_services_by_key_version_get(configure_registry_access, push_services): #pylint: disable=W0613, W0621 fake_request = "fake request" @@ -131,7 +131,7 @@ async def test_services_by_key_version_get(configure_registry_access, push_servi web_response = await rest.handlers.services_by_key_version_get(fake_request, "whatever", "ofwhateverversion") created_services = push_services(3,2) - assert len(created_services) == 5 + assert len(created_services) == 5 retrieved_services = [] for created_service in created_services: @@ -151,19 +151,19 @@ async def _start_get_stop_services(push_services, user_id): with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, None, None, None, None) - + with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, "None", None, None, None) with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await 
rest.handlers.running_interactive_services_post(fake_request, "None", "None", None, None) - + with pytest.raises(web_exceptions.HTTPNotFound, message="Expecting not found error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, "None", "None", "None", None) with pytest.raises(web_exceptions.HTTPNotFound, message="Expecting not found error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, "None", "None", "None", "ablah") - + with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await rest.handlers.running_interactive_services_get(fake_request, None) @@ -201,11 +201,11 @@ async def _start_get_stop_services(push_services, user_id): assert web_response.content_type == "application/json" assert web_response.text is None -@pytest.mark.asyncio + async def test_running_services_post_and_delete_no_swarm(configure_registry_access, push_services, user_id): #pylint: disable=W0613, W0621 with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal error as there is no docker swarm"): await _start_get_stop_services(push_services, user_id) -@pytest.mark.asyncio + async def test_running_services_post_and_delete(configure_registry_access, push_services, docker_swarm, user_id): #pylint: disable=W0613, W0621 await _start_get_stop_services(push_services, user_id) diff --git a/services/director/tests/test_producer.py b/services/director/tests/test_producer.py index 8a60a8d0219..d47c6e4a3e0 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/test_producer.py @@ -8,12 +8,12 @@ ) @pytest.fixture -async def run_services(configure_registry_access, push_services, docker_swarm, user_id): #pylint: disable=W0613, W0621 +async def run_services(loop, configure_registry_access, push_services, docker_swarm, user_id): #pylint: disable=W0613, W0621 started_services = [] async def 
push_start_services(number_comp, number_dyn): pushed_services = push_services(number_comp,number_dyn, 60) assert len(pushed_services) == (number_comp + number_dyn) - for pushed_service in pushed_services: + for pushed_service in pushed_services: service_description = pushed_service["service_description"] service_key = service_description["key"] @@ -25,26 +25,27 @@ async def push_start_services(number_comp, number_dyn): started_service = await producer.start_service(user_id, service_key, service_version, service_uuid) assert "published_port" in started_service assert "entry_point" in started_service - assert "service_uuid" in started_service + assert "service_uuid" in started_service # should not throw await producer.get_service_details(service_uuid) started_services.append(started_service) return started_services + yield push_start_services #teardown stop the services - for service in started_services: + for service in started_services: service_uuid = service["service_uuid"] await producer.stop_service(service_uuid) with pytest.raises(exceptions.ServiceUUIDNotFoundError, message="expecting service uuid not found error"): await producer.get_service_details(service_uuid) -@pytest.mark.asyncio + async def test_start_stop_service(run_services): #pylint: disable=W0613, W0621 # standard test await run_services(1,1) -@pytest.mark.asyncio + async def test_service_assigned_env_variables(run_services, user_id): #pylint: disable=W0621 started_services = await run_services(1,1) client = docker.from_env() @@ -70,7 +71,7 @@ async def test_service_assigned_env_variables(run_services, user_id): #pylint: d assert "SIMCORE_USER_ID" in envs_dict assert envs_dict["SIMCORE_USER_ID"] == user_id -@pytest.mark.asyncio + async def test_interactive_service_published_port(run_services): #pylint: disable=W0621 running_dynamic_services = await run_services(0,1) assert len(running_dynamic_services) == 1 @@ -88,4 +89,4 @@ async def test_interactive_service_published_port(run_services): #pylint: 
disabl low_level_client = docker.APIClient() service_information = low_level_client.inspect_service(docker_service.id) service_published_port = service_information["Endpoint"]["Ports"][0]["PublishedPort"] - assert service_published_port == service_port \ No newline at end of file + assert service_published_port == service_port diff --git a/services/sidecar/tests/requirements.txt b/services/sidecar/tests/requirements.txt index 5d79da41910..87e9f61ace0 100644 --- a/services/sidecar/tests/requirements.txt +++ b/services/sidecar/tests/requirements.txt @@ -6,9 +6,10 @@ aiopg coveralls pytest pytest-aiohttp -pytest-asyncio pytest-cov pytest-docker openapi_spec_validator pyyaml virtualenv +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 9b72a40419d..84a7d4c551c 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -1,9 +1,9 @@ -# TODO: W0611:Unused import ... -# pylint: disable=W0611 -# TODO: W0613:Unused argument ... 
-# pylint: disable=W0613 -# -# pylint: disable=W0621 +# pylint:disable=wildcard-import +# pylint:disable=unused-import +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + import asyncio import os import subprocess @@ -26,24 +26,27 @@ SIMCORE_S3_STR) from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER -# fixtures ------------------------------------------------------- + @pytest.fixture(scope='session') def here(): return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + @pytest.fixture(scope='session') def package_dir(here): dirpath = Path(simcore_service_storage.__file__).parent assert dirpath.exists() return dirpath + @pytest.fixture(scope='session') def osparc_simcore_root_dir(here): root_dir = here.parent.parent.parent assert root_dir.exists(), "Is this service within osparc-simcore repo?" return root_dir + @pytest.fixture(scope='session') def python27_exec(osparc_simcore_root_dir, tmpdir_factory, here): # Assumes already created with make .venv27 @@ -52,16 +55,18 @@ def python27_exec(osparc_simcore_root_dir, tmpdir_factory, here): if not venv27.exists(): # create its own virtualenv venv27 = tmpdir_factory.mktemp("virtualenv") / ".venv27" - cmd = "virtualenv --python=python2 %s"%(venv27) # TODO: how to split in command safely? - assert subprocess.check_call(cmd.split()) == 0, "Unable to run %s" %cmd + # TODO: how to split in command safely? 
+ cmd = "virtualenv --python=python2 %s" % (venv27) + assert subprocess.check_call( + cmd.split()) == 0, "Unable to run %s" % cmd # installs python2 requirements pip_exec = venv27 / "bin" / "pip" assert pip_exec.exists() requirements_py2 = here.parent / "requirements/py27.txt" cmd = "{} install -r {}".format(pip_exec, requirements_py2) - assert subprocess.check_call(cmd.split()) == 0, "Unable to run %s" %cmd - + assert subprocess.check_call( + cmd.split()) == 0, "Unable to run %s" % cmd python27_exec = venv27 / "bin" / "python2.7" assert python27_exec.exists() @@ -73,6 +78,7 @@ def python27_path(python27_exec): return Path(python27_exec).parent.parent # Assumes already created with make .venv27 + @pytest.fixture(scope='session') def docker_compose_file(here): """ Overrides pytest-docker fixture @@ -80,12 +86,12 @@ def docker_compose_file(here): old = os.environ.copy() # docker-compose reads these environs - os.environ['POSTGRES_DB']=DATABASE - os.environ['POSTGRES_USER']=USER - os.environ['POSTGRES_PASSWORD']=PASS - os.environ['POSTGRES_ENDPOINT']="FOO" # TODO: update config schema!! - os.environ['MINIO_ACCESS_KEY']=ACCESS_KEY - os.environ['MINIO_SECRET_KEY']=SECRET_KEY + os.environ['POSTGRES_DB'] = DATABASE + os.environ['POSTGRES_USER'] = USER + os.environ['POSTGRES_PASSWORD'] = PASS + os.environ['POSTGRES_ENDPOINT'] = "FOO" # TODO: update config schema!! 
+ os.environ['MINIO_ACCESS_KEY'] = ACCESS_KEY + os.environ['MINIO_SECRET_KEY'] = SECRET_KEY dc_path = here / 'docker-compose.yml' @@ -94,12 +100,13 @@ def docker_compose_file(here): os.environ = old + @pytest.fixture(scope='session') def postgres_service(docker_services, docker_ip): url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( - user = USER, - password = PASS, - database = DATABASE, + user=USER, + password=PASS, + database=DATABASE, host=docker_ip, port=docker_services.port_for('postgres', 5432), ) @@ -112,27 +119,29 @@ def postgres_service(docker_services, docker_ip): ) postgres_service = { - 'user' : USER, - 'password' : PASS, - 'database' : DATABASE, - 'host' : docker_ip, - 'port' : docker_services.port_for('postgres', 5432) + 'user': USER, + 'password': PASS, + 'database': DATABASE, + 'host': docker_ip, + 'port': docker_services.port_for('postgres', 5432) } return postgres_service + @pytest.fixture(scope='session') def postgres_service_url(postgres_service, docker_services, docker_ip): postgres_service_url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( - user = USER, - password = PASS, - database = DATABASE, + user=USER, + password=PASS, + database=DATABASE, host=docker_ip, port=docker_services.port_for('postgres', 5432), ) return postgres_service_url + @pytest.fixture(scope='function') async def postgres_engine(loop, postgres_service_url): postgres_engine = await create_engine(postgres_service_url) @@ -163,24 +172,28 @@ def minio_service(docker_services, docker_ip): return { 'endpoint': '{ip}:{port}'.format(ip=docker_ip, port=docker_services.port_for('minio', 9000)), 'access_key': ACCESS_KEY, - 'secret_key' : SECRET_KEY, - 'bucket_name' : BUCKET_NAME, - } + 'secret_key': SECRET_KEY, + 'bucket_name': BUCKET_NAME, + } + @pytest.fixture(scope="module") def s3_client(minio_service): from s3wrapper.s3_client import S3Client - s3_client = 
S3Client(endpoint=minio_service['endpoint'],access_key=minio_service["access_key"], secret_key=minio_service["secret_key"]) + s3_client = S3Client( + endpoint=minio_service['endpoint'], access_key=minio_service["access_key"], secret_key=minio_service["secret_key"]) return s3_client + @pytest.fixture(scope="function") def mock_files_factory(tmpdir_factory): def _create_files(count): filepaths = [] for _i in range(count): name = str(uuid.uuid4()) - filepath = os.path.normpath(str(tmpdir_factory.mktemp('data').join(name + ".txt"))) + filepath = os.path.normpath( + str(tmpdir_factory.mktemp('data').join(name + ".txt"))) with open(filepath, 'w') as fout: fout.write("Hello world\n") filepaths.append(filepath) @@ -198,10 +211,11 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): bucket_name = BUCKET_NAME s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) - #TODO: use pip install Faker - users = [ 'alice', 'bob', 'chuck', 'dennis'] + # TODO: use pip install Faker + users = ['alice', 'bob', 'chuck', 'dennis'] - projects = ['astronomy', 'biology', 'chemistry', 'dermatology', 'economics', 'futurology', 'geology'] + projects = ['astronomy', 'biology', 'chemistry', + 'dermatology', 'economics', 'futurology', 'geology'] location = SIMCORE_S3_STR nodes = ['alpha', 'beta', 'gamma', 'delta'] @@ -214,41 +228,43 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): idx = randrange(len(users)) user_name = users[idx] user_id = idx + 10 - idx = randrange(len(projects)) + idx = randrange(len(projects)) project_name = projects[idx] project_id = idx + 100 - idx = randrange(len(nodes)) + idx = randrange(len(nodes)) node = nodes[idx] node_id = idx + 10000 file_name = str(counter) - object_name = Path(str(project_id), str(node_id), str(counter)).as_posix() + object_name = Path(str(project_id), str( + node_id), str(counter)).as_posix() file_uuid = Path(object_name).as_posix() assert s3_client.upload_file(bucket_name, object_name, 
_file) - d = { 'file_uuid' : file_uuid, - 'location_id' : "0", - 'location' : location, - 'bucket_name' : bucket_name, - 'object_name' : object_name, - 'project_id' : str(project_id), - 'project_name' : project_name, - 'node_id' : str(node_id), - 'node_name' : node, - 'file_name' : file_name, - 'user_id' : str(user_id), - 'user_name' : user_name - } + d = {'file_uuid': file_uuid, + 'location_id': "0", + 'location': location, + 'bucket_name': bucket_name, + 'object_name': object_name, + 'project_id': str(project_id), + 'project_name': project_name, + 'node_id': str(node_id), + 'node_name': node, + 'file_name': file_name, + 'user_id': str(user_id), + 'user_name': user_name + } counter = counter + 1 data[object_name] = FileMetaData(**d) - utils.insert_metadata(postgres_service_url, data[object_name]) #pylint: disable=no-member - + # pylint: disable=no-member + utils.insert_metadata(postgres_service_url, + data[object_name]) total_count = 0 - for _obj in s3_client.list_objects_v2(bucket_name, recursive = True): + for _obj in s3_client.list_objects_v2(bucket_name, recursive=True): total_count = total_count + 1 assert total_count == N @@ -260,10 +276,6 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): # db utils.drop_tables(url=postgres_service_url) -# This is weird, somehow the default loop gives problems with pytest asyncio, so lets override it -@pytest.fixture -def loop(event_loop): - return event_loop @pytest.fixture(scope="function") async def datcore_testbucket(loop, python27_exec, mock_files_factory): @@ -282,19 +294,20 @@ async def datcore_testbucket(loop, python27_exec, mock_files_factory): ready = False counter = 0 - while not ready and counter<5: + while not ready and counter < 5: data = await dcw.list_files() ready = len(data) == 2 await asyncio.sleep(10) counter = counter + 1 - yield BUCKET_NAME await dcw.delete_test_dataset(BUCKET_NAME) + @pytest.fixture(scope="function") def dsm_fixture(s3_client, python27_exec, postgres_engine, 
loop): pool = ThreadPoolExecutor(3) - dsm_fixture = DataStorageManager(s3_client, python27_exec, postgres_engine, loop, pool, BUCKET_NAME) + dsm_fixture = DataStorageManager( + s3_client, python27_exec, postgres_engine, loop, pool, BUCKET_NAME) return dsm_fixture diff --git a/services/storage/tests/requirements.txt b/services/storage/tests/requirements.txt index 8f86b464b12..6161e06b3ea 100644 --- a/services/storage/tests/requirements.txt +++ b/services/storage/tests/requirements.txt @@ -5,9 +5,11 @@ coveralls pytest pytest-aiohttp -pytest-asyncio pytest-cov pytest-docker openapi_spec_validator pyyaml virtualenv + +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py index 15b168854be..7194e477b0c 100644 --- a/services/storage/tests/test_dsm.py +++ b/services/storage/tests/test_dsm.py @@ -1,7 +1,9 @@ -# TODO: W0611:Unused import ... -# pylint: disable=W0611 -# TODO: W0613:Unused argument ... 
-# pylint: disable=W0613 +# pylint:disable=wildcard-import +# pylint:disable=unused-import +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint: disable=too-many-arguments import filecmp import io @@ -10,9 +12,8 @@ import pdb import urllib import uuid -from pprint import pprint - from pathlib import Path +from pprint import pprint import attr import pytest @@ -20,7 +21,8 @@ import utils from simcore_service_storage.dsm import DataStorageManager from simcore_service_storage.models import FileMetaData -from simcore_service_storage.s3 import DATCORE_STR, SIMCORE_S3_STR, SIMCORE_S3_ID +from simcore_service_storage.s3 import (DATCORE_STR, SIMCORE_S3_ID, + SIMCORE_S3_STR) from utils import BUCKET_NAME @@ -176,7 +178,6 @@ async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, d assert len(data) == 2 #NOTE: Below tests directly access the datcore platform, use with care! - @pytest.mark.travis def test_datcore_fixture(datcore_testbucket): print(datcore_testbucket) @@ -198,8 +199,6 @@ async def test_dsm_datcore(postgres_service_url, dsm_fixture, datcore_testbucket data = await dsm.list_files(user_id=user_id, location=DATCORE_STR) assert len(data) == 1 -# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_factory, dsm_fixture, datcore_testbucket): utils.create_tables(url=postgres_service_url) @@ -230,8 +229,6 @@ async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac # there should now be 3 files assert len(data) == 3 -# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_files_factory, datcore_testbucket): utils.create_tables(url=postgres_service_url) @@ -250,8 +247,6 @@ async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_file assert filecmp.cmp(tmp_file2, tmp_file) 
-# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_testbucket): utils.create_tables(url=postgres_service_url) @@ -287,10 +282,6 @@ async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, m assert filecmp.cmp(tmp_file1, tmp_file2) - - -# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_testbucket): utils.create_tables(url=postgres_service_url) diff --git a/services/web/client/source/class/qxapp/Preferences.js b/services/web/client/source/class/qxapp/Preferences.js index f230ff68c37..fce3f9abcf4 100644 --- a/services/web/client/source/class/qxapp/Preferences.js +++ b/services/web/client/source/class/qxapp/Preferences.js @@ -1,11 +1,14 @@ /* eslint no-warning-comments: "off" */ - qx.Class.define("qxapp.Preferences", { extend: qx.ui.window.Window, construct: function() { this.base(arguments, this.tr("Account Settings")); + this.__tokenResources = qxapp.io.rest.ResourceFactory.getInstance().createTokenResources(); + // this.__tokenResources.token + // this.__tokenResources.tokens + // window // TODO: fix-sized modal preference window this.set({ @@ -36,7 +39,8 @@ qx.Class.define("qxapp.Preferences", { }, members: { - _data: null, + __tokenResources: null, + __tokensList: null, __createPage: function(name, iconSrc = null) { let page = new qx.ui.tabview.Page(name, iconSrc); @@ -46,7 +50,7 @@ qx.Class.define("qxapp.Preferences", { })); // title - page.add(new qx.ui.basic.Label("

" + name + " Settings

").set({ + page.add(new qx.ui.basic.Label("

" + name + " " + this.tr("Settings") + "

").set({ rich: true })); @@ -56,18 +60,19 @@ qx.Class.define("qxapp.Preferences", { }, __getGeneral: function() { - const iconUrl = qxapp.dev.Placeholders.getIcon("ion-ios-settings", 32); - let page = this.__createPage("General", iconUrl); + const iconUrl = "@FontAwesome5Solid/sliders-h/24"; + let page = this.__createPage(this.tr("General"), iconUrl); const userEmail = qxapp.auth.Data.getInstance().getEmail(); + let form = new qx.ui.form.Form(); // content let username = new qx.ui.form.TextField().set({ value: userEmail.split("@")[0], placeholder: "User Name", readOnly: true }); - page.add(username); + form.add(username, "Username"); // let fullname = new qx.ui.form.TextField().set({ // placeholder: "Full Name" @@ -80,7 +85,9 @@ qx.Class.define("qxapp.Preferences", { placeholder: "Email", readOnly: true }); - page.add(email); + form.add(email, this.tr("Email")); + + page.add(new qx.ui.form.renderer.Single(form)); let img = new qx.ui.basic.Image().set({ source: qxapp.utils.Avatar.getUrl(email.getValue(), 200) @@ -91,34 +98,138 @@ qx.Class.define("qxapp.Preferences", { }, __getSecurity: function() { - const iconUrl = qxapp.dev.Placeholders.getIcon("fa-lock", 32); - let page = this.__createPage("Security", iconUrl); + const iconUrl = "@FontAwesome5Solid/shield-alt/24"; + let page = this.__createPage(this.tr("Security"), iconUrl); - // content - // page.add(new qx.ui.form.PasswordField().set({ - // placeholder: "Password" - // })); + const title14Font = qx.bom.Font.fromConfig(qxapp.theme.Font.fonts["title-14"]); + page.add(new qx.ui.basic.Label("API Tokens").set({ + font: title14Font + })); - // page.add(new qx.ui.form.PasswordField().set({ - // placeholder: "Re-type Password" - // })); + this.__tokensList = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + page.add(this.__tokensList); - page.add(new qx.ui.basic.Atom("

DAT-CORE

").set({ - rich: true - })); + this.__reloadTokens(); + + return page; + }, - let tokens = new qx.ui.form.PasswordField(); - tokens.set({ - placeholder: "Personal Access Token" + __reloadTokens: function() { + this.__tokensList.removeAll(); + + let tokens = this.__tokenResources.tokens; + tokens.addListenerOnce("getSuccess", e => { + let tokensList = e.getRequest().getResponse().data; + if (tokensList.length === 0) { + let emptyForm = this.__getEmptyTokenForm(); + this.__tokensList.add(new qx.ui.form.renderer.Single(emptyForm)); + } else { + for (let i=0; i { + console.log(e); }); - page.add(tokens); + tokens.get(); + }, - return page; + __getEmptyTokenForm: function() { + let form = new qx.ui.form.Form(); + + // FIXME: for the moment this is fixed since it has to be a unique id + let newTokenService = new qx.ui.form.TextField(); + newTokenService.set({ + value: "blackfynn-datcore", + readOnly: true + }); + form.add(newTokenService, this.tr("Service")); + + // TODO: + let newTokenKey = new qx.ui.form.TextField(); + newTokenKey.set({ + placeholder: "introduce token key here" + }); + form.add(newTokenKey, this.tr("Key")); + + let newTokenSecret = new qx.ui.form.TextField(); + newTokenSecret.set({ + placeholder: "introduce token secret here" + }); + form.add(newTokenSecret, this.tr("Secret")); + + let addTokenBtn = new qx.ui.form.Button(this.tr("Add")); + addTokenBtn.setWidth(100); + addTokenBtn.addListener("execute", e => { + let tokens = this.__tokenResources.tokens; + tokens.addListenerOnce("postSuccess", ev => { + this.__reloadTokens(); + }, this); + tokens.addListenerOnce("getError", ev => { + console.log(ev); + }); + const newTokenInfo = { + "service": newTokenService.getValue(), + "token_key": newTokenKey.getValue(), + "token_secret": newTokenSecret.getValue() + }; + tokens.post(null, newTokenInfo); + }, this); + form.addButton(addTokenBtn); + + return form; + }, + + __getValidTokenForm: function(service, key, secret) { + let form = new qx.ui.form.Form(); + + 
let tokenService = new qx.ui.form.TextField().set({ + value: service, + readOnly: true + }); + form.add(tokenService, this.tr("Service API")); + + let tokenKey = new qx.ui.form.TextField(); + tokenKey.set({ + value: key, + readOnly: true + }); + form.add(tokenKey, this.tr("Key")); + + if (secret) { + let tokenSecret = new qx.ui.form.TextField(); + tokenSecret.set({ + value: secret, + readOnly: true + }); + form.add(tokenSecret, this.tr("Secret")); + } + + let delTokenBtn = new qx.ui.form.Button(this.tr("Delete")); + delTokenBtn.setWidth(100); + delTokenBtn.addListener("execute", e => { + let token = this.__tokenResources.token; + token.addListenerOnce("delSuccess", eve => { + this.__reloadTokens(); + }, this); + token.addListenerOnce("delError", eve => { + console.log(eve); + }); + token.del({ + "service": service + }); + }, this); + form.addButton(delTokenBtn); + + return form; }, __getDisplay: function() { - const iconUrl = qxapp.dev.Placeholders.getIcon("fa-eye", 32); - let page = this.__createPage("Display", iconUrl); + const iconUrl = "@FontAwesome5Solid/eye/24"; + let page = this.__createPage(this.tr("Display"), iconUrl); let themes = qx.Theme.getAll(); let select = new qx.ui.form.SelectBox("Theme"); @@ -151,8 +262,8 @@ qx.Class.define("qxapp.Preferences", { }, __getAdvanced: function() { - const iconUrl = qxapp.dev.Placeholders.getIcon("fa-rebel", 32); - let page = this.__createPage("Advanced", iconUrl); + const iconUrl = "@FontAwesome5Solid/flask/24"; + let page = this.__createPage(this.tr("Experimental"), iconUrl); return page; } diff --git a/services/web/client/source/class/qxapp/io/rest/ResourceFactory.js b/services/web/client/source/class/qxapp/io/rest/ResourceFactory.js index f2f8954d607..7336cd82fbe 100644 --- a/services/web/client/source/class/qxapp/io/rest/ResourceFactory.js +++ b/services/web/client/source/class/qxapp/io/rest/ResourceFactory.js @@ -64,6 +64,53 @@ qx.Class.define("qxapp.io.rest.ResourceFactory", { "projects": projects, "templates": 
templates }; + }, + + createTokenResources: function() { + // SEE: https://www.qooxdoo.org/current/pages/communication/rest.html + // SEE: api/specs/webserver/v0/openapi-user.yaml + const basePath = qxapp.io.rest.ResourceFactory.API; + + // Singular resource + let token = new qxapp.io.rest.Resource({ + // Get token + get: { + method: "GET", + url: basePath+"/my/tokens/{service}" + }, + + // Update token + put: { + method: "PUT", + url: basePath+"/my/tokens/{service}" + }, + + // Delete token + del: { + method: "DELETE", + url: basePath+"/my/tokens/{service}" + } + }); + + // Plural resource + var tokens = new qxapp.io.rest.Resource({ + // Retrieve tokens + get: { + method: "GET", + url: basePath+"/my/tokens" + }, + + // Create token + post: { + method: "POST", + url: basePath+"/my/tokens" + } + }); + + return { + "token": token, + "tokens": tokens + }; } } // members diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py index 6cca377c982..16bf06540f0 100644 --- a/services/web/server/src/simcore_service_webserver/application.py +++ b/services/web/server/src/simcore_service_webserver/application.py @@ -21,6 +21,7 @@ from .statics import setup_statics from .storage import setup_storage from .projects import setup_projects +from .users import setup_users log = logging.getLogger(__name__) @@ -53,6 +54,7 @@ def create_application(config: dict): setup_director(app) setup_s3(app) setup_storage(app) + setup_users(app) setup_projects(app, debug=True) # TODO: deactivate fakes i.e. 
debug=testing return app diff --git a/services/web/server/src/simcore_service_webserver/login/utils.py b/services/web/server/src/simcore_service_webserver/login/utils.py index dea99cd020c..492884cbe0f 100644 --- a/services/web/server/src/simcore_service_webserver/login/utils.py +++ b/services/web/server/src/simcore_service_webserver/login/utils.py @@ -18,7 +18,10 @@ def encrypt_password(password): - return passlib.hash.sha256_crypt.encrypt(password, rounds=1000) + #TODO: add settings sha256_crypt.using(**settings).hash(secret) + # see https://passlib.readthedocs.io/en/stable/lib/passlib.hash.sha256_crypt.html + # + return passlib.hash.sha256_crypt.using(rounds=1000).hash(password) def check_password(password, password_hash): diff --git a/services/web/server/src/simcore_service_webserver/users.py b/services/web/server/src/simcore_service_webserver/users.py new file mode 100644 index 00000000000..3e8ee21f318 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users.py @@ -0,0 +1,42 @@ +""" users management subsystem + + +""" + +import logging + +from aiohttp import web + +from servicelib.application_keys import APP_CONFIG_KEY +from servicelib.rest_routing import iter_path_operations, map_handlers_with_operations, get_handlers_from_namespace + +from . 
import users_handlers +from .rest_config import APP_OPENAPI_SPECS_KEY + +CONFIG_SECTION_NAME = "users" + + +logger = logging.getLogger(__name__) + +def setup(app: web.Application, *, debug=False): + logger.debug("Setting up %s %s...", __name__, "[debug]" if debug else "") + + assert CONFIG_SECTION_NAME not in app[APP_CONFIG_KEY], "Not section for the moment" + + # routes + specs = app[APP_OPENAPI_SPECS_KEY] + + routes = map_handlers_with_operations( + get_handlers_from_namespace(users_handlers), + filter(lambda o: "/my" in o[1], iter_path_operations(specs)), + strict=True + ) + app.router.add_routes(routes) + + +# alias +setup_users = setup + +__all__ = ( + 'setup_users' +) diff --git a/services/web/server/src/simcore_service_webserver/users_handlers.py b/services/web/server/src/simcore_service_webserver/users_handlers.py new file mode 100644 index 00000000000..c643ebfbaca --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users_handlers.py @@ -0,0 +1,134 @@ +# pylint: disable=no-value-for-parameter + +import json +import logging + +import sqlalchemy as sa +import sqlalchemy.sql as sql +from aiohttp import web + +from servicelib.application_keys import APP_DB_ENGINE_KEY + +from .db_models import tokens, users +from .login.decorators import RQT_USERID_KEY, login_required +from .utils import gravatar_hash + +logger = logging.getLogger(__name__) + + +# my/ ----------------------------------------------------------- +@login_required +async def get_my_profile(request: web.Request): + uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] + + async with engine.acquire() as conn: + stmt = sa.select([users.c.email]).where(users.c.id == uid) + email = await conn.scalar(stmt) + + return { + 'login': email, + 'gravatar_id': gravatar_hash(email) + } + + +# my/tokens/ ------------------------------------------------------ +@login_required +async def create_tokens(request: web.Request): + uid, engine = request[RQT_USERID_KEY], 
request.app[APP_DB_ENGINE_KEY] + + # TODO: validate + body = await request.json() + + # TODO: what if service exists already!? + # TODO: if service exists already, then IntegrityError is raised! How to deal with db exceptions?? + async with engine.acquire() as conn: + stmt = tokens.insert().values( + user_id=uid, + token_service=body['service'], + token_data=body) + result = await conn.execute(stmt) + row = await result.first() + + raise web.HTTPCreated(text=json.dumps({'data': row['token_id']}), + content_type="application/json") + + +@login_required +async def list_tokens(request: web.Request): + # TODO: start = request.match_info.get('start', 0) + # TODO: count = request.match_info.get('count', None) + uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] + + user_tokens = [] + async with engine.acquire() as conn: + query = (sa.select([tokens.c.token_data]) + .where(tokens.c.user_id == uid) + ) + async for row in conn.execute(query): + user_tokens.append(row["token_data"]) + + return user_tokens + + +@login_required +async def get_token(request: web.Request): + uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] + service_id = request.match_info['service'] + + async with engine.acquire() as conn: + query = (sa.select([tokens.c.token_data]) + .where(sql.and_( + tokens.c.user_id == uid, + tokens.c.token_service == service_id) ) + ) + result = await conn.execute(query) + row = await result.first() + return row["token_data"] + + +@login_required +async def update_token(request: web.Request): + """ updates token_data of a given user service + + WARNING: token_data has to be complete! + """ + uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] + service_id = request.match_info['service'] + + # TODO: validate + body = await request.json() + + # TODO: optimize to a single call? 
+ async with engine.acquire() as conn: + query = (sa.select([tokens.c.token_data, tokens.c.token_id]) + .where(sql.and_( + tokens.c.user_id == uid, + tokens.c.token_service == service_id) ) + ) + result = await conn.execute(query) + row = await result.first() + + data = dict(row["token_data"]) + tid = row["token_id"] + data.update(body) + + query = (tokens.update() + .where(tokens.c.token_id == tid ) + .values(token_data=data) + ) + resp = await conn.execute(query) + assert resp.rowcount == 1 + + +@login_required +async def delete_token(request: web.Request): + uid, engine = request[RQT_USERID_KEY], request.app[APP_DB_ENGINE_KEY] + service_id = request.match_info.get('service') + + async with engine.acquire() as conn: + query = tokens.delete().where(sql.and_(tokens.c.user_id == uid, + tokens.c.token_service == service_id) + ) + await conn.execute(query) + + raise web.HTTPNoContent(content_type='application/json') diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index 500ea24366a..0dcbaaf8fcd 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -1,14 +1,14 @@ """ General utilities and helper functions """ +import hashlib import os import sys from pathlib import Path - from typing import Iterable, List from aiohttp.web import HTTPFound - +from yarl import URL CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -81,6 +81,13 @@ def redirect(*args, **kwargs): raise HTTPFound(*args, **kwargs) +def gravatar_hash(email): + return hashlib.md5(email.lower().encode('utf-8')).hexdigest() + +def gravatar_url(gravatarhash, size=100, default='identicon', rating='g') -> URL: + url = URL('https://secure.gravatar.com/avatar/%s' % gravatarhash) + return url.with_query(s=size, d=default, r=rating) + __all__ = ( 'redirect', ) diff --git 
a/services/web/server/tests/login/conftest.py b/services/web/server/tests/login/conftest.py index 532d95c173c..0d46b34067e 100644 --- a/services/web/server/tests/login/conftest.py +++ b/services/web/server/tests/login/conftest.py @@ -1,3 +1,7 @@ +""" Tests functionality that requires login users + + +""" # pylint:disable=wildcard-import # pylint:disable=unused-import # pylint:disable=unused-variable diff --git a/services/web/server/tests/login/test_users.py b/services/web/server/tests/login/test_users.py new file mode 100644 index 00000000000..8e616b1b6ec --- /dev/null +++ b/services/web/server/tests/login/test_users.py @@ -0,0 +1,230 @@ +# pylint:disable=wildcard-import +# pylint:disable=unused-import +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import collections +import random +from itertools import repeat + +import faker +import pytest +from aiohttp import web +from yarl import URL + +from servicelib.application_keys import APP_CONFIG_KEY +from servicelib.rest_responses import unwrap_envelope +from simcore_service_webserver.db import APP_DB_ENGINE_KEY, setup_db +from simcore_service_webserver.login import setup_login +from simcore_service_webserver.rest import APP_OPENAPI_SPECS_KEY, setup_rest +from simcore_service_webserver.security import setup_security +from simcore_service_webserver.session import setup_session +from simcore_service_webserver.users import setup_users +from utils_assert import assert_status +from utils_login import LoggedUser +from utils_tokens import (create_token_in_db, delete_all_tokens_from_db, + get_token_from_db) + +API_VERSION = "v0" + + +@pytest.fixture +def client(loop, aiohttp_client, aiohttp_unused_port, app_cfg, postgres_service): + app = web.Application() + port = app_cfg["main"]["port"] = aiohttp_unused_port() + + assert app_cfg["rest"]["version"] == API_VERSION + assert API_VERSION in app_cfg["rest"]["location"] + + app_cfg["db"]["init_tables"] = True # inits 
postgres_service + + # fake config + app[APP_CONFIG_KEY] = app_cfg + + setup_db(app) + setup_session(app) + setup_security(app) + setup_rest(app, debug=True) + setup_login(app) + setup_users(app) + + client = loop.run_until_complete(aiohttp_client(app, server_kwargs={ + 'port': port, + 'host': 'localhost' + })) + return client + + +# WARNING: pytest-asyncio and pytest-aiohttp are not compatible +# +# https://github.com/aio-libs/pytest-aiohttp/issues/8#issuecomment-405602020 +# https://github.com/pytest-dev/pytest-asyncio/issues/76 +# + +@pytest.fixture +async def logged_user(client): + """ adds a user in db and logs in with client """ + async with LoggedUser(client) as user: + yield user + + +@pytest.fixture +async def tokens_db(logged_user, client): + engine = client.app[APP_DB_ENGINE_KEY] + yield engine + await delete_all_tokens_from_db(engine) + + +@pytest.fixture +async def fake_tokens(logged_user, tokens_db): + # pylint: disable=E1101 + from faker.providers import lorem, misc + + fake = faker.Factory.create() + fake.seed(4567) # Always the same fakes + fake.add_provider(lorem) + fake.add_provider(lorem) + + all_tokens = [] + + # TODO: automatically create data from oas! 
+ # See api/specs/webserver/v0/components/schemas/my.yaml + for _ in repeat(None, 5): + # TODO: add tokens from other users + data = { + 'service': fake.word(ext_word_list=None), + 'token_key': fake.md5(raw_output=False), + 'token_secret': fake.md5(raw_output=False) + } + row = await create_token_in_db( tokens_db, + user_id = logged_user['id'], + token_service = data['service'], + token_data = data + ) + all_tokens.append(data) + return all_tokens + + + + +PREFIX = "/" + API_VERSION + "/my" + +# test R on profile ---------------------------------------------------- +async def test_get_profile(logged_user, client): + url = client.app.router["get_my_profile"].url_for() + assert str(url) == "/v0/my" + + resp = await client.get(url) + payload = await resp.json() + assert resp.status == 200, payload + + data, error = unwrap_envelope(payload) + assert not error + assert data + + assert data['login'] == logged_user["email"] + assert data['gravatar_id'] + + +# Test CRUD on tokens -------------------------------------------- +RESOURCE_NAME = 'tokens' + + +# TODO: template for CRUD testing? 
+# TODO: create parametrize fixture with resource_name + +async def test_create(client, logged_user, tokens_db): + url = client.app.router["create_tokens"].url_for() + assert '/v0/my/tokens' == str(url) + + token = { + 'service': "blackfynn", + 'token_key': '4k9lyzBTS', + 'token_secret': 'my secret' + } + + resp = await client.post(url, json=token) + payload = await resp.json() + assert resp.status == 201, payload + + data, error = unwrap_envelope(payload) + assert not error + assert data + + db_token = await get_token_from_db(tokens_db, token_id=data) + assert db_token['token_data'] == token + assert db_token['user_id'] == logged_user["id"] + + +async def test_read(client, logged_user, tokens_db, fake_tokens): + # list all + url = client.app.router["list_tokens"].url_for() + assert "/v0/my/tokens" == str(url) + resp = await client.get(url) + payload = await resp.json() + assert resp.status == 200, payload + + data, error = unwrap_envelope(payload) + assert not error + assert data == fake_tokens + + # get one + expected = random.choice(fake_tokens) + sid = expected['service'] + + url = client.app.router["get_token"].url_for(service=sid) + assert "/v0/my/tokens/%s" % sid == str(url) + resp = await client.get(url) + payload = await resp.json() + assert resp.status == 200, payload + + data, error = unwrap_envelope(payload) + assert not error + assert data == expected + + +async def test_update(client, logged_user, tokens_db, fake_tokens): + + selected = random.choice(fake_tokens) + sid = selected['service'] + + url = client.app.router["get_token"].url_for(service=sid) + assert "/v0/my/tokens/%s" % sid == str(url) + + resp = await client.put(url, json={ + 'token_secret': 'some completely new secret' + }) + payload = await resp.json() + assert resp.status == 200, payload + + data, error = unwrap_envelope(payload) + assert not error + assert not data + + # check in db + token_in_db = await get_token_from_db(tokens_db, token_service=sid) + + assert 
token_in_db['token_data']['token_secret'] == 'some completely new secret' + assert token_in_db['token_data']['token_secret'] != selected['token_secret'] + + selected['token_secret'] = 'some completely new secret' + assert token_in_db['token_data'] == selected + + +async def test_delete(client, logged_user, tokens_db, fake_tokens): + sid = fake_tokens[0]['service'] + + url = client.app.router["delete_token"].url_for(service=sid) + assert "/v0/my/tokens/%s" % sid == str(url) + + resp = await client.delete(url) + payload = await resp.json() + + assert resp.status == 204, payload + + data, error = unwrap_envelope(payload) + assert not error + assert not data + + assert not (await get_token_from_db(tokens_db, token_service=sid)) diff --git a/services/web/server/tests/login/utils_tokens.py b/services/web/server/tests/login/utils_tokens.py new file mode 100644 index 00000000000..9b5420128a2 --- /dev/null +++ b/services/web/server/tests/login/utils_tokens.py @@ -0,0 +1,65 @@ +# pylint: disable=E1120 + +import random +from functools import reduce + +import sqlalchemy as sa +from sqlalchemy.sql import and_ # , or_, not_ + +from simcore_service_webserver.db import DSN +from simcore_service_webserver.db_models import metadata, tokens, users +from simcore_service_webserver.login.utils import get_random_string + + +def create_db_tables(**kargs): + url = DSN.format(**kargs) + engine = sa.create_engine(url, isolation_level="AUTOCOMMIT") + metadata.create_all(bind=engine, tables=[users, tokens], checkfirst=True) + return url + + +async def create_token_in_db(engine, **data): + # TODO change by faker? 
+ params = { + "user_id": random.randint(0, 3), + "token_service": get_random_string(5), + "token_data": { + "token_secret": get_random_string(3), + "token_key": get_random_string(4), + } + } + params.update(data) + + async with engine.acquire() as conn: + stmt = tokens.insert().values(**params) + result = await conn.execute(stmt) + row = await result.first() + return dict(row) + + +async def get_token_from_db(engine, *, token_id=None, user_id=None, token_service=None, token_data=None): + async with engine.acquire() as conn: + expr = to_expression(token_id=token_id, user_id=user_id, + token_service=token_service, token_data=token_data) + stmt = sa.select([tokens, ]).where(expr) + result = await conn.execute(stmt) + row = await result.first() + return dict(row) if row else None + + +async def delete_token_from_db(engine, *, token_id): + expr = tokens.c.token_id == token_id + async with engine.acquire() as conn: + stmt = tokens.delete().where(expr) + await conn.execute(stmt) + + +async def delete_all_tokens_from_db(engine): + async with engine.acquire() as conn: + await conn.execute(tokens.delete()) + + +def to_expression(**params): + expr = reduce(and_, [getattr(tokens.c, key) == + value for key, value in params.items() if value is not None]) + return expr diff --git a/services/web/server/tests/requirements.txt b/services/web/server/tests/requirements.txt index a01c87fdcdb..e61e4e6bcb5 100644 --- a/services/web/server/tests/requirements.txt +++ b/services/web/server/tests/requirements.txt @@ -1,7 +1,7 @@ # List of packages added to setup:tests_require # Outsourced here so can be installed in base-stage of the web/Dockerfile coveralls~=1.3 -faker +Faker==1.0.0 openapi-core openapi_spec_validator pylint~=2.0