diff --git a/api/specs/webserver/v0/components/schemas/my.yaml b/api/specs/webserver/v0/components/schemas/my.yaml index e92a41b2bf9..b9753cdbe9e 100644 --- a/api/specs/webserver/v0/components/schemas/my.yaml +++ b/api/specs/webserver/v0/components/schemas/my.yaml @@ -32,6 +32,13 @@ Token: service: 'github-api-v1' token_key: N1BP5ZSpB + +TokenId: + description: token identifier + type: string + # format: uuid + + # enveloped and array versions -------------------------- ProfileEnveloped: @@ -70,3 +77,14 @@ TokensArrayEnveloped: error: nullable: true default: null + +TokenIdEnveloped: + type: object + required: + - data + properties: + data: + $ref: '#/TokenId' + error: + nullable: true + default: null diff --git a/api/specs/webserver/v0/openapi-user.yaml b/api/specs/webserver/v0/openapi-user.yaml index 62a0e36de14..99145fc0f80 100644 --- a/api/specs/webserver/v0/openapi-user.yaml +++ b/api/specs/webserver/v0/openapi-user.yaml @@ -44,6 +44,11 @@ paths: responses: '201': description: token created + content: + application/json: + schema: + $ref: './components/schemas/my.yaml#/TokenIdEnveloped' + default: $ref: './openapi.yaml#/components/responses/DefaultErrorResponse' diff --git a/packages/simcore-sdk/setup.py b/packages/simcore-sdk/setup.py index d2c7ab49432..0cd5d67f9d7 100644 --- a/packages/simcore-sdk/setup.py +++ b/packages/simcore-sdk/setup.py @@ -18,12 +18,14 @@ 'mock~=2.0', 'pylint~=2.0', 'pytest~=3.6', - 'pytest-asyncio~=0.9', 'pytest-cov~=2.5', 'pytest-docker~=0.6', 'requests~=2.19', 'docker~=3.5' ] +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 + setup( name='simcore-sdk', diff --git a/services/director/tests/requirements.txt b/services/director/tests/requirements.txt index 2b3a3752844..c1a7e71f1ca 100644 --- a/services/director/tests/requirements.txt +++ b/services/director/tests/requirements.txt @@ -5,4 +5,6 @@ pytest-aiohttp pytest~=3.6 pytest-cov~=2.5 pytest-docker~=0.6 
-pytest-asyncio~=0.9.0 + +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/services/director/tests/test_handlers.py b/services/director/tests/test_handlers.py index 7e71c3edf4c..78f15fb10da 100644 --- a/services/director/tests/test_handlers.py +++ b/services/director/tests/test_handlers.py @@ -9,11 +9,11 @@ API_VERSIONS = resources.listdir(resources.RESOURCE_OPENAPI_ROOT) -@pytest.mark.asyncio + async def test_root_get(): fake_request = "fake request" web_response = await rest.handlers.root_get(fake_request) - assert web_response.content_type == "application/json" + assert web_response.content_type == "application/json" assert web_response.status == 200 healthcheck_enveloped = json.loads(web_response.text) assert "data" in healthcheck_enveloped @@ -30,18 +30,18 @@ def _check_services(created_services, services, schema_version="v1"): assert len(created_services) == len(services) created_service_descriptions = [x["service_description"] for x in created_services] - + json_schema_path = resources.get_path(resources.RESOURCE_NODE_SCHEMA) assert json_schema_path.exists() == True with json_schema_path.open() as file_pt: service_schema = json.load(file_pt) - for service in services: + for service in services: if schema_version == "v1": assert created_service_descriptions.count(service) == 1 json_schema_validator.validate_instance_object(service, service_schema) -@pytest.mark.asyncio + async def test_services_get(docker_registry, push_services): fake_request = "fake request" # no registry defined @@ -49,10 +49,10 @@ async def test_services_get(docker_registry, push_services): services_enveloped = await rest.handlers.services_get(fake_request) # wrong registry defined - config.REGISTRY_URL = "blahblah" + config.REGISTRY_URL = "blahblah" with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting HTTP Internal Error as SSL is enabled by default"): services_enveloped = await 
rest.handlers.services_get(fake_request) - + # right registry defined config.REGISTRY_URL = docker_registry with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting HTTP Internal Error as SSL is enabled by default"): @@ -103,7 +103,7 @@ async def test_services_get(docker_registry, push_services): services = services_enveloped["data"] assert len(services) == 2 -@pytest.mark.asyncio + async def test_v0_services_conversion_to_new(configure_registry_access, push_v0_schema_services): #pylint: disable=W0613, W0621 fake_request = "fake request" created_services = push_v0_schema_services(3,2) @@ -117,7 +117,7 @@ async def test_v0_services_conversion_to_new(configure_registry_access, push_v0_ # ensure old style services are not retrieved assert len(services) == 0 -@pytest.mark.asyncio + async def test_services_by_key_version_get(configure_registry_access, push_services): #pylint: disable=W0613, W0621 fake_request = "fake request" @@ -131,7 +131,7 @@ async def test_services_by_key_version_get(configure_registry_access, push_servi web_response = await rest.handlers.services_by_key_version_get(fake_request, "whatever", "ofwhateverversion") created_services = push_services(3,2) - assert len(created_services) == 5 + assert len(created_services) == 5 retrieved_services = [] for created_service in created_services: @@ -151,19 +151,19 @@ async def _start_get_stop_services(push_services, user_id): with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, None, None, None, None) - + with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, "None", None, None, None) with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await 
rest.handlers.running_interactive_services_post(fake_request, "None", "None", None, None) - + with pytest.raises(web_exceptions.HTTPNotFound, message="Expecting not found error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, "None", "None", "None", None) with pytest.raises(web_exceptions.HTTPNotFound, message="Expecting not found error"): web_response = await rest.handlers.running_interactive_services_post(fake_request, "None", "None", "None", "ablah") - + with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal server error"): web_response = await rest.handlers.running_interactive_services_get(fake_request, None) @@ -201,11 +201,11 @@ async def _start_get_stop_services(push_services, user_id): assert web_response.content_type == "application/json" assert web_response.text is None -@pytest.mark.asyncio + async def test_running_services_post_and_delete_no_swarm(configure_registry_access, push_services, user_id): #pylint: disable=W0613, W0621 with pytest.raises(web_exceptions.HTTPInternalServerError, message="Expecting internal error as there is no docker swarm"): await _start_get_stop_services(push_services, user_id) -@pytest.mark.asyncio + async def test_running_services_post_and_delete(configure_registry_access, push_services, docker_swarm, user_id): #pylint: disable=W0613, W0621 await _start_get_stop_services(push_services, user_id) diff --git a/services/director/tests/test_producer.py b/services/director/tests/test_producer.py index 8a60a8d0219..d47c6e4a3e0 100644 --- a/services/director/tests/test_producer.py +++ b/services/director/tests/test_producer.py @@ -8,12 +8,12 @@ ) @pytest.fixture -async def run_services(configure_registry_access, push_services, docker_swarm, user_id): #pylint: disable=W0613, W0621 +async def run_services(loop, configure_registry_access, push_services, docker_swarm, user_id): #pylint: disable=W0613, W0621 started_services = [] async def 
push_start_services(number_comp, number_dyn): pushed_services = push_services(number_comp,number_dyn, 60) assert len(pushed_services) == (number_comp + number_dyn) - for pushed_service in pushed_services: + for pushed_service in pushed_services: service_description = pushed_service["service_description"] service_key = service_description["key"] @@ -25,26 +25,27 @@ async def push_start_services(number_comp, number_dyn): started_service = await producer.start_service(user_id, service_key, service_version, service_uuid) assert "published_port" in started_service assert "entry_point" in started_service - assert "service_uuid" in started_service + assert "service_uuid" in started_service # should not throw await producer.get_service_details(service_uuid) started_services.append(started_service) return started_services + yield push_start_services #teardown stop the services - for service in started_services: + for service in started_services: service_uuid = service["service_uuid"] await producer.stop_service(service_uuid) with pytest.raises(exceptions.ServiceUUIDNotFoundError, message="expecting service uuid not found error"): await producer.get_service_details(service_uuid) -@pytest.mark.asyncio + async def test_start_stop_service(run_services): #pylint: disable=W0613, W0621 # standard test await run_services(1,1) -@pytest.mark.asyncio + async def test_service_assigned_env_variables(run_services, user_id): #pylint: disable=W0621 started_services = await run_services(1,1) client = docker.from_env() @@ -70,7 +71,7 @@ async def test_service_assigned_env_variables(run_services, user_id): #pylint: d assert "SIMCORE_USER_ID" in envs_dict assert envs_dict["SIMCORE_USER_ID"] == user_id -@pytest.mark.asyncio + async def test_interactive_service_published_port(run_services): #pylint: disable=W0621 running_dynamic_services = await run_services(0,1) assert len(running_dynamic_services) == 1 @@ -88,4 +89,4 @@ async def test_interactive_service_published_port(run_services): #pylint: 
disabl low_level_client = docker.APIClient() service_information = low_level_client.inspect_service(docker_service.id) service_published_port = service_information["Endpoint"]["Ports"][0]["PublishedPort"] - assert service_published_port == service_port \ No newline at end of file + assert service_published_port == service_port diff --git a/services/sidecar/tests/requirements.txt b/services/sidecar/tests/requirements.txt index 5d79da41910..87e9f61ace0 100644 --- a/services/sidecar/tests/requirements.txt +++ b/services/sidecar/tests/requirements.txt @@ -6,9 +6,10 @@ aiopg coveralls pytest pytest-aiohttp -pytest-asyncio pytest-cov pytest-docker openapi_spec_validator pyyaml virtualenv +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 9b72a40419d..84a7d4c551c 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -1,9 +1,9 @@ -# TODO: W0611:Unused import ... -# pylint: disable=W0611 -# TODO: W0613:Unused argument ... 
-# pylint: disable=W0613 -# -# pylint: disable=W0621 +# pylint:disable=wildcard-import +# pylint:disable=unused-import +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + import asyncio import os import subprocess @@ -26,24 +26,27 @@ SIMCORE_S3_STR) from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER -# fixtures ------------------------------------------------------- + @pytest.fixture(scope='session') def here(): return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + @pytest.fixture(scope='session') def package_dir(here): dirpath = Path(simcore_service_storage.__file__).parent assert dirpath.exists() return dirpath + @pytest.fixture(scope='session') def osparc_simcore_root_dir(here): root_dir = here.parent.parent.parent assert root_dir.exists(), "Is this service within osparc-simcore repo?" return root_dir + @pytest.fixture(scope='session') def python27_exec(osparc_simcore_root_dir, tmpdir_factory, here): # Assumes already created with make .venv27 @@ -52,16 +55,18 @@ def python27_exec(osparc_simcore_root_dir, tmpdir_factory, here): if not venv27.exists(): # create its own virtualenv venv27 = tmpdir_factory.mktemp("virtualenv") / ".venv27" - cmd = "virtualenv --python=python2 %s"%(venv27) # TODO: how to split in command safely? - assert subprocess.check_call(cmd.split()) == 0, "Unable to run %s" %cmd + # TODO: how to split in command safely? 
+ cmd = "virtualenv --python=python2 %s" % (venv27) + assert subprocess.check_call( + cmd.split()) == 0, "Unable to run %s" % cmd # installs python2 requirements pip_exec = venv27 / "bin" / "pip" assert pip_exec.exists() requirements_py2 = here.parent / "requirements/py27.txt" cmd = "{} install -r {}".format(pip_exec, requirements_py2) - assert subprocess.check_call(cmd.split()) == 0, "Unable to run %s" %cmd - + assert subprocess.check_call( + cmd.split()) == 0, "Unable to run %s" % cmd python27_exec = venv27 / "bin" / "python2.7" assert python27_exec.exists() @@ -73,6 +78,7 @@ def python27_path(python27_exec): return Path(python27_exec).parent.parent # Assumes already created with make .venv27 + @pytest.fixture(scope='session') def docker_compose_file(here): """ Overrides pytest-docker fixture @@ -80,12 +86,12 @@ def docker_compose_file(here): old = os.environ.copy() # docker-compose reads these environs - os.environ['POSTGRES_DB']=DATABASE - os.environ['POSTGRES_USER']=USER - os.environ['POSTGRES_PASSWORD']=PASS - os.environ['POSTGRES_ENDPOINT']="FOO" # TODO: update config schema!! - os.environ['MINIO_ACCESS_KEY']=ACCESS_KEY - os.environ['MINIO_SECRET_KEY']=SECRET_KEY + os.environ['POSTGRES_DB'] = DATABASE + os.environ['POSTGRES_USER'] = USER + os.environ['POSTGRES_PASSWORD'] = PASS + os.environ['POSTGRES_ENDPOINT'] = "FOO" # TODO: update config schema!! 
+ os.environ['MINIO_ACCESS_KEY'] = ACCESS_KEY + os.environ['MINIO_SECRET_KEY'] = SECRET_KEY dc_path = here / 'docker-compose.yml' @@ -94,12 +100,13 @@ def docker_compose_file(here): os.environ = old + @pytest.fixture(scope='session') def postgres_service(docker_services, docker_ip): url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( - user = USER, - password = PASS, - database = DATABASE, + user=USER, + password=PASS, + database=DATABASE, host=docker_ip, port=docker_services.port_for('postgres', 5432), ) @@ -112,27 +119,29 @@ def postgres_service(docker_services, docker_ip): ) postgres_service = { - 'user' : USER, - 'password' : PASS, - 'database' : DATABASE, - 'host' : docker_ip, - 'port' : docker_services.port_for('postgres', 5432) + 'user': USER, + 'password': PASS, + 'database': DATABASE, + 'host': docker_ip, + 'port': docker_services.port_for('postgres', 5432) } return postgres_service + @pytest.fixture(scope='session') def postgres_service_url(postgres_service, docker_services, docker_ip): postgres_service_url = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format( - user = USER, - password = PASS, - database = DATABASE, + user=USER, + password=PASS, + database=DATABASE, host=docker_ip, port=docker_services.port_for('postgres', 5432), ) return postgres_service_url + @pytest.fixture(scope='function') async def postgres_engine(loop, postgres_service_url): postgres_engine = await create_engine(postgres_service_url) @@ -163,24 +172,28 @@ def minio_service(docker_services, docker_ip): return { 'endpoint': '{ip}:{port}'.format(ip=docker_ip, port=docker_services.port_for('minio', 9000)), 'access_key': ACCESS_KEY, - 'secret_key' : SECRET_KEY, - 'bucket_name' : BUCKET_NAME, - } + 'secret_key': SECRET_KEY, + 'bucket_name': BUCKET_NAME, + } + @pytest.fixture(scope="module") def s3_client(minio_service): from s3wrapper.s3_client import S3Client - s3_client = 
S3Client(endpoint=minio_service['endpoint'],access_key=minio_service["access_key"], secret_key=minio_service["secret_key"]) + s3_client = S3Client( + endpoint=minio_service['endpoint'], access_key=minio_service["access_key"], secret_key=minio_service["secret_key"]) return s3_client + @pytest.fixture(scope="function") def mock_files_factory(tmpdir_factory): def _create_files(count): filepaths = [] for _i in range(count): name = str(uuid.uuid4()) - filepath = os.path.normpath(str(tmpdir_factory.mktemp('data').join(name + ".txt"))) + filepath = os.path.normpath( + str(tmpdir_factory.mktemp('data').join(name + ".txt"))) with open(filepath, 'w') as fout: fout.write("Hello world\n") filepaths.append(filepath) @@ -198,10 +211,11 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): bucket_name = BUCKET_NAME s3_client.create_bucket(bucket_name, delete_contents_if_exists=True) - #TODO: use pip install Faker - users = [ 'alice', 'bob', 'chuck', 'dennis'] + # TODO: use pip install Faker + users = ['alice', 'bob', 'chuck', 'dennis'] - projects = ['astronomy', 'biology', 'chemistry', 'dermatology', 'economics', 'futurology', 'geology'] + projects = ['astronomy', 'biology', 'chemistry', + 'dermatology', 'economics', 'futurology', 'geology'] location = SIMCORE_S3_STR nodes = ['alpha', 'beta', 'gamma', 'delta'] @@ -214,41 +228,43 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): idx = randrange(len(users)) user_name = users[idx] user_id = idx + 10 - idx = randrange(len(projects)) + idx = randrange(len(projects)) project_name = projects[idx] project_id = idx + 100 - idx = randrange(len(nodes)) + idx = randrange(len(nodes)) node = nodes[idx] node_id = idx + 10000 file_name = str(counter) - object_name = Path(str(project_id), str(node_id), str(counter)).as_posix() + object_name = Path(str(project_id), str( + node_id), str(counter)).as_posix() file_uuid = Path(object_name).as_posix() assert s3_client.upload_file(bucket_name, object_name, 
_file) - d = { 'file_uuid' : file_uuid, - 'location_id' : "0", - 'location' : location, - 'bucket_name' : bucket_name, - 'object_name' : object_name, - 'project_id' : str(project_id), - 'project_name' : project_name, - 'node_id' : str(node_id), - 'node_name' : node, - 'file_name' : file_name, - 'user_id' : str(user_id), - 'user_name' : user_name - } + d = {'file_uuid': file_uuid, + 'location_id': "0", + 'location': location, + 'bucket_name': bucket_name, + 'object_name': object_name, + 'project_id': str(project_id), + 'project_name': project_name, + 'node_id': str(node_id), + 'node_name': node, + 'file_name': file_name, + 'user_id': str(user_id), + 'user_name': user_name + } counter = counter + 1 data[object_name] = FileMetaData(**d) - utils.insert_metadata(postgres_service_url, data[object_name]) #pylint: disable=no-member - + # pylint: disable=no-member + utils.insert_metadata(postgres_service_url, + data[object_name]) total_count = 0 - for _obj in s3_client.list_objects_v2(bucket_name, recursive = True): + for _obj in s3_client.list_objects_v2(bucket_name, recursive=True): total_count = total_count + 1 assert total_count == N @@ -260,10 +276,6 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory): # db utils.drop_tables(url=postgres_service_url) -# This is weird, somehow the default loop gives problems with pytest asyncio, so lets override it -@pytest.fixture -def loop(event_loop): - return event_loop @pytest.fixture(scope="function") async def datcore_testbucket(loop, python27_exec, mock_files_factory): @@ -282,19 +294,20 @@ async def datcore_testbucket(loop, python27_exec, mock_files_factory): ready = False counter = 0 - while not ready and counter<5: + while not ready and counter < 5: data = await dcw.list_files() ready = len(data) == 2 await asyncio.sleep(10) counter = counter + 1 - yield BUCKET_NAME await dcw.delete_test_dataset(BUCKET_NAME) + @pytest.fixture(scope="function") def dsm_fixture(s3_client, python27_exec, postgres_engine, 
loop): pool = ThreadPoolExecutor(3) - dsm_fixture = DataStorageManager(s3_client, python27_exec, postgres_engine, loop, pool, BUCKET_NAME) + dsm_fixture = DataStorageManager( + s3_client, python27_exec, postgres_engine, loop, pool, BUCKET_NAME) return dsm_fixture diff --git a/services/storage/tests/requirements.txt b/services/storage/tests/requirements.txt index 8f86b464b12..6161e06b3ea 100644 --- a/services/storage/tests/requirements.txt +++ b/services/storage/tests/requirements.txt @@ -5,9 +5,11 @@ coveralls pytest pytest-aiohttp -pytest-asyncio pytest-cov pytest-docker openapi_spec_validator pyyaml virtualenv + +# NOTE: pytest-aiohttp and pytest-asyncio incompatible +# https://github.com/pytest-dev/pytest-asyncio/issues/76 diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py index 15b168854be..7194e477b0c 100644 --- a/services/storage/tests/test_dsm.py +++ b/services/storage/tests/test_dsm.py @@ -1,7 +1,9 @@ -# TODO: W0611:Unused import ... -# pylint: disable=W0611 -# TODO: W0613:Unused argument ... 
-# pylint: disable=W0613 +# pylint:disable=wildcard-import +# pylint:disable=unused-import +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint: disable=too-many-arguments import filecmp import io @@ -10,9 +12,8 @@ import pdb import urllib import uuid -from pprint import pprint - from pathlib import Path +from pprint import pprint import attr import pytest @@ -20,7 +21,8 @@ import utils from simcore_service_storage.dsm import DataStorageManager from simcore_service_storage.models import FileMetaData -from simcore_service_storage.s3 import DATCORE_STR, SIMCORE_S3_STR, SIMCORE_S3_ID +from simcore_service_storage.s3 import (DATCORE_STR, SIMCORE_S3_ID, + SIMCORE_S3_STR) from utils import BUCKET_NAME @@ -176,7 +178,6 @@ async def test_copy_s3_s3(postgres_service_url, s3_client, mock_files_factory, d assert len(data) == 2 #NOTE: Below tests directly access the datcore platform, use with care! - @pytest.mark.travis def test_datcore_fixture(datcore_testbucket): print(datcore_testbucket) @@ -198,8 +199,6 @@ async def test_dsm_datcore(postgres_service_url, dsm_fixture, datcore_testbucket data = await dsm.list_files(user_id=user_id, location=DATCORE_STR) assert len(data) == 1 -# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_factory, dsm_fixture, datcore_testbucket): utils.create_tables(url=postgres_service_url) @@ -230,8 +229,6 @@ async def test_dsm_s3_to_datcore(postgres_service_url, s3_client, mock_files_fac # there should now be 3 files assert len(data) == 3 -# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_files_factory, datcore_testbucket): utils.create_tables(url=postgres_service_url) @@ -250,8 +247,6 @@ async def test_dsm_datcore_to_local(postgres_service_url, dsm_fixture, mock_file assert filecmp.cmp(tmp_file2, tmp_file) 
-# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_testbucket): utils.create_tables(url=postgres_service_url) @@ -287,10 +282,6 @@ async def test_dsm_datcore_to_S3(postgres_service_url, s3_client, dsm_fixture, m assert filecmp.cmp(tmp_file1, tmp_file2) - - -# pylint: disable=R0913 -# Too many arguments @pytest.mark.travis async def test_copy_datcore(postgres_service_url, s3_client, dsm_fixture, mock_files_factory, datcore_testbucket): utils.create_tables(url=postgres_service_url) diff --git a/services/web/client/source/class/qxapp/Preferences.js b/services/web/client/source/class/qxapp/Preferences.js index f230ff68c37..fce3f9abcf4 100644 --- a/services/web/client/source/class/qxapp/Preferences.js +++ b/services/web/client/source/class/qxapp/Preferences.js @@ -1,11 +1,14 @@ /* eslint no-warning-comments: "off" */ - qx.Class.define("qxapp.Preferences", { extend: qx.ui.window.Window, construct: function() { this.base(arguments, this.tr("Account Settings")); + this.__tokenResources = qxapp.io.rest.ResourceFactory.getInstance().createTokenResources(); + // this.__tokenResources.token + // this.__tokenResources.tokens + // window // TODO: fix-sized modal preference window this.set({ @@ -36,7 +39,8 @@ qx.Class.define("qxapp.Preferences", { }, members: { - _data: null, + __tokenResources: null, + __tokensList: null, __createPage: function(name, iconSrc = null) { let page = new qx.ui.tabview.Page(name, iconSrc); @@ -46,7 +50,7 @@ qx.Class.define("qxapp.Preferences", { })); // title - page.add(new qx.ui.basic.Label("