Skip to content

Commit 605a2ae

Browse files
authored
Is369/storage tokens (#380)
* storage can access the user's datcore tokens info - storage service retrieves user token's key and secret when it needs to operate on external services - finishes implementation calls to list datcore from wrapper - fixes on front-end (@odeimaiz ) * closes #369 * merged squashed is309/storage-api-spec from PR #366 (pending review but tests passing) * Removed session from storage. not needed. * Cleanup storage submodule in web-server - adds body in request * Injected datcore tokens into dsm * renamed: db_helpers.py -> db_tokens.py * Fixed dev-config in storage (does not have entire root folder inside of the container!) Added access stdin_open and tty to attach for debugging * Fixes pytest warning "cannot collect test class 'TestClient' because it has a __init__ constructor" Pytest tries to collect TestClient as its name matches the naming conventions for test classes. However it finds a __init__ method, which it cannot understand. * Modified storage config schemas on datcore test tokens * Does not update dsm.datcore_tokens if not available * Fixes case of empty token match * Injects dummy tokens table to init postgres fixture userid is an integer, not a string removed deprecated @pytest.mark.asyncio
1 parent b91f8c1 commit 605a2ae

File tree

19 files changed

+219
-218
lines changed

19 files changed

+219
-218
lines changed

packages/simcore-sdk/tests/fixtures/postgres.py

+18-4
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,22 @@
22
import os
33

44
import pytest
5-
from pytest_docker import docker_ip, docker_services # pylint:disable=W0611
65
import sqlalchemy as sa
6+
from pytest_docker import docker_ip, docker_services # pylint:disable=W0611
77
from sqlalchemy import create_engine
88
from sqlalchemy.orm import sessionmaker
99

10+
#
11+
# FIXME: this should be in sync with the original
12+
# which is owned by the webserver???
13+
_metadata = sa.MetaData()
14+
_tokens = sa.Table("tokens", _metadata,
15+
sa.Column("token_id", sa.BigInteger, nullable=False, primary_key=True),
16+
sa.Column("user_id", sa.BigInteger, nullable=False),
17+
sa.Column("token_service", sa.String, nullable=False),
18+
sa.Column("token_data", sa.JSON, nullable=False),
19+
)
20+
1021

1122
def is_responsive(url):
1223
"""Check if there is a db"""
@@ -22,7 +33,7 @@ def is_responsive(url):
2233

2334
# pylint:disable=redefined-outer-name
2435
@pytest.fixture(scope="module")
25-
def engine(docker_ip, docker_services):
36+
def engine(docker_ip, docker_services):
2637
dbname = 'test'
2738
user = 'user'
2839
password = 'pwd'
@@ -42,7 +53,10 @@ def engine(docker_ip, docker_services):
4253
pause=1.0,
4354
)
4455

56+
# Configures db and initializes tables
57+
# Uses synchronous engine for that
4558
engine = create_engine(url, client_encoding='utf8')
59+
_metadata.create_all(bind=engine, tables=[_tokens, ], checkfirst=True)
4660

4761
os.environ["POSTGRES_ENDPOINT"]="{host}:{port}".format(host=host, port=port)
4862
os.environ["POSTGRES_USER"]="user"
@@ -51,7 +65,8 @@ def engine(docker_ip, docker_services):
5165
yield engine
5266
# cleanup
5367
engine.dispose()
54-
68+
69+
5570

5671
# pylint:disable=redefined-outer-name
5772
@pytest.fixture(scope="module")
@@ -62,4 +77,3 @@ def session(engine):
6277
yield session
6378
#cleanup
6479
session.close()
65-

packages/simcore-sdk/tests/node_ports/conftest.py

+12-11
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,9 @@
1414

1515

1616
@pytest.fixture
17-
def user_id()->str:
18-
yield "testuser"
17+
def user_id()->int:
18+
# see fixtures/postgres.py
19+
yield 1258
1920

2021
@pytest.fixture
2122
def s3_simcore_location() ->str:
@@ -25,8 +26,8 @@ def s3_simcore_location() ->str:
2526
def filemanager_cfg(storage, user_id, bucket):
2627
storage_endpoint = yarl.URL(storage)
2728
node_config.STORAGE_ENDPOINT = "{}:{}".format(storage_endpoint.host, storage_endpoint.port)
28-
node_config.USER_ID = user_id
29-
node_config.BUCKET = bucket
29+
node_config.USER_ID = user_id
30+
node_config.BUCKET = bucket
3031
yield
3132

3233
@pytest.fixture
@@ -39,12 +40,12 @@ def node_uuid()->str:
3940

4041
@pytest.fixture
4142
def file_uuid(project_id, node_uuid)->str:
42-
def create(file_path:Path, project:str=None, node:str=None):
43+
def create(file_path:Path, project:str=None, node:str=None):
4344
if project is None:
4445
project = project_id
4546
if node is None:
46-
node = node_uuid
47-
return helpers.file_uuid(file_path, project, node)
47+
node = node_uuid
48+
return helpers.file_uuid(file_path, project, node)
4849
yield create
4950

5051
@pytest.fixture(scope='session')
@@ -78,7 +79,7 @@ def postgres(engine, session):
7879
def default_configuration(postgres, default_configuration_file, project_id, node_uuid):
7980
# prepare database with default configuration
8081
json_configuration = default_configuration_file.read_text()
81-
82+
8283
_create_new_pipeline(postgres, project_id)
8384
_set_configuration(postgres, project_id, node_uuid, json_configuration)
8485
config_dict = json.loads(json_configuration)
@@ -129,7 +130,7 @@ def create_config(inputs: List[Tuple[str, str, Any]] =None, outputs: List[Tuple[
129130
@pytest.fixture(scope="function")
130131
def special_2nodes_configuration(postgres, empty_configuration_file: Path, project_id, node_uuid):
131132
def create_config(prev_node_inputs: List[Tuple[str, str, Any]] =None, prev_node_outputs: List[Tuple[str, str, Any]] =None,
132-
inputs: List[Tuple[str, str, Any]] =None, outputs: List[Tuple[str, str, Any]] =None,
133+
inputs: List[Tuple[str, str, Any]] =None, outputs: List[Tuple[str, str, Any]] =None,
133134
project_id:str =project_id, previous_node_id:str = node_uuid, node_id:str = "asdasdadsa"):
134135
_create_new_pipeline(postgres, project_id)
135136

@@ -157,7 +158,7 @@ def create_config(prev_node_inputs: List[Tuple[str, str, Any]] =None, prev_node_
157158
postgres.query(ComputationalPipeline).delete()
158159
postgres.commit()
159160

160-
def _create_new_pipeline(session, project:str)->str:
161+
def _create_new_pipeline(session, project:str)->str:
161162
new_Pipeline = ComputationalPipeline(project_id=project)
162163
session.add(new_Pipeline)
163164
session.commit()
@@ -170,7 +171,7 @@ def _set_configuration(session, project_id: str, node_id:str, json_configuration
170171

171172
new_Node = ComputationalTask(project_id=project_id, node_id=node_uuid, schema=configuration["schema"], inputs=configuration["inputs"], outputs=configuration["outputs"])
172173
session.add(new_Node)
173-
session.commit()
174+
session.commit()
174175
return node_uuid
175176

176177
def _assign_config(config_dict:dict, port_type:str, entries: List[Tuple[str, str, Any]]):

packages/simcore-sdk/tests/node_ports/test_filemanager.py

+12-12
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,12 @@
77
from simcore_sdk.node_ports import exceptions, filemanager
88

99

10-
@pytest.mark.asyncio
10+
1111
async def test_valid_upload_download(tmpdir, bucket, storage, filemanager_cfg, user_id, file_uuid, s3_simcore_location):
1212
file_path = Path(tmpdir) / "test.test"
1313
file_path.write_text("I am a test file")
1414
assert file_path.exists()
15-
15+
1616
file_id = file_uuid(file_path)
1717
store = s3_simcore_location
1818
await filemanager.upload_file(store_id=store, s3_object=file_id, local_file_path=file_path)
@@ -22,14 +22,14 @@ async def test_valid_upload_download(tmpdir, bucket, storage, filemanager_cfg, u
2222
assert download_file_path.exists()
2323

2424
assert filecmp.cmp(download_file_path, file_path)
25-
26-
@pytest.mark.asyncio
25+
26+
2727
async def test_invalid_file_path(tmpdir, bucket, storage, filemanager_cfg, user_id, file_uuid, s3_simcore_location):
2828
file_path = Path(tmpdir) / "test.test"
2929
file_path.write_text("I am a test file")
3030
assert file_path.exists()
31-
32-
31+
32+
3333
file_id = file_uuid(file_path)
3434
store = s3_simcore_location
3535
with pytest.raises(FileNotFoundError):
@@ -39,12 +39,12 @@ async def test_invalid_file_path(tmpdir, bucket, storage, filemanager_cfg, user_
3939
with pytest.raises(exceptions.S3InvalidPathError):
4040
await filemanager.download_file(store_id=store, s3_object=file_id, local_file_path=download_file_path)
4141

42-
@pytest.mark.asyncio
42+
4343
async def test_invalid_fileid(tmpdir, bucket, storage, filemanager_cfg, user_id, s3_simcore_location):
4444
file_path = Path(tmpdir) / "test.test"
4545
file_path.write_text("I am a test file")
4646
assert file_path.exists()
47-
47+
4848
store = s3_simcore_location
4949
with pytest.raises(exceptions.StorageInvalidCall):
5050
await filemanager.upload_file(store_id=store, s3_object="", local_file_path=file_path)
@@ -56,8 +56,8 @@ async def test_invalid_fileid(tmpdir, bucket, storage, filemanager_cfg, user_id,
5656
await filemanager.download_file(store_id=store, s3_object="", local_file_path=download_file_path)
5757
with pytest.raises(exceptions.S3InvalidPathError):
5858
await filemanager.download_file(store_id=store, s3_object="file_id", local_file_path=download_file_path)
59-
60-
@pytest.mark.asyncio
59+
60+
6161
async def test_invalid_store(tmpdir, bucket, storage, filemanager_cfg, user_id, file_uuid, s3_simcore_location):
6262
file_path = Path(tmpdir) / "test.test"
6363
file_path.write_text("I am a test file")
@@ -72,6 +72,6 @@ async def test_invalid_store(tmpdir, bucket, storage, filemanager_cfg, user_id,
7272
with pytest.raises(exceptions.S3InvalidStore):
7373
await filemanager.download_file(store_name=store, s3_object=file_id, local_file_path=download_file_path)
7474

75-
@pytest.mark.asyncio
75+
7676
async def test_storage_sdk_client(storage):
77-
pass
77+
pass

services/director/src/simcore_service_director/system_utils.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
def get_system_extra_hosts(extra_host_domain: str) -> Dict:
66
extra_hosts = {}
77
hosts_path = Path("/etc/hosts")
8-
if hosts_path.exists() and extra_host_domain != "undefined":
8+
if hosts_path.exists() and extra_host_domain != "undefined":
99
with hosts_path.open() as hosts:
1010
for line in hosts:
1111
if extra_host_domain in line:

services/storage/src/simcore_service_storage/application.py

-2
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
from .s3 import setup_s3
1010
from .db import setup_db
1111
from .rest import setup_rest
12-
from .session import setup_session
1312
from .settings import APP_CONFIG_KEY
1413
from .dsm import setup_dsm
1514

@@ -22,7 +21,6 @@ def create(config):
2221
app[APP_CONFIG_KEY] = config
2322

2423
setup_db(app)
25-
setup_session(app)
2624
setup_rest(app)
2725
setup_s3(app)
2826
setup_dsm(app)

services/storage/src/simcore_service_storage/cli.py

-2
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,6 @@ def create_environ(skip_system_environ=False):
4040
if rootdir is not None:
4141
environ['OSPARC_SIMCORE_REPO_ROOTDIR'] = str(rootdir)
4242

43-
4443
return environ
4544

4645

@@ -72,7 +71,6 @@ def main(args=None):
7271

7372
log_level = config["main"]["log_level"]
7473
logging.basicConfig(level=getattr(logging, log_level))
75-
print("hello, iam")
7674

7775
application.run(config)
7876

services/storage/src/simcore_service_storage/config_schema.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,8 @@
1212
"python2": T.String(),
1313
T.Key("max_workers", default=8, optional=True) : T.Int(),
1414
T.Key("test_datcore", optional=True): T.Dict({
15-
"api_token": T.String(),
16-
"api_secret": T.String()
15+
"token_key": T.String(),
16+
"token_secret": T.String()
1717
}),
1818
T.Key("disable_services", default=[], optional=True): T.List(T.String())
1919
})

services/storage/src/simcore_service_storage/data/docker-dev-config.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ main:
77
max_workers: 8
88
python2: ${VENV2}
99
test_datcore:
10-
api_token: ${BF_API_KEY}
11-
api_secret: ${BF_API_SECRET}
10+
token_key: ${BF_API_KEY}
11+
token_secret: ${BF_API_SECRET}
1212
postgres:
1313
database: ${POSTGRES_DB}
1414
endpoint: ${POSTGRES_ENDPOINT}

services/storage/src/simcore_service_storage/data/docker-prod-config.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@ main:
66
testing: True
77
python2: ${VENV2}
88
test_datcore:
9-
api_token: ${BF_API_KEY}
10-
api_secret: ${BF_API_SECRET}
9+
token_key: ${BF_API_KEY}
10+
token_secret: ${BF_API_SECRET}
1111
postgres:
1212
database: ${POSTGRES_DB}
1313
endpoint: ${POSTGRES_ENDPOINT}

services/storage/src/simcore_service_storage/data/host-dev-config.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ main:
77
testing: true
88
python2: ${VENV2}
99
test_datcore:
10-
api_token: ${BF_API_KEY}
11-
api_secret: ${BF_API_SECRET}
10+
token_key: ${BF_API_KEY}
11+
token_secret: ${BF_API_SECRET}
1212
postgres:
1313
database: simcoredb
1414
endpoint: postgres:5432
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
2+
from typing import Tuple
3+
4+
import sqlalchemy as sa
5+
from aiohttp import web
6+
7+
from .settings import APP_DB_ENGINE_KEY, APP_CONFIG_KEY
8+
9+
# FIXME: this is a temporary solution DO NOT USE. This table needs to be in sync
10+
# with services/web/server/src/simcore_service_webserver/db_models.py
11+
_metadata = sa.MetaData()
12+
_tokens = sa.Table("tokens", _metadata,
13+
sa.Column("token_id", sa.BigInteger, nullable=False, primary_key=True),
14+
sa.Column("user_id", sa.BigInteger, nullable=False),
15+
sa.Column("token_service", sa.String, nullable=False),
16+
sa.Column("token_data", sa.JSON, nullable=False),
17+
)
18+
19+
20+
async def get_api_token_and_secret(request: web.Request, userid) -> Tuple[str, str]:
    """Returns the (api_token, api_secret) pair for *userid* from the tokens table.

    Falls back to the values configured under ``main.test_datcore`` when the
    user has no token row (or no db engine is configured).

    :param request: current request; its app holds the db engine and config
    :param userid: user id matched against ``tokens.user_id`` (a BigInteger)
    :return: (token key, token secret); either may be None if nothing is found

    FIXME: this is a temporary solution. This information should be sent in some form
    from the client side together with the userid?
    """
    engine = request.app.get(APP_DB_ENGINE_KEY, None)
    data = {}
    if engine:
        async with engine.acquire() as conn:
            stmt = sa.select([_tokens, ]).where(_tokens.c.user_id==userid)
            result = await conn.execute(stmt)
            row = await result.first()
            data = dict(row) if row else {}

    # BUG FIX: key was misspelled "test_datacore"; the config schema
    # (config_schema.py) declares "test_datcore", so defaults were always {}
    defaults = request.app[APP_CONFIG_KEY]["main"].get("test_datcore", {})

    # BUG FIX: the config schema now uses 'token_key'/'token_secret' (same keys
    # as stored in token_data), so look the defaults up under those keys —
    # the previous 'api_token'/'api_secret' lookups could never match
    token_data = data.get('token_data', {})
    api_token = token_data.get('token_key', defaults.get('token_key'))
    api_secret = token_data.get('token_secret', defaults.get('token_secret'))

    return api_token, api_secret

0 commit comments

Comments
 (0)