 from contextlib import asynccontextmanager
 from pathlib import Path
 from random import choice, randint
-from typing import Any
+from typing import Any, cast

 import pytest
 import sqlalchemy as sa
 from faker import Faker
 from models_library.basic_types import SHA256Str
 from models_library.projects import ProjectID
-from models_library.projects_nodes_io import NodeID, SimcoreS3FileID
+from models_library.projects_nodes_io import NodeID, SimcoreS3FileID, StorageFileID
 from models_library.users import UserID
 from pydantic import ByteSize, TypeAdapter
-from pytest_simcore.helpers.faker_factories import random_project, random_user
 from servicelib.utils import limited_gather
 from simcore_postgres_database.models.project_to_groups import project_to_groups
 from simcore_postgres_database.storage_models import projects, users
 from sqlalchemy.dialects.postgresql import insert as pg_insert
 from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine

-from ..helpers.utils import get_updated_project
+from .helpers.faker_factories import random_project, random_user
+from .helpers.storage_utils import FileIDDict, get_updated_project


 @asynccontextmanager
@@ -259,6 +259,39 @@ async def _creator(
     return _creator


+async def _upload_file_and_update_project(
+    project_id: ProjectID,
+    node_id: NodeID,
+    *,
+    file_name: str | None,
+    file_id: StorageFileID | None,
+    file_sizes: tuple[ByteSize, ...],
+    file_checksums: tuple[SHA256Str, ...],
+    node_to_files_mapping: dict[NodeID, dict[SimcoreS3FileID, FileIDDict]],
+    upload_file: Callable[..., Awaitable[tuple[Path, SimcoreS3FileID]]],
+    create_simcore_file_id: Callable[
+        [ProjectID, NodeID, str, Path | None], SimcoreS3FileID
+    ],
+    faker: Faker,
+) -> None:
+    if file_name is None:
+        file_name = faker.file_name()
+        file_id = create_simcore_file_id(project_id, node_id, file_name, None)
+    checksum: SHA256Str = choice(file_checksums)  # noqa: S311
+    src_file, _ = await upload_file(
+        file_size=choice(file_sizes),  # noqa: S311
+        file_name=file_name,
+        file_id=file_id,
+        sha256_checksum=checksum,
+    )
+    assert file_name is not None
+    assert file_id is not None
+    node_to_files_mapping[node_id][file_id] = {
+        "path": src_file,
+        "sha256_checksum": checksum,
+    }
+
+
 @pytest.fixture
 async def random_project_with_files(
     sqlalchemy_async_engine: AsyncEngine,
@@ -271,11 +304,7 @@ async def random_project_with_files(
     faker: Faker,
 ) -> Callable[
     [int, tuple[ByteSize, ...], tuple[SHA256Str, ...]],
-    Awaitable[
-        tuple[
-            dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]]
-        ]
-    ],
+    Awaitable[tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]],
 ]:
     async def _creator(
         num_nodes: int = 12,
@@ -295,76 +324,67 @@ async def _creator(
                 "488f3b57932803bbf644593bd46d95599b1d4da1d63bc020d7ebe6f1c255f7f3"
             ),
         ),
-    ) -> tuple[
-        dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]]
-    ]:
+    ) -> tuple[dict[str, Any], dict[NodeID, dict[SimcoreS3FileID, FileIDDict]]]:
         assert len(file_sizes) == len(file_checksums)
         project = await create_project(name="random-project")
-        src_projects_list: dict[
-            NodeID, dict[SimcoreS3FileID, dict[str, Path | str]]
-        ] = {}
+        node_to_files_mapping: dict[NodeID, dict[SimcoreS3FileID, FileIDDict]] = {}
         upload_tasks: deque[Awaitable] = deque()
         for _node_index in range(num_nodes):
-            # NOTE: we put some more outputs in there to simulate a real case better
-            new_node_id = NodeID(f"{faker.uuid4()}")
+            # Create a node with outputs (files and others)
+            project_id = ProjectID(project["uuid"])
+            node_id = cast(NodeID, faker.uuid4(cast_to=None))
+            output3_file_name = faker.file_name()
             output3_file_id = create_simcore_file_id(
-                ProjectID(project["uuid"]),
-                new_node_id,
-                faker.file_name(),
-                Path("outputs/output3"),
+                project_id, node_id, output3_file_name, Path("outputs/output_3")
             )
-            src_node_id = await create_project_node(
+            created_node_id = await create_project_node(
                 ProjectID(project["uuid"]),
-                new_node_id,
+                node_id,
                 outputs={
                     "output_1": faker.pyint(),
                     "output_2": faker.pystr(),
                     "output_3": f"{output3_file_id}",
                 },
             )
-            assert src_node_id == new_node_id
-
-            # upload the output 3 and some random other files at the root of each node
-            src_projects_list[src_node_id] = {}
-            checksum: SHA256Str = choice(file_checksums)  # noqa: S311
-            src_file, _ = await upload_file(
-                file_size=choice(file_sizes),  # noqa: S311
-                file_name=Path(output3_file_id).name,
-                file_id=output3_file_id,
-                sha256_checksum=checksum,
-            )
-            src_projects_list[src_node_id][output3_file_id] = {
-                "path": src_file,
-                "sha256_checksum": checksum,
-            }
-
-            async def _upload_file_and_update_project(project, src_node_id):
-                src_file_name = faker.file_name()
-                src_file_uuid = create_simcore_file_id(
-                    ProjectID(project["uuid"]), src_node_id, src_file_name, None
-                )
-                checksum: SHA256Str = choice(file_checksums)  # noqa: S311
-                src_file, _ = await upload_file(
-                    file_size=choice(file_sizes),  # noqa: S311
-                    file_name=src_file_name,
-                    file_id=src_file_uuid,
-                    sha256_checksum=checksum,
+            assert created_node_id == node_id
+
+            node_to_files_mapping[created_node_id] = {}
+            upload_tasks.append(
+                _upload_file_and_update_project(
+                    project_id,
+                    node_id,
+                    file_name=output3_file_name,
+                    file_id=output3_file_id,
+                    file_sizes=file_sizes,
+                    file_checksums=file_checksums,
+                    upload_file=upload_file,
+                    create_simcore_file_id=create_simcore_file_id,
+                    faker=faker,
+                    node_to_files_mapping=node_to_files_mapping,
                 )
-                src_projects_list[src_node_id][src_file_uuid] = {
-                    "path": src_file,
-                    "sha256_checksum": checksum,
-                }
+            )

-            # add a few random files in the node storage
+            # add a few random files in the node workspace
             upload_tasks.extend(
                 [
-                    _upload_file_and_update_project(project, src_node_id)
+                    _upload_file_and_update_project(
+                        project_id,
+                        node_id,
+                        file_name=None,
+                        file_id=None,
+                        file_sizes=file_sizes,
+                        file_checksums=file_checksums,
+                        upload_file=upload_file,
+                        create_simcore_file_id=create_simcore_file_id,
+                        faker=faker,
+                        node_to_files_mapping=node_to_files_mapping,
+                    )
                     for _ in range(randint(0, 3))  # noqa: S311
                 ]
             )
         await limited_gather(*upload_tasks, limit=10)

         project = await get_updated_project(sqlalchemy_async_engine, project["uuid"])
-        return project, src_projects_list
+        return project, node_to_files_mapping

     return _creator
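
Note on FileIDDict: the diff types the per-file mapping values as FileIDDict, imported from .helpers.storage_utils, but its definition is not part of the hunks above. Judging only from the keys written in _upload_file_and_update_project ("path" and "sha256_checksum"), it is presumably a TypedDict along these lines — a sketch of the assumed shape, not the actual helper-module definition:

from pathlib import Path
from typing import TypedDict

from models_library.basic_types import SHA256Str


class FileIDDict(TypedDict):
    # assumed shape: local copy of the uploaded file plus the checksum it was
    # uploaded with, matching the keys set in _upload_file_and_update_project
    path: Path
    sha256_checksum: SHA256Str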
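
For context, the reworked random_project_with_files fixture still returns a factory coroutine, now yielding the project document plus a node_to_files_mapping. A consuming test might look roughly like this — a hypothetical sketch (the test name, argument values, and the assumption of an async test runner such as pytest-asyncio are illustrative, not taken from this PR):

from models_library.basic_types import SHA256Str
from pydantic import ByteSize, TypeAdapter


async def test_random_project_with_files(random_project_with_files):
    # build a small project: each node gets output_3 plus 0-3 extra workspace files
    project, node_to_files_mapping = await random_project_with_files(
        num_nodes=3,
        file_sizes=(TypeAdapter(ByteSize).validate_python("1Mib"),),
        file_checksums=(
            SHA256Str(
                "488f3b57932803bbf644593bd46d95599b1d4da1d63bc020d7ebe6f1c255f7f3"
            ),
        ),
    )
    assert project["uuid"]
    assert len(node_to_files_mapping) == 3
    for files in node_to_files_mapping.values():
        # at least output_3 was uploaded for every node
        assert len(files) >= 1
        for file_info in files.values():
            assert file_info["path"].name
            assert file_info["sha256_checksum"]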