1
- # TODO: W0611:Unused import ...
2
- # pylint: disable=W0611
3
- # TODO: W0613:Unused argument ...
4
- # pylint: disable=W0613
5
- #
6
- # pylint: disable=W0621
1
+ # pylint:disable=wildcard-import
2
+ # pylint:disable=unused-import
3
+ # pylint:disable=unused-variable
4
+ # pylint:disable=unused-argument
5
+ # pylint:disable=redefined-outer-name
6
+
7
7
import asyncio
8
8
import os
9
9
import subprocess
26
26
SIMCORE_S3_STR )
27
27
from utils import ACCESS_KEY , BUCKET_NAME , DATABASE , PASS , SECRET_KEY , USER
28
28
29
- # fixtures -------------------------------------------------------
29
+
30
30
31
31
@pytest.fixture(scope='session')
def here():
    """Absolute path of the directory containing this test file."""
    # When executed as a script, locate via argv[0]; under pytest, via __file__.
    caller = sys.argv[0] if __name__ == "__main__" else __file__
    return Path(caller).resolve().parent
34
34
35
+
35
36
@pytest.fixture(scope='session')
def package_dir(here):
    """Directory where the simcore_service_storage package is installed."""
    pkg_path = Path(simcore_service_storage.__file__).parent
    assert pkg_path.exists()
    return pkg_path
40
41
42
+
41
43
@pytest.fixture(scope='session')
def osparc_simcore_root_dir(here):
    """Root of the osparc-simcore repository (three levels above this file)."""
    repo_root = here.parent.parent.parent
    assert repo_root.exists(), "Is this service within osparc-simcore repo?"
    return repo_root
46
48
49
+
47
50
@pytest .fixture (scope = 'session' )
48
51
def python27_exec (osparc_simcore_root_dir , tmpdir_factory , here ):
49
52
# Assumes already created with make .venv27
@@ -52,16 +55,18 @@ def python27_exec(osparc_simcore_root_dir, tmpdir_factory, here):
52
55
if not venv27 .exists ():
53
56
# create its own virtualenv
54
57
venv27 = tmpdir_factory .mktemp ("virtualenv" ) / ".venv27"
55
- cmd = "virtualenv --python=python2 %s" % (venv27 ) # TODO: how to split in command safely?
56
- assert subprocess .check_call (cmd .split ()) == 0 , "Unable to run %s" % cmd
58
+ # TODO: how to split in command safely?
59
+ cmd = "virtualenv --python=python2 %s" % (venv27 )
60
+ assert subprocess .check_call (
61
+ cmd .split ()) == 0 , "Unable to run %s" % cmd
57
62
58
63
# installs python2 requirements
59
64
pip_exec = venv27 / "bin" / "pip"
60
65
assert pip_exec .exists ()
61
66
requirements_py2 = here .parent / "requirements/py27.txt"
62
67
cmd = "{} install -r {}" .format (pip_exec , requirements_py2 )
63
- assert subprocess .check_call (cmd . split ()) == 0 , "Unable to run %s" % cmd
64
-
68
+ assert subprocess .check_call (
69
+ cmd . split ()) == 0 , "Unable to run %s" % cmd
65
70
66
71
python27_exec = venv27 / "bin" / "python2.7"
67
72
assert python27_exec .exists ()
@@ -73,19 +78,20 @@ def python27_path(python27_exec):
73
78
return Path (python27_exec ).parent .parent
74
79
# Assumes already created with make .venv27
75
80
81
+
76
82
@pytest .fixture (scope = 'session' )
77
83
def docker_compose_file (here ):
78
84
""" Overrides pytest-docker fixture
79
85
"""
80
86
old = os .environ .copy ()
81
87
82
88
# docker-compose reads these environs
83
- os .environ ['POSTGRES_DB' ]= DATABASE
84
- os .environ ['POSTGRES_USER' ]= USER
85
- os .environ ['POSTGRES_PASSWORD' ]= PASS
86
- os .environ ['POSTGRES_ENDPOINT' ]= "FOO" # TODO: update config schema!!
87
- os .environ ['MINIO_ACCESS_KEY' ]= ACCESS_KEY
88
- os .environ ['MINIO_SECRET_KEY' ]= SECRET_KEY
89
+ os .environ ['POSTGRES_DB' ] = DATABASE
90
+ os .environ ['POSTGRES_USER' ] = USER
91
+ os .environ ['POSTGRES_PASSWORD' ] = PASS
92
+ os .environ ['POSTGRES_ENDPOINT' ] = "FOO" # TODO: update config schema!!
93
+ os .environ ['MINIO_ACCESS_KEY' ] = ACCESS_KEY
94
+ os .environ ['MINIO_SECRET_KEY' ] = SECRET_KEY
89
95
90
96
dc_path = here / 'docker-compose.yml'
91
97
@@ -94,12 +100,13 @@ def docker_compose_file(here):
94
100
95
101
os .environ = old
96
102
103
+
97
104
@pytest .fixture (scope = 'session' )
98
105
def postgres_service (docker_services , docker_ip ):
99
106
url = 'postgresql://{user}:{password}@{host}:{port}/{database}' .format (
100
- user = USER ,
101
- password = PASS ,
102
- database = DATABASE ,
107
+ user = USER ,
108
+ password = PASS ,
109
+ database = DATABASE ,
103
110
host = docker_ip ,
104
111
port = docker_services .port_for ('postgres' , 5432 ),
105
112
)
@@ -112,27 +119,29 @@ def postgres_service(docker_services, docker_ip):
112
119
)
113
120
114
121
postgres_service = {
115
- 'user' : USER ,
116
- 'password' : PASS ,
117
- 'database' : DATABASE ,
118
- 'host' : docker_ip ,
119
- 'port' : docker_services .port_for ('postgres' , 5432 )
122
+ 'user' : USER ,
123
+ 'password' : PASS ,
124
+ 'database' : DATABASE ,
125
+ 'host' : docker_ip ,
126
+ 'port' : docker_services .port_for ('postgres' , 5432 )
120
127
}
121
128
122
129
return postgres_service
123
130
131
+
124
132
@pytest.fixture(scope='session')
def postgres_service_url(postgres_service, docker_services, docker_ip):
    """DSN string for the dockerized postgres (depends on postgres_service
    so the database is up before the URL is handed out)."""
    # Local name differs from the fixture name to avoid shadowing.
    dsn = 'postgresql://{user}:{password}@{host}:{port}/{database}'.format(
        user=USER,
        password=PASS,
        database=DATABASE,
        host=docker_ip,
        port=docker_services.port_for('postgres', 5432),
    )
    return dsn
135
143
144
+
136
145
@pytest .fixture (scope = 'function' )
137
146
async def postgres_engine (loop , postgres_service_url ):
138
147
postgres_engine = await create_engine (postgres_service_url )
@@ -163,24 +172,28 @@ def minio_service(docker_services, docker_ip):
163
172
return {
164
173
'endpoint' : '{ip}:{port}' .format (ip = docker_ip , port = docker_services .port_for ('minio' , 9000 )),
165
174
'access_key' : ACCESS_KEY ,
166
- 'secret_key' : SECRET_KEY ,
167
- 'bucket_name' : BUCKET_NAME ,
168
- }
175
+ 'secret_key' : SECRET_KEY ,
176
+ 'bucket_name' : BUCKET_NAME ,
177
+ }
178
+
169
179
170
180
@pytest.fixture(scope="module")
def s3_client(minio_service):
    """S3Client wired to the dockerized minio endpoint."""
    # Imported lazily so collection does not require the s3wrapper package.
    from s3wrapper.s3_client import S3Client

    client = S3Client(
        endpoint=minio_service['endpoint'],
        access_key=minio_service["access_key"],
        secret_key=minio_service["secret_key"],
    )
    return client
176
187
188
+
177
189
@pytest .fixture (scope = "function" )
178
190
def mock_files_factory (tmpdir_factory ):
179
191
def _create_files (count ):
180
192
filepaths = []
181
193
for _i in range (count ):
182
194
name = str (uuid .uuid4 ())
183
- filepath = os .path .normpath (str (tmpdir_factory .mktemp ('data' ).join (name + ".txt" )))
195
+ filepath = os .path .normpath (
196
+ str (tmpdir_factory .mktemp ('data' ).join (name + ".txt" )))
184
197
with open (filepath , 'w' ) as fout :
185
198
fout .write ("Hello world\n " )
186
199
filepaths .append (filepath )
@@ -198,10 +211,11 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory):
198
211
bucket_name = BUCKET_NAME
199
212
s3_client .create_bucket (bucket_name , delete_contents_if_exists = True )
200
213
201
- #TODO: use pip install Faker
202
- users = [ 'alice' , 'bob' , 'chuck' , 'dennis' ]
214
+ # TODO: use pip install Faker
215
+ users = ['alice' , 'bob' , 'chuck' , 'dennis' ]
203
216
204
- projects = ['astronomy' , 'biology' , 'chemistry' , 'dermatology' , 'economics' , 'futurology' , 'geology' ]
217
+ projects = ['astronomy' , 'biology' , 'chemistry' ,
218
+ 'dermatology' , 'economics' , 'futurology' , 'geology' ]
205
219
location = SIMCORE_S3_STR
206
220
207
221
nodes = ['alpha' , 'beta' , 'gamma' , 'delta' ]
@@ -214,41 +228,43 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory):
214
228
idx = randrange (len (users ))
215
229
user_name = users [idx ]
216
230
user_id = idx + 10
217
- idx = randrange (len (projects ))
231
+ idx = randrange (len (projects ))
218
232
project_name = projects [idx ]
219
233
project_id = idx + 100
220
- idx = randrange (len (nodes ))
234
+ idx = randrange (len (nodes ))
221
235
node = nodes [idx ]
222
236
node_id = idx + 10000
223
237
file_name = str (counter )
224
- object_name = Path (str (project_id ), str (node_id ), str (counter )).as_posix ()
238
+ object_name = Path (str (project_id ), str (
239
+ node_id ), str (counter )).as_posix ()
225
240
file_uuid = Path (object_name ).as_posix ()
226
241
227
242
assert s3_client .upload_file (bucket_name , object_name , _file )
228
243
229
- d = { 'file_uuid' : file_uuid ,
230
- 'location_id' : "0" ,
231
- 'location' : location ,
232
- 'bucket_name' : bucket_name ,
233
- 'object_name' : object_name ,
234
- 'project_id' : str (project_id ),
235
- 'project_name' : project_name ,
236
- 'node_id' : str (node_id ),
237
- 'node_name' : node ,
238
- 'file_name' : file_name ,
239
- 'user_id' : str (user_id ),
240
- 'user_name' : user_name
241
- }
244
+ d = {'file_uuid' : file_uuid ,
245
+ 'location_id' : "0" ,
246
+ 'location' : location ,
247
+ 'bucket_name' : bucket_name ,
248
+ 'object_name' : object_name ,
249
+ 'project_id' : str (project_id ),
250
+ 'project_name' : project_name ,
251
+ 'node_id' : str (node_id ),
252
+ 'node_name' : node ,
253
+ 'file_name' : file_name ,
254
+ 'user_id' : str (user_id ),
255
+ 'user_name' : user_name
256
+ }
242
257
243
258
counter = counter + 1
244
259
245
260
data [object_name ] = FileMetaData (** d )
246
261
247
- utils .insert_metadata (postgres_service_url , data [object_name ]) #pylint: disable=no-member
248
-
262
+ # pylint: disable=no-member
263
+ utils .insert_metadata (postgres_service_url ,
264
+ data [object_name ])
249
265
250
266
total_count = 0
251
- for _obj in s3_client .list_objects_v2 (bucket_name , recursive = True ):
267
+ for _obj in s3_client .list_objects_v2 (bucket_name , recursive = True ):
252
268
total_count = total_count + 1
253
269
254
270
assert total_count == N
@@ -260,10 +276,6 @@ def dsm_mockup_db(postgres_service_url, s3_client, mock_files_factory):
260
276
# db
261
277
utils .drop_tables (url = postgres_service_url )
262
278
263
- # This is weird, somehow the default loop gives problems with pytest asyncio, so lets override it
264
- @pytest .fixture
265
- def loop (event_loop ):
266
- return event_loop
267
279
268
280
@pytest .fixture (scope = "function" )
269
281
async def datcore_testbucket (loop , python27_exec , mock_files_factory ):
@@ -282,19 +294,20 @@ async def datcore_testbucket(loop, python27_exec, mock_files_factory):
282
294
283
295
ready = False
284
296
counter = 0
285
- while not ready and counter < 5 :
297
+ while not ready and counter < 5 :
286
298
data = await dcw .list_files ()
287
299
ready = len (data ) == 2
288
300
await asyncio .sleep (10 )
289
301
counter = counter + 1
290
302
291
-
292
303
yield BUCKET_NAME
293
304
294
305
await dcw .delete_test_dataset (BUCKET_NAME )
295
306
307
+
296
308
@pytest.fixture(scope="function")
def dsm_fixture(s3_client, python27_exec, postgres_engine, loop):
    """Fresh DataStorageManager backed by the test s3/postgres services."""
    executor = ThreadPoolExecutor(3)
    return DataStorageManager(
        s3_client, python27_exec, postgres_engine, loop, executor, BUCKET_NAME)
0 commit comments