@@ -38,21 +38,21 @@ def test_insert(self, kikimr, s3, client, format, dataset_name, unique_prefix):
         storage_connection_name = unique_prefix + "ibucket"
         client.create_storage_connection(storage_connection_name, "insert_bucket")
 
-        sql = R'''
-            insert into `{}`.`{}/` with (format={})
+        sql = f'''
+            insert into `{storage_connection_name}`.`{dataset_name}/` with (format={format})
             select * from AS_TABLE([<|foo:123, bar:"xxx"u|>,<|foo:456, bar:"yyy"u|>]);
-            '''.format(storage_connection_name, dataset_name, format)
+            '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
         prefix = client.describe_query(query_id).result.query.meta.last_job_id.split("-")[0]  # cut _<query_id> part
 
-        sql = R'''
-            select foo, bar from {0}.`{1}/{3}*` with (format={2}, schema(
+        sql = f'''
+            select foo, bar from {storage_connection_name}.`{dataset_name}/{prefix}*` with (format={format}, schema(
                 foo Int NOT NULL,
                 bar String NOT NULL
                 ))
-            '''.format(storage_connection_name, dataset_name, format, prefix)
+            '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
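
The recurring edit in this commit replaces positional str.format templates with f-strings. A minimal standalone sketch of the equivalence, using placeholder names rather than the real test fixtures:

    connection, dataset, fmt = "ibucket", "ds", "json_each_row"

    # Before: opaque positional {} placeholders filled by .format().
    sql_old = '''
        insert into `{}`.`{}/` with (format={})
    '''.format(connection, dataset, fmt)

    # After: an f-string (Python 3.6+) names each value at its use site.
    sql_new = f'''
        insert into `{connection}`.`{dataset}/` with (format={fmt})
    '''

    assert sql_old == sql_new

The f-string form also removes the mismatch risk the second query above had, where {0}..{3} indices had to be kept aligned with the .format() argument order by hand.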
@@ -70,7 +70,7 @@ def test_insert(self, kikimr, s3, client, format, dataset_name, unique_prefix):
         assert result_set.rows[0].items[1].bytes_value == b'xxx'
         assert result_set.rows[1].items[0].int32_value == 456
         assert result_set.rows[1].items[1].bytes_value == b'yyy'
-        assert sum(kikimr.control_plane.get_metering()) == 20
+        assert sum(kikimr.control_plane.get_metering(1)) == 20
 
     @yq_all
     @pytest.mark.parametrize("client", [{"folder_id": "my_folder"}], indirect=True)
@@ -93,7 +93,7 @@ def test_big_json_list_insert(self, kikimr, s3, client, unique_prefix):
             aws_secret_access_key="secret_key"
         )
 
-        taxi = R'''VendorID'''
+        taxi = '''VendorID'''
         for i in range(37):
             taxi += "\n" + str(i)
         s3_client.put_object(Body=taxi, Bucket='big_data_bucket', Key='src/taxi.csv', ContentType='text/plain')
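
Dropping the R prefix on taxi is behavior-preserving because the literal holds no backslashes; the prefix only controls whether escape sequences are interpreted. A quick sketch of when it would matter (the same rule explains the fR''' to f''' changes in the hunks below, since raw-ness composes with f-strings):

    # Identical while the literal contains no escape sequences.
    assert R'''VendorID''' == '''VendorID'''

    # With a backslash present, a raw string keeps it verbatim...
    assert R'a\nb' == 'a\\nb'
    # ...while a plain string turns \n into a newline.
    assert 'a\nb' != R'a\nb'

    # Raw-ness composes with f-strings: fR keeps the backslash,
    # plain f interprets the escape.
    x = 1
    assert fR'{x}\n' == '1\\n'
    assert f'{x}\n' == '1\n'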
@@ -119,7 +119,7 @@ def test_big_json_list_insert(self, kikimr, s3, client, unique_prefix):
 
         client.create_storage_connection("ibucket", "insert_bucket")
 
-        sql = fR'''
+        sql = f'''
             pragma s3.JsonListSizeLimit="10";
             INSERT INTO bindings.`{storage_sink_binding_name}`
             SELECT
@@ -130,7 +130,7 @@ def test_big_json_list_insert(self, kikimr, s3, client, unique_prefix):
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
 
             SELECT
                 count(*)
@@ -148,7 +148,7 @@ def test_big_json_list_insert(self, kikimr, s3, client, unique_prefix):
         assert result_set.columns[0].type.type_id == ydb.Type.UINT64
         assert len(result_set.rows) == 1
         assert result_set.rows[0].items[0].uint64_value == 37
-        assert sum(kikimr.control_plane.get_metering()) == 20
+        assert sum(kikimr.control_plane.get_metering(1)) == 20
 
     @yq_all
     @pytest.mark.parametrize("client", [{"folder_id": "my_folder"}], indirect=True)
@@ -167,7 +167,7 @@ def test_insert_csv_delimiter(self, kikimr, s3, client, unique_prefix):
         storage_connection_name = unique_prefix + "ibucket"
         client.create_storage_connection(storage_connection_name, "insert_bucket")
 
-        sql = fR'''
+        sql = f'''
             insert into `{storage_connection_name}`.`csv_delim_out/` with (
                 format=csv_with_names,
                 csv_delimiter=";"
@@ -179,11 +179,11 @@ def test_insert_csv_delimiter(self, kikimr, s3, client, unique_prefix):
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
         prefix = ""  # client.describe_query(query_id).result.query.meta.last_job_id.split("-")[0] # cut _<query_id> part
 
-        sql = R'''
-            select data from `{}`.`csv_delim_out/{}*` with (format=raw, schema(
+        sql = f'''
+            select data from `{storage_connection_name}`.`csv_delim_out/{prefix}*` with (format=raw, schema(
                 data String NOT NULL
                 ))
-            '''.format(storage_connection_name, prefix)
+            '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
@@ -196,7 +196,7 @@ def test_insert_csv_delimiter(self, kikimr, s3, client, unique_prefix):
         assert result_set.columns[0].type.type_id == ydb.Type.STRING
         assert len(result_set.rows) == 1
         assert result_set.rows[0].items[0].bytes_value == b'"bar";"foo"\n"xxx";123\n"yyy";456\n'
-        assert sum(kikimr.control_plane.get_metering()) == 20
+        assert sum(kikimr.control_plane.get_metering(1)) == 20
 
     @yq_all
     @pytest.mark.parametrize("client", [{"folder_id": "my_folder"}], indirect=True)
@@ -215,23 +215,23 @@ def test_append(self, kikimr, s3, client, unique_prefix):
         storage_connection_name = unique_prefix + "abucket"
         client.create_storage_connection(storage_connection_name, "append_bucket")
 
-        sql = fR'''
+        sql = f'''
             insert into `{storage_connection_name}`.`append/` with (format=json_each_row)
             select * from AS_TABLE([<|foo:123, bar:"xxx"u|>,<|foo:456, bar:"yyy"u|>]);
             '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
             insert into `{storage_connection_name}`.`append/` with (format=json_each_row)
             select * from AS_TABLE([<|foo:345, bar:"zzz"u|>]);
             '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
             select foo, bar from `{storage_connection_name}`.`append/` with (format=json_each_row, schema(
                 foo Int NOT NULL,
                 bar String NOT NULL
@@ -256,7 +256,7 @@ def test_append(self, kikimr, s3, client, unique_prefix):
         assert result_set.rows[1].items[1].bytes_value == b'zzz'
         assert result_set.rows[2].items[0].int32_value == 456
         assert result_set.rows[2].items[1].bytes_value == b'yyy'
-        assert sum(kikimr.control_plane.get_metering()) == 30
+        assert sum(kikimr.control_plane.get_metering(1)) == 30
 
     @yq_all
     @pytest.mark.parametrize("client", [{"folder_id": "my_folder"}], indirect=True)
@@ -275,15 +275,15 @@ def test_part_split(self, kikimr, s3, client, unique_prefix):
         storage_connection_name = unique_prefix + "sbucket"
         client.create_storage_connection(storage_connection_name, "split_bucket")
 
-        sql = fR'''
+        sql = f'''
             insert into `{storage_connection_name}`.`part/` with (format=json_each_row, partitioned_by=(foo, bar))
             select * from AS_TABLE([<|foo:123, bar:"xxx"u, data:3.14|>,<|foo:456, bar:"yyy"u, data:2.72|>,<|foo:123, bar:"xxx"u, data:1.41|>]);
             '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
             select data from `{storage_connection_name}`.`part/foo=123/bar=xxx/` with (format=json_each_row, schema(
                 data Float NOT NULL,
                 ))
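
For orientation: partitioned_by=(foo, bar) makes the insert above fan each distinct (foo, bar) pair out under a Hive-style key prefix, which is why this select can target part/foo=123/bar=xxx/ directly, and why test_part_merge below writes to such prefixes by hand. The layout implied by the three inserted rows (object names under each prefix are engine-generated; only the prefixes are fixed):

    # Hive-style prefixes produced for the three rows inserted above.
    expected_prefixes = [
        "part/foo=123/bar=xxx/",  # (123, "xxx", 3.14) and (123, "xxx", 1.41)
        "part/foo=456/bar=yyy/",  # (456, "yyy", 2.72)
    ]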
@@ -301,7 +301,7 @@ def test_part_split(self, kikimr, s3, client, unique_prefix):
         assert len(result_set.rows) == 2
         assert abs(result_set.rows[0].items[0].float_value - 3.14) < 0.01
         assert abs(result_set.rows[1].items[0].float_value - 1.41) < 0.01
-        assert sum(kikimr.control_plane.get_metering()) == 20
+        assert sum(kikimr.control_plane.get_metering(1)) == 20
 
     @yq_all
     @pytest.mark.parametrize("client", [{"folder_id": "my_folder"}], indirect=True)
@@ -320,23 +320,23 @@ def test_part_merge(self, kikimr, s3, client, unique_prefix):
         storage_connection_name = unique_prefix + "mbucket"
         client.create_storage_connection(storage_connection_name, "merge_bucket")
 
-        sql = fR'''
+        sql = f'''
             insert into `{storage_connection_name}`.`part/foo=123/bar=xxx/` with (format=json_each_row)
             select * from AS_TABLE([<|data:3.14|>,<|data:1.41|>]);
             '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
             insert into `{storage_connection_name}`.`part/foo=456/bar=yyy/` with (format=json_each_row)
             select * from AS_TABLE([<|data:2.72|>]);
             '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
             select foo, bar, data from `{storage_connection_name}`.`part` with (format=json_each_row, partitioned_by=(foo, bar), schema(
                 foo Int NOT NULL,
                 bar String NOT NULL,
@@ -367,7 +367,7 @@ def test_part_merge(self, kikimr, s3, client, unique_prefix):
         assert result_set.rows[2].items[0].int32_value == 456
         assert result_set.rows[2].items[1].bytes_value == b'yyy'
         assert abs(result_set.rows[2].items[2].float_value - 2.72) < 0.01
-        assert sum(kikimr.control_plane.get_metering()) == 30
+        assert sum(kikimr.control_plane.get_metering(1)) == 30
 
     @yq_all
     @pytest.mark.parametrize("format", ["json_list", "json_each_row", "csv_with_names"])
@@ -403,15 +403,15 @@ def test_part_binding(self, kikimr, s3, client, format, unique_prefix):
             "file_pattern": "*{json,csv}"
         })
 
-        sql = fR'''
+        sql = f'''
             insert into bindings.`{storage_binding_name}`
             select * from AS_TABLE([<|foo:123, bar:"xxx"u, data:3.14|>,<|foo:456, bar:"yyy"u, data:2.72|>,<|foo:123, bar:"xxx"u, data:1.41|>]);
             '''
 
         query_id = client.create_query("simple", sql, type=fq.QueryContent.QueryType.ANALYTICS).result.query_id
         client.wait_query_status(query_id, fq.QueryMeta.COMPLETED)
 
-        sql = fR'''
+        sql = f'''
             select foo, bar, data from bindings.`{storage_binding_name}` order by foo, data
             '''
 
@@ -438,7 +438,7 @@ def test_part_binding(self, kikimr, s3, client, format, unique_prefix):
         assert result_set.rows[2].items[0].int32_value == 456
         assert result_set.rows[2].items[1].text_value == 'yyy'
         assert abs(result_set.rows[2].items[2].double_value - 2.72) < 0.01
-        assert sum(kikimr.control_plane.get_metering()) == 20
+        assert sum(kikimr.control_plane.get_metering(1)) == 20
 
     @yq_v1
     @pytest.mark.parametrize("format", ["json_each_row", "csv_with_names", "tsv_with_names", "parquet"])