Commit 8aadf4f

Merge b1b6596 into 066cc81
2 parents 066cc81 + b1b6596 commit 8aadf4f

File tree

ydb/tests/olap/test_quota_exhaustion.py

Lines changed: 95 additions & 14 deletions
@@ -1,4 +1,7 @@
+import os
+import subprocess
 import sys
+import time

 import ydb
 from ydb.tests.library.harness.kikimr_config import KikimrConfigGenerator
@@ -22,24 +25,23 @@ def setup_class(cls):
     def teardown_class(cls):
         cls.cluster.stop()

-    def test(self):
-        """As per https://github.com/ydb-platform/ydb/issues/13529"""
-
+    def make_session(self):
         driver = ydb.Driver(endpoint=f'grpc://localhost:{self.cluster.nodes[1].grpc_port}', database='/Root')
         session = ydb.QuerySessionPool(driver)
         driver.wait(5, fail_fast=True)
+        return session

-        def create_table(table):
-            return session.execute_with_retries(f"""
+    def create_test_table(self, session, table):
+        return session.execute_with_retries(f"""
             CREATE TABLE {table} (
                 k Int32 NOT NULL,
                 v Uint64,
                 PRIMARY KEY (k)
             ) WITH (STORE = COLUMN)
             """)

-        def upsert_chunk(table, chunk_id, retries=10):
-            return session.execute_with_retries(f"""
+    def upsert_test_chunk(self, session, table, chunk_id, retries=10):
+        return session.execute_with_retries(f"""
             $n = {ROWS_CHUNK_SIZE};
             $values_list = ListReplicate(42ul, $n);
             $rows_list = ListFoldMap($values_list, {chunk_id * ROWS_CHUNK_SIZE}, ($val, $i) -> ((<|k:$i, v:$val|>, $i + 1)));
@@ -48,17 +50,96 @@ def upsert_chunk(table, chunk_id, retries=10):
             SELECT * FROM AS_TABLE($rows_list);
             """, None, ydb.retries.RetrySettings(max_retries=retries))

-        create_table('huge')
-
+    def upsert_until_overload(self, session, table):
         try:
             for i in range(ROWS_CHUNKS_COUNT):
-                res = upsert_chunk('huge', i, retries=0)
-                print(f"query #{i} ok, result:", res, file=sys.stderr)
+                res = self.upsert_test_chunk(session, table, i, retries=0)
+                print(f"upsert #{i} ok, result:", res, file=sys.stderr)
         except ydb.issues.Overloaded:
-            print('got overload issue', file=sys.stderr)
+            print('upsert: got overload issue', file=sys.stderr)
+
+    def test(self):
+        """As per https://github.com/ydb-platform/ydb/issues/13529"""
+        session = self.make_session()
+
+        # Overflow the database
+        self.create_test_table(session, 'huge')
+        self.upsert_until_overload(session, 'huge')

+        # Cleanup
         session.execute_with_retries("""DROP TABLE huge""")

         # Check database health after cleanup
-        create_table('small')
-        upsert_chunk('small', 0)
+        self.create_test_table(session, 'small')
+        self.upsert_test_chunk(session, 'small', 0)
+
+    def delete_test_chunk(self, session, table, chunk_id, retries=10):
+        session.execute_with_retries(f"""
+            DELETE FROM {table}
+            WHERE {chunk_id * ROWS_CHUNK_SIZE} <= k AND k <= {chunk_id * ROWS_CHUNK_SIZE + ROWS_CHUNK_SIZE}
+            """, None, ydb.retries.RetrySettings(max_retries=retries))
+
+    def delete_until_overload(self, session, table):
+        for i in range(ROWS_CHUNKS_COUNT):
+            try:
+                self.delete_test_chunk(session, table, i, retries=0)
+                print(f"delete #{i} ok", file=sys.stderr)
+            except ydb.issues.Overloaded:
+                print('delete: got overload issue', file=sys.stderr)
+                return i
+
+    def ydbcli_db_schema_exec(self, node, operation_proto):
+        endpoint = f"{node.host}:{node.port}"
+        args = [
+            node.binary_path,
+            f"--server=grpc://{endpoint}",
+            "db",
+            "schema",
+            "exec",
+            operation_proto,
+        ]
+        command = subprocess.run(args, capture_output=True)
+        assert command.returncode == 0, command.stderr.decode("utf-8")
+
+
+    def alter_database_quotas(self, node, database_path, database_quotas):
+        alter_proto = """ModifyScheme {
+            OperationType: ESchemeOpAlterSubDomain
+            WorkingDir: "%s"
+            SubDomain {
+                Name: "%s"
+                DatabaseQuotas {
+                    %s
+                }
+            }
+        }""" % (
+            os.path.dirname(database_path),
+            os.path.basename(database_path),
+            database_quotas,
+        )
+
+        self.ydbcli_db_schema_exec(node, alter_proto)
+
+    def test_delete(self):
+        """As per https://github.com/ydb-platform/ydb/issues/13653"""
+        session = self.make_session()
+
+        # Set soft and hard quotas to 6 GB
+        self.alter_database_quotas(self.cluster.nodes[1], '/Root', """
+            data_size_hard_quota: 6000000000
+            data_size_soft_quota: 6000000000
+        """)
+
+        # Overflow the database
+        self.create_test_table(session, 'huge')
+        self.upsert_until_overload(session, 'huge')
+
+        # Check that deletion works at least the first time
+        # self.delete_test_chunk(session, 'huge', 0)
+        # ^ uncomment after fixing https://github.com/ydb-platform/ydb/issues/13808
+
+        # Check that deletions will lead to overflow at some point
+        i = self.delete_until_overload(session, 'huge')
+
+        # Try to wait until deletion works again (after compaction)
+        self.delete_test_chunk(session, 'huge', i)
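For reference, alter_database_quotas builds a ModifyScheme text proto with plain %-formatting: WorkingDir is the parent of the database path and SubDomain.Name is its basename. A minimal sketch, not part of the commit, that reproduces only the string formatting for the '/Root' path and the 6 GB quotas used in test_delete:

import os

# Reproduce the %-formatting from alter_database_quotas (sketch only).
database_path = '/Root'
database_quotas = """
    data_size_hard_quota: 6000000000
    data_size_soft_quota: 6000000000
"""
alter_proto = """ModifyScheme {
    OperationType: ESchemeOpAlterSubDomain
    WorkingDir: "%s"
    SubDomain {
        Name: "%s"
        DatabaseQuotas {
            %s
        }
    }
}""" % (
    os.path.dirname(database_path),   # "/"    -> WorkingDir
    os.path.basename(database_path),  # "Root" -> SubDomain.Name
    database_quotas,
)
print(alter_proto)  # the text proto handed to ydbcli_db_schema_exec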

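The commit adds import time, while the final delete in test_delete relies on the default retry budget of execute_with_retries to wait out compaction. If that budget proves too tight, an explicit deadline-based poll is one option; below is a minimal sketch under that assumption, reusing delete_test_chunk and ydb.issues.Overloaded from the diff above (the timeout and sleep values are illustrative, not from the commit):

import sys
import time

import ydb


def wait_delete_after_compaction(test, session, table, chunk_id, timeout_seconds=300):
    # Poll until the DELETE goes through, assuming compaction eventually
    # frees enough space under the quota. 'test' is an instance of the
    # test class from the diff above; timeout_seconds is an assumed value.
    deadline = time.time() + timeout_seconds
    while True:
        try:
            return test.delete_test_chunk(session, table, chunk_id, retries=0)
        except ydb.issues.Overloaded:
            if time.time() >= deadline:
                raise
            print('delete still overloaded, waiting for compaction', file=sys.stderr)
            time.sleep(10)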