
Commit 843a9bc

Merge 701cbfb into 8af7595
2 parents 8af7595 + 701cbfb commit 843a9bc

File tree

1 file changed: +93 -14 lines changed


ydb/tests/olap/test_quota_exhaustion.py

Lines changed: 93 additions & 14 deletions
@@ -1,3 +1,5 @@
+import os
+import subprocess
 import sys
 
 import ydb
@@ -22,24 +24,23 @@ def setup_class(cls):
     def teardown_class(cls):
         cls.cluster.stop()
 
-    def test(self):
-        """As per https://github.com/ydb-platform/ydb/issues/13529"""
-
+    def make_session(self):
         driver = ydb.Driver(endpoint=f'grpc://localhost:{self.cluster.nodes[1].grpc_port}', database='/Root')
         session = ydb.QuerySessionPool(driver)
         driver.wait(5, fail_fast=True)
+        return session
 
-        def create_table(table):
-            return session.execute_with_retries(f"""
+    def create_test_table(self, session, table):
+        return session.execute_with_retries(f"""
             CREATE TABLE {table} (
                 k Int32 NOT NULL,
                 v Uint64,
                 PRIMARY KEY (k)
             ) WITH (STORE = COLUMN)
         """)
 
-        def upsert_chunk(table, chunk_id, retries=10):
-            return session.execute_with_retries(f"""
+    def upsert_test_chunk(self, session, table, chunk_id, retries=10):
+        return session.execute_with_retries(f"""
             $n = {ROWS_CHUNK_SIZE};
             $values_list = ListReplicate(42ul, $n);
             $rows_list = ListFoldMap($values_list, {chunk_id * ROWS_CHUNK_SIZE}, ($val, $i) -> ((<|k:$i, v:$val|>, $i + 1)));
@@ -48,17 +49,95 @@ def upsert_chunk(table, chunk_id, retries=10):
             SELECT * FROM AS_TABLE($rows_list);
         """, None, ydb.retries.RetrySettings(max_retries=retries))
 
-        create_table('huge')
-
+    def upsert_until_overload(self, session, table):
         try:
             for i in range(ROWS_CHUNKS_COUNT):
-                res = upsert_chunk('huge', i, retries=0)
-                print(f"query #{i} ok, result:", res, file=sys.stderr)
+                res = self.upsert_test_chunk(session, table, i, retries=0)
+                print(f"upsert #{i} ok, result:", res, file=sys.stderr)
         except ydb.issues.Overloaded:
-            print('got overload issue', file=sys.stderr)
+            print('upsert: got overload issue', file=sys.stderr)
+
+    def test(self):
+        """As per https://github.com/ydb-platform/ydb/issues/13529"""
+        session = self.make_session()
+
+        # Overflow the database
+        self.create_test_table(session, 'huge')
+        self.upsert_until_overload(session, 'huge')
 
+        # Cleanup
         session.execute_with_retries("""DROP TABLE huge""")
 
         # Check database health after cleanup
-        create_table('small')
-        upsert_chunk('small', 0)
+        self.create_test_table(session, 'small')
+        self.upsert_test_chunk(session, 'small', 0)
+
+    def delete_test_chunk(self, session, table, chunk_id, retries=10):
+        session.execute_with_retries(f"""
+            DELETE FROM {table}
+            WHERE {chunk_id * ROWS_CHUNK_SIZE} <= k AND k <= {chunk_id * ROWS_CHUNK_SIZE + ROWS_CHUNK_SIZE}
+        """, None, ydb.retries.RetrySettings(max_retries=retries))
+
+    def delete_until_overload(self, session, table):
+        for i in range(ROWS_CHUNKS_COUNT):
+            try:
+                self.delete_test_chunk(session, table, i, retries=0)
+                print(f"delete #{i} ok", file=sys.stderr)
+            except ydb.issues.Overloaded:
+                print('delete: got overload issue', file=sys.stderr)
+                return i
+
+    def ydbcli_db_schema_exec(self, node, operation_proto):
+        endpoint = f"{node.host}:{node.port}"
+        args = [
+            node.binary_path,
+            f"--server=grpc://{endpoint}",
+            "db",
+            "schema",
+            "exec",
+            operation_proto,
+        ]
+        command = subprocess.run(args, capture_output=True)
+        assert command.returncode == 0, command.stderr.decode("utf-8")
+
+    def alter_database_quotas(self, node, database_path, database_quotas):
+        alter_proto = """ModifyScheme {
+            OperationType: ESchemeOpAlterSubDomain
+            WorkingDir: "%s"
+            SubDomain {
+                Name: "%s"
+                DatabaseQuotas {
+                    %s
+                }
+            }
+        }""" % (
+            os.path.dirname(database_path),
+            os.path.basename(database_path),
+            database_quotas,
+        )
+
+        self.ydbcli_db_schema_exec(node, alter_proto)
+
+    def test_delete(self):
+        """As per https://github.com/ydb-platform/ydb/issues/13653"""
+        session = self.make_session()
+
+        # Set soft and hard quotas to 6GB
+        self.alter_database_quotas(self.cluster.nodes[1], '/Root', """
+            data_size_hard_quota: 6000000000
+            data_size_soft_quota: 6000000000
+        """)
+
+        # Overflow the database
+        self.create_test_table(session, 'huge')
+        self.upsert_until_overload(session, 'huge')
+
+        # Check that deletion works at least first time
+        # self.delete_test_chunk(session, 'huge', 0)
+        # ^ uncomment after fixing https://github.com/ydb-platform/ydb/issues/13808
+
+        # Check that deletions will lead to overflow at some moment
+        i = self.delete_until_overload(session, 'huge')
+
+        # Try to wait until deletion works again (after compaction)
+        self.delete_test_chunk(session, 'huge', i)
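
For reference, below is a standalone sketch (not part of this commit) of the ModifyScheme text that the new alter_database_quotas helper renders before handing it to ydbcli_db_schema_exec, evaluated for database_path='/Root' and the 6GB quotas used in test_delete. The template, field names, and values are copied from the diff above; the wrapper variables around them are illustrative only.

# Illustrative sketch: reproduce the proto text built by alter_database_quotas
# for database_path='/Root'. Quota values mirror test_delete in the diff above.
import os

database_path = '/Root'
database_quotas = """
    data_size_hard_quota: 6000000000
    data_size_soft_quota: 6000000000
"""

alter_proto = """ModifyScheme {
    OperationType: ESchemeOpAlterSubDomain
    WorkingDir: "%s"
    SubDomain {
        Name: "%s"
        DatabaseQuotas {
            %s
        }
    }
}""" % (
    os.path.dirname(database_path),   # '/'    -> WorkingDir
    os.path.basename(database_path),  # 'Root' -> SubDomain.Name
    database_quotas,
)

# ydbcli_db_schema_exec (in the diff above) passes a string like this as the
# final argument to node.binary_path with the "db schema exec" subcommand.
print(alter_proto)

For a top-level database such as /Root the WorkingDir collapses to '/', which is why the helper splits the path with os.path.dirname/os.path.basename rather than hard-coding the two fields.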

0 commit comments
