Commit b6448f6

doc and cleanup while reading
1 parent de15f43 commit b6448f6

3 files changed, +30 -31 lines changed


services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py

Lines changed: 7 additions & 8 deletions
@@ -83,27 +83,26 @@ async def garbage_collector_task(app: web.Application):
 
 async def collect_garbage(registry: RedisResourceRegistry, app: web.Application):
     """
-    Garbage collection has the task of removing trash from the system. The trash
+    Garbage collection has the task of removing trash (i.e. unused resources) from the system. The trash
     can be divided in:
 
     - Websockets & Redis (used to keep track of current active connections)
     - GUEST users (used for temporary access to the system which are created on the fly)
-    - deletion of users. If a user needs to be deleted it is manually marked as GUEST
-        in the database
+    - Deletion of users. If a user needs to be deleted it can be set as GUEST in the database
 
     The resources are Redis entries where all information regarding all the
-    websocket identifiers for all opened tabs accross all broser for each user
+    websocket identifiers for all opened tabs accross all browser for each user
     are stored.
 
-    The alive/dead keys are normal Redis keys. To each key and ALIVE key is associated,
-    which has an assigned TTL. The browser will call the `client_heartbeat` websocket
+    The alive/dead keys are normal Redis keys. To each key an ALIVE key is associated,
+    which has an assigned TTL (Time To Live). The browser will call the `client_heartbeat` websocket
     endpoint to refresh the TTL, thus declaring that the user (websocket connection) is
-    still active. The `resource_deletion_timeout_seconds` is theTTL of the key.
+    still active. The `resource_deletion_timeout_seconds` is the TTL of the key.
 
     The field `garbage_collection_interval_seconds` defines the interval at which this
     function will be called.
     """
-    logger.info("collecting garbage...")
+    logger.info("Collecting garbage...")
 
     # Removes disconnected user resources
     # Triggers signal to close possible pending opened projects
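
The ALIVE-key mechanism described in the docstring above can be shown with a short, self-contained sketch. It assumes an aioredis 1.x client (the library this commit uses) plus an illustrative key name and TTL value; it is not the service's actual code.

# Sketch of the ALIVE key + TTL mechanism, assuming aioredis 1.x.
# Key name and timeout value are illustrative only.
import asyncio

import aioredis

RESOURCE_DELETION_TIMEOUT_SECONDS = 900  # assumed TTL value


async def heartbeat_demo() -> None:
    client = await aioredis.create_redis_pool("redis://localhost:6379", encoding="utf-8")
    alive_key = "user_id=42:client_session_id=abc:alive"  # hypothetical key layout

    # Each `client_heartbeat` call from the browser effectively re-runs this SET,
    # refreshing the TTL and keeping the session marked as alive.
    await client.set(alive_key, 1, expire=RESOURCE_DELETION_TIMEOUT_SECONDS)

    # The garbage collector only needs to check whether the key still exists:
    print("alive:", await client.exists(alive_key) > 0)

    client.close()
    await client.wait_closed()


asyncio.run(heartbeat_demo())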

services/web/server/src/simcore_service_webserver/resource_manager/redis.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ async def redis_client(app: web.Application):
     client = None
     for attempt in Retrying(**retry_upon_init_policy):
         with attempt:
-            client = await aioredis.create_redis_pool(url, encoding="utf-8")
+            client: aioredis.Redis = await aioredis.create_redis_pool(url, encoding="utf-8")
     # create lock manager
     lock_manager = Aioredlock([url])
 
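
The `Retrying(**retry_upon_init_policy)` loop above is presumably tenacity's retry helper; the policy itself is not part of this diff. A minimal sketch of such a startup-retry policy, with assumed stop/wait settings, could look like this:

# Sketch of a startup-retry policy like `retry_upon_init_policy`.
# The concrete stop/wait values are assumptions, not the service's settings.
import logging

import aioredis
from tenacity import Retrying, before_sleep_log, stop_after_attempt, wait_fixed

logger = logging.getLogger(__name__)

retry_upon_init_policy = dict(
    stop=stop_after_attempt(5),  # give up after 5 attempts (assumed)
    wait=wait_fixed(2),          # wait 2 seconds between attempts (assumed)
    before_sleep=before_sleep_log(logger, logging.WARNING),
    reraise=True,
)


async def connect(url: str) -> aioredis.Redis:
    client = None
    # The synchronous Retrying iterator drives the loop; the awaited call runs inside it.
    for attempt in Retrying(**retry_upon_init_policy):
        with attempt:
            client = await aioredis.create_redis_pool(url, encoding="utf-8")
    return client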

services/web/server/src/simcore_service_webserver/resource_manager/registry.py

Lines changed: 22 additions & 22 deletions
@@ -16,6 +16,7 @@
 import logging
 from typing import Dict, List, Tuple
 
+import aioredis
 import attr
 from aiohttp import web
 
@@ -53,75 +54,74 @@ def _decode_hash_key(cls, hash_key: str) -> Dict[str, str]:
         key = dict(x.split("=") for x in tmp_key.split(":"))
         return key
 
+    @property
+    def client(self) -> aioredis.Redis:
+        return get_redis_client(self.app)
+
     async def set_resource(
         self, key: Dict[str, str], resource: Tuple[str, str]
     ) -> None:
-        client = get_redis_client(self.app)
         hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}"
-        await client.hmset_dict(hash_key, **{resource[0]: resource[1]})
+        field, value = resource
+        await self.client.hmset_dict(hash_key, **{field: value})
 
     async def get_resources(self, key: Dict[str, str]) -> Dict[str, str]:
-        client = get_redis_client(self.app)
         hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}"
-        return await client.hgetall(hash_key)
+        return await self.client.hgetall(hash_key)
 
     async def remove_resource(self, key: Dict[str, str], resource_name: str) -> None:
-        client = get_redis_client(self.app)
         hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}"
-        await client.hdel(hash_key, resource_name)
+        await self.client.hdel(hash_key, resource_name)
 
     async def find_resources(
         self, key: Dict[str, str], resource_name: str
     ) -> List[str]:
-        client = get_redis_client(self.app)
         resources = []
         # the key might only be partialy complete
         partial_hash_key = f"{self._hash_key(key)}:{RESOURCE_SUFFIX}"
-        async for key in client.iscan(match=partial_hash_key):
-            if await client.hexists(key, resource_name):
-                resources.append(await client.hget(key, resource_name))
+        async for key in self.client.iscan(match=partial_hash_key):
+            if await self.client.hexists(key, resource_name):
+                resources.append(await self.client.hget(key, resource_name))
         return resources
 
     async def find_keys(self, resource: Tuple[str, str]) -> List[Dict[str, str]]:
         keys = []
         if not resource:
             return keys
-        client = get_redis_client(self.app)
-        async for hash_key in client.iscan(match=f"*:{RESOURCE_SUFFIX}"):
-            if resource[1] == await client.hget(hash_key, resource[0]):
+
+        field, value = resource
+
+        async for hash_key in self.client.iscan(match=f"*:{RESOURCE_SUFFIX}"):
+            if value == await self.client.hget(hash_key, field):
                 keys.append(self._decode_hash_key(hash_key))
         return keys
 
     async def set_key_alive(self, key: Dict[str, str], timeout: int) -> None:
         # setting the timeout to always expire, timeout > 0
         timeout = int(max(1, timeout))
-        client = get_redis_client(self.app)
         hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}"
-        await client.set(hash_key, 1, expire=timeout)
+        await self.client.set(hash_key, 1, expire=timeout)
 
     async def is_key_alive(self, key: Dict[str, str]) -> bool:
-        client = get_redis_client(self.app)
         hash_key = f"{self._hash_key(key)}:{ALIVE_SUFFIX}"
-        return await client.exists(hash_key) > 0
+        return await self.client.exists(hash_key) > 0
 
     async def remove_key(self, key: Dict[str, str]) -> None:
-        client = get_redis_client(self.app)
-        await client.delete(
+        await self.client.delete(
             f"{self._hash_key(key)}:{RESOURCE_SUFFIX}",
             f"{self._hash_key(key)}:{ALIVE_SUFFIX}",
         )
 
     async def get_all_resource_keys(
         self,
     ) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
-        client = get_redis_client(self.app)
         alive_keys = [
             self._decode_hash_key(hash_key)
-            async for hash_key in client.iscan(match=f"*:{ALIVE_SUFFIX}")
+            async for hash_key in self.client.iscan(match=f"*:{ALIVE_SUFFIX}")
         ]
         dead_keys = [
             self._decode_hash_key(hash_key)
-            async for hash_key in client.iscan(match=f"*:{RESOURCE_SUFFIX}")
+            async for hash_key in self.client.iscan(match=f"*:{RESOURCE_SUFFIX}")
             if self._decode_hash_key(hash_key) not in alive_keys
         ]
 
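
The registry above stores everything under flat Redis key strings built from a key dict plus a suffix. Below is a standalone sketch of that hash-key scheme; the suffix values and key fields are assumptions for illustration, and only the decoding mirrors the `_decode_hash_key` line shown at the top of the hunk.

# Standalone sketch of the registry's hash-key scheme.
# RESOURCE_SUFFIX / ALIVE_SUFFIX values and the key fields are assumed;
# the decode logic follows dict(x.split("=") for x in tmp_key.split(":")) from the diff.
from typing import Dict

RESOURCE_SUFFIX = "resources"  # assumed value
ALIVE_SUFFIX = "alive"         # assumed value


def hash_key(key: Dict[str, str]) -> str:
    # Flatten {"user_id": "42", ...} into "user_id=42:..."
    return ":".join(f"{k}={v}" for k, v in key.items())


def decode_hash_key(hash_key_with_suffix: str) -> Dict[str, str]:
    # Drop the trailing ":<suffix>" and rebuild the key dict
    tmp_key = hash_key_with_suffix.rsplit(":", 1)[0]
    return dict(x.split("=") for x in tmp_key.split(":"))


key = {"user_id": "42", "client_session_id": "abc"}  # hypothetical fields
resource_key = f"{hash_key(key)}:{RESOURCE_SUFFIX}"  # the hash set_resource() writes to
alive_key = f"{hash_key(key)}:{ALIVE_SUFFIX}"        # the key set_key_alive() expires

assert decode_hash_key(resource_key) == key
assert decode_hash_key(alive_key) == key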
