
Commit 12491c5

Merge branch 'master' into feature/previews-and-scene
2 parents: fe99f90 + 9c22ab8


41 files changed: +1221 additions, -597 deletions

packages/models-library/src/models_library/services_resources.py

Lines changed: 5 additions & 5 deletions
@@ -1,6 +1,6 @@
 import logging
 from enum import auto
-from typing import Any, Final, Optional, Union
+from typing import Any, Final, TypeAlias
 
 from models_library.docker import DockerGenericTag
 from models_library.utils.enums import StrAutoEnum
@@ -37,8 +37,8 @@
 
 
 class ResourceValue(BaseModel):
-    limit: Union[StrictInt, StrictFloat, str]
-    reservation: Union[StrictInt, StrictFloat, str]
+    limit: StrictInt | StrictFloat | str
+    reservation: StrictInt | StrictFloat | str
 
     @root_validator()
     @classmethod
@@ -100,15 +100,15 @@ class Config:
         }
 
 
-ServiceResourcesDict = dict[DockerGenericTag, ImageResources]
+ServiceResourcesDict: TypeAlias = dict[DockerGenericTag, ImageResources]
 
 
 class ServiceResourcesDictHelpers:
     @staticmethod
     def create_from_single_service(
         image: DockerGenericTag,
         resources: ResourcesDict,
-        boot_modes: Optional[list[BootMode]] = None,
+        boot_modes: list[BootMode] | None = None,
     ) -> ServiceResourcesDict:
         if boot_modes is None:
             boot_modes = [BootMode.CPU]
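
The change above swaps typing.Union/Optional for PEP 604 union syntax and marks the module-level alias explicitly with typing.TypeAlias; both forms are available from Python 3.10. A minimal sketch of the adopted style (the model and alias names here are illustrative, not from the commit):

    from typing import TypeAlias

    from pydantic import BaseModel, StrictFloat, StrictInt


    class ExampleValue(BaseModel):
        limit: StrictInt | StrictFloat | str  # PEP 604 union instead of Union[...]
        reservation: StrictInt | StrictFloat | str
        comment: str | None = None  # instead of Optional[str]


    ExampleDict: TypeAlias = dict[str, ExampleValue]  # explicit alias instead of a bare assignment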

packages/postgres-database/setup.cfg

Lines changed: 2 additions & 1 deletion
@@ -9,5 +9,6 @@ commit_args = --no-verify
 
 [tool:pytest]
 asyncio_mode = auto
-markers =
+addopts = -W error::sqlalchemy.exc.SAWarning
+markers =
     testit: "marks test to run during development"
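
The new addopts line escalates every SQLAlchemy SAWarning raised during the test run into an error, so problematic queries fail the tests instead of only printing a warning. A rough Python equivalent of what the -W flag does (the warning text below is made up for illustration):

    import warnings

    from sqlalchemy.exc import SAWarning

    # same effect as passing "-W error::sqlalchemy.exc.SAWarning" on the command line
    warnings.simplefilter("error", SAWarning)

    try:
        warnings.warn("example SAWarning message", SAWarning)
    except SAWarning as exc:
        print(f"escalated to an error: {exc}")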

packages/postgres-database: new Alembic migration "migrate projects workbench step1" (revision add0afaaf728)

Lines changed: 58 additions & 0 deletions
@@ -0,0 +1,58 @@
+"""migrate projects workbench step1
+
+Revision ID: add0afaaf728
+Revises: 6e91067932f2
+Create Date: 2023-06-22 14:45:38.827559+00:00
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "add0afaaf728"
+down_revision = "6e91067932f2"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    projects_table = sa.table(
+        "projects",
+        sa.column("uuid"),
+        sa.column("workbench"),
+        sa.column("creation_date"),
+        sa.column("last_change_date"),
+    )
+    projects_nodes_table = sa.table(
+        "projects_nodes",
+        sa.column("project_uuid"),
+        sa.column("node_id"),
+        sa.column("created"),
+        sa.column("modified"),
+    )
+
+    connection = op.get_bind()
+
+    for project_uuid, workbench, creation_date, last_change_date in connection.execute(
+        projects_table.select()
+    ):
+        for node_id in workbench.keys():
+            connection.execute(
+                projects_nodes_table.insert().values(
+                    project_uuid=project_uuid,
+                    node_id=node_id,
+                    created=creation_date,
+                    modified=last_change_date,
+                )
+            )
+
+
+def downgrade():
+    projects_nodes_table = sa.table(
+        "projects_nodes",
+        sa.column("project_uuid"),
+        sa.column("node_id"),
+    )
+    connection = op.get_bind()
+
+    connection.execute(projects_nodes_table.delete())
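
This data migration backfills one projects_nodes row per key of each project's workbench JSON, reusing the project's creation and last-change timestamps; the downgrade simply clears the table. A hypothetical post-upgrade sanity check, not part of the commit (the DSN is a placeholder):

    import sqlalchemy as sa

    engine = sa.create_engine("postgresql://user:password@localhost/simcoredb")  # placeholder DSN

    with engine.connect() as conn:
        for project_uuid, workbench in conn.execute(
            sa.text("SELECT uuid, workbench FROM projects")
        ):
            n_rows = conn.execute(
                sa.text("SELECT COUNT(*) FROM projects_nodes WHERE project_uuid = :uuid"),
                {"uuid": project_uuid},
            ).scalar()
            # every workbench node should now have a matching projects_nodes row
            assert n_rows == len(workbench), f"project {project_uuid} is missing node rows"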

packages/postgres-database/src/simcore_postgres_database/utils_models.py

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+from typing import TypeVar
+
+from aiopg.sa.result import RowProxy
+
+ModelType = TypeVar("ModelType")
+
+
+class FromRowMixin:
+    """Mixin to allow instance construction from aiopg.sa.result.RowProxy"""
+
+    @classmethod
+    def from_row(cls: type[ModelType], row: RowProxy) -> ModelType:
+        return cls(**dict(row.items()))
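
FromRowMixin centralizes the row-to-dataclass conversion that was previously spelled out as ProjectNode(**dict(row.items())) at every call site. An illustrative sketch (the ExampleModel below and the plain dict standing in for an aiopg RowProxy are not part of the commit):

    from dataclasses import dataclass

    from simcore_postgres_database.utils_models import FromRowMixin


    @dataclass(frozen=True, kw_only=True)
    class ExampleModel(FromRowMixin):
        name: str
        version: int


    fake_row = {"name": "sleeper", "version": 2}  # anything exposing .items() works
    print(ExampleModel.from_row(fake_row))  # ExampleModel(name='sleeper', version=2)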

packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py

Lines changed: 64 additions & 48 deletions
@@ -1,12 +1,14 @@
 import datetime
 import uuid
-from dataclasses import asdict, dataclass, field
+from dataclasses import asdict, dataclass, field, fields
+from typing import Any
 
 import sqlalchemy
 from aiopg.sa.connection import SAConnection
 
 from .errors import ForeignKeyViolation, UniqueViolation
 from .models.projects_nodes import projects_nodes
+from .utils_models import FromRowMixin
 
 
 #
@@ -34,12 +36,16 @@ class ProjectNodesDuplicateNode(BaseProjectNodesError):
 
 @dataclass(frozen=True, slots=True, kw_only=True)
 class ProjectNodeCreate:
-    required_resources: dict = field(default_factory=dict)
+    node_id: uuid.UUID
+    required_resources: dict[str, Any] = field(default_factory=dict)
+
+    @staticmethod
+    def get_field_names(*, exclude: set[str]) -> set[str]:
+        return {f.name for f in fields(ProjectNodeCreate) if f.name not in exclude}
 
 
 @dataclass(frozen=True, slots=True, kw_only=True)
-class ProjectNode(ProjectNodeCreate):
-    node_id: uuid.UUID
+class ProjectNode(ProjectNodeCreate, FromRowMixin):
     created: datetime.datetime
     modified: datetime.datetime
 
@@ -52,9 +58,8 @@ async def add(
         self,
         connection: SAConnection,
         *,
-        node_id: uuid.UUID,
-        node: ProjectNodeCreate,
-    ) -> ProjectNode:
+        nodes: list[ProjectNodeCreate],
+    ) -> list[ProjectNode]:
         """creates a new entry in *projects_nodes* and *projects_to_projects_nodes* tables
 
         NOTE: Do not use this in an asyncio.gather call as this will fail!
@@ -65,51 +70,62 @@ async def add(
             ProjectsNodesNodeNotFound: in case the node does not exist
 
         """
-        async with connection.begin():
-            try:
-                result = await connection.execute(
-                    projects_nodes.insert()
-                    .values(
-                        project_uuid=f"{self.project_uuid}",
-                        node_id=f"{node_id}",
+        if not nodes:
+            return []
+        insert_stmt = (
+            projects_nodes.insert()
+            .values(
+                [
+                    {
+                        "project_uuid": f"{self.project_uuid}",
                         **asdict(node),
-                    )
-                    .returning(
-                        *[
-                            c
-                            for c in projects_nodes.c
-                            if c is not projects_nodes.c.project_uuid
-                        ]
-                    )
-                )
-                created_node_db = await result.first()
-                assert created_node_db  # nosec
-                created_node = ProjectNode(**dict(created_node_db.items()))
-
-                return created_node
-            except ForeignKeyViolation as exc:
-                # this happens when the project does not exist, as we first check the node exists
-                raise ProjectNodesProjectNotFound(
-                    f"Project {self.project_uuid} not found"
-                ) from exc
-            except UniqueViolation as exc:
-                # this happens if the node already exists on creation
-                raise ProjectNodesDuplicateNode(
-                    f"Project node {node_id} already exists"
-                ) from exc
+                    }
+                    for node in nodes
+                ]
+            )
+            .returning(
+                *[
+                    c
+                    for c in projects_nodes.columns
+                    if c is not projects_nodes.c.project_uuid
+                ]
+            )
+        )
+
+        try:
+            result = await connection.execute(insert_stmt)
+            assert result  # nosec
+            rows = await result.fetchall()
+            assert rows is not None  # nosec
+            return [ProjectNode.from_row(r) for r in rows]
+        except ForeignKeyViolation as exc:
+            # this happens when the project does not exist, as we first check the node exists
+            raise ProjectNodesProjectNotFound(
+                f"Project {self.project_uuid} not found"
+            ) from exc
+        except UniqueViolation as exc:
+            # this happens if the node already exists on creation
+            raise ProjectNodesDuplicateNode(
+                f"Project node already exists: {exc}"
+            ) from exc
 
     async def list(self, connection: SAConnection) -> list[ProjectNode]:
         """list the nodes in the current project
 
         NOTE: Do not use this in an asyncio.gather call as this will fail!
         """
         list_stmt = sqlalchemy.select(
-            *[c for c in projects_nodes.c if c is not projects_nodes.c.project_uuid]
+            *[
+                c
+                for c in projects_nodes.columns
+                if c is not projects_nodes.c.project_uuid
+            ]
         ).where(projects_nodes.c.project_uuid == f"{self.project_uuid}")
-        nodes = [
-            ProjectNode(**dict(row.items()))
-            async for row in connection.execute(list_stmt)
-        ]
+        result = await connection.execute(list_stmt)
+        assert result  # nosec
+        rows = await result.fetchall()
+        assert rows is not None  # nosec
+        nodes = [ProjectNode.from_row(row) for row in rows]
         return nodes
 
     async def get(self, connection: SAConnection, *, node_id: uuid.UUID) -> ProjectNode:
@@ -134,7 +150,7 @@ async def get(self, connection: SAConnection, *, node_id: uuid.UUID) -> ProjectN
         if row is None:
             raise ProjectNodesNodeNotFound(f"Node with {node_id} not found")
         assert row  # nosec
-        return ProjectNode(**dict(row.items()))
+        return ProjectNode.from_row(row)
 
     async def update(
         self, connection: SAConnection, *, node_id: uuid.UUID, **values
@@ -158,11 +174,11 @@ async def update(
             )
         )
         result = await connection.execute(update_stmt)
-        updated_entry = await result.first()
-        if not updated_entry:
+        row = await result.first()
+        if not row:
            raise ProjectNodesNodeNotFound(f"Node with {node_id} not found")
-        assert updated_entry  # nosec
-        return ProjectNode(**dict(updated_entry.items()))
+        assert row  # nosec
+        return ProjectNode.from_row(row)
 
     async def delete(self, connection: SAConnection, *, node_id: uuid.UUID) -> None:
         """delete a node in the current project
