Skip to content

✨ Personalized resource limits: add API to change node resources ⚠️🗃️ #4374

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
73 commits
Select commit Hold shift + click to select a range
b7cf0d4
add first integration
sanderegg Jun 16, 2023
88fd5fb
create and delete project nodes with default resources
sanderegg Jun 16, 2023
e00fac2
let users do stupid things
sanderegg Jun 16, 2023
abe591e
copy works
sanderegg Jun 16, 2023
339ef41
computation now works as well
sanderegg Jun 16, 2023
92eba71
reserved for TESTER
sanderegg Jun 16, 2023
97fdccd
wrap old way with new way
sanderegg Jun 19, 2023
4903102
small refactor
sanderegg Jun 19, 2023
182319e
cleanup
sanderegg Jun 19, 2023
b418544
small refactor
sanderegg Jun 19, 2023
d048411
add aiopg fixture
sanderegg Jun 19, 2023
d666542
revert
sanderegg Jun 19, 2023
9d4f8e5
cleanup
sanderegg Jun 19, 2023
64b39ed
hit a wall. node id are not unique!!
sanderegg Jun 19, 2023
a94b8aa
fix typo
sanderegg Jun 19, 2023
37732ad
multiple insertion
sanderegg Jun 19, 2023
bc91b49
use new syntax
sanderegg Jun 19, 2023
fe901a1
only add if necessary
sanderegg Jun 20, 2023
dbe035b
simplify syntax
sanderegg Jun 20, 2023
73aa8c5
refactor
sanderegg Jun 20, 2023
40ae223
add more clients
sanderegg Jun 20, 2023
8fb9799
clean and reduce test expectations
sanderegg Jun 20, 2023
4288087
improve a bit
sanderegg Jun 20, 2023
f70e04d
ensure we clenaup
sanderegg Jun 20, 2023
d8f6d93
optimize and fix code
sanderegg Jun 20, 2023
406e9a3
ensure the test runs
sanderegg Jun 20, 2023
778fc64
cleanup
sanderegg Jun 20, 2023
2e6d3df
make cleaning more efficient, remove stupid tests
sanderegg Jun 20, 2023
1807869
all good
sanderegg Jun 20, 2023
daa94de
sql 2.0
sanderegg Jun 20, 2023
73dbc73
improve copying
sanderegg Jun 20, 2023
7b2e029
change syntax
sanderegg Jun 20, 2023
1a1f490
const utils
sanderegg Jun 20, 2023
13b14ca
create better fake
sanderegg Jun 20, 2023
7bcd117
faster testing
sanderegg Jun 20, 2023
1fe9c8a
add function to list project nodes
sanderegg Jun 20, 2023
b25a132
fix copying
sanderegg Jun 20, 2023
3970956
use a different example
sanderegg Jun 20, 2023
5842790
fix function
sanderegg Jun 20, 2023
f88ab64
typing
sanderegg Jun 20, 2023
137a328
fixed test
sanderegg Jun 20, 2023
039ab3f
refactor
sanderegg Jun 20, 2023
33cf79d
adapt to async version of entries.
sanderegg Jun 20, 2023
6fc5f9b
types
sanderegg Jun 20, 2023
e92ca56
make postgres-database tests fail on sql2.0 warnings
sanderegg Jun 20, 2023
1358c8c
also accepts dictionaries
sanderegg Jun 22, 2023
e9480c3
cleanup
sanderegg Jun 22, 2023
4f732df
added exception for invalid node resources
sanderegg Jun 22, 2023
1a0fb68
add test on _nodes_resources
sanderegg Jun 22, 2023
9234dac
cleanup
sanderegg Jun 22, 2023
b5fa83a
fix insertion with None
sanderegg Jun 22, 2023
de12fe5
remove warning (confirmed by @pcrespov)
sanderegg Jun 22, 2023
4cf4628
enhance testing
sanderegg Jun 22, 2023
7377c27
cleanup
sanderegg Jun 22, 2023
9ff8914
cleanup and further test coverage
sanderegg Jun 22, 2023
2ffff36
cleanup
sanderegg Jun 22, 2023
acb71f4
add docs
sanderegg Jun 22, 2023
de33452
reduce cognitivity levels
sanderegg Jun 22, 2023
fb85b70
ensure defaults are returned in case no value was set
sanderegg Jun 22, 2023
36e4669
rename
sanderegg Jun 22, 2023
01a6f17
failing test
sanderegg Jun 22, 2023
6504828
refactor and ensure default values are gotten
sanderegg Jun 22, 2023
19ca28a
check computation gets resources correctly
sanderegg Jun 22, 2023
35a58dd
add migration of projects data
sanderegg Jun 22, 2023
c7c3f4e
removed code to duplicate
sanderegg Jun 22, 2023
9c2acdc
@pcrespov review: get_field_names more readable
sanderegg Jun 23, 2023
c472d0d
@pcrespov review: use columns
sanderegg Jun 23, 2023
1b648bf
@pcrespov review: add mixin with from_row function to create from aiopg
sanderegg Jun 23, 2023
bb8c8a9
@pcrespov review: change name of module
sanderegg Jun 23, 2023
1188264
@pcrespov review: use columns
sanderegg Jun 23, 2023
96afb1f
@pcrespov review: remove comment
sanderegg Jun 23, 2023
ed4f58d
@GitHK review: confusing uuidlib
sanderegg Jun 23, 2023
0f80be1
@pcrespov review: dict alias
sanderegg Jun 23, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
from enum import auto
from typing import Any, Final, Optional, Union
from typing import Any, Final, TypeAlias

from models_library.docker import DockerGenericTag
from models_library.utils.enums import StrAutoEnum
Expand Down Expand Up @@ -37,8 +37,8 @@


class ResourceValue(BaseModel):
limit: Union[StrictInt, StrictFloat, str]
reservation: Union[StrictInt, StrictFloat, str]
limit: StrictInt | StrictFloat | str
reservation: StrictInt | StrictFloat | str

@root_validator()
@classmethod
Expand Down Expand Up @@ -100,15 +100,15 @@ class Config:
}


ServiceResourcesDict = dict[DockerGenericTag, ImageResources]
ServiceResourcesDict: TypeAlias = dict[DockerGenericTag, ImageResources]


class ServiceResourcesDictHelpers:
@staticmethod
def create_from_single_service(
image: DockerGenericTag,
resources: ResourcesDict,
boot_modes: Optional[list[BootMode]] = None,
boot_modes: list[BootMode] | None = None,
) -> ServiceResourcesDict:
if boot_modes is None:
boot_modes = [BootMode.CPU]
Expand Down
3 changes: 2 additions & 1 deletion packages/postgres-database/setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,6 @@ commit_args = --no-verify

[tool:pytest]
asyncio_mode = auto
markers =
addopts = -W error::sqlalchemy.exc.SAWarning
markers =
testit: "marks test to run during development"
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
"""migrate projects workbench step1

Revision ID: add0afaaf728
Revises: 6e91067932f2
Create Date: 2023-06-22 14:45:38.827559+00:00

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "add0afaaf728"
down_revision = "6e91067932f2"
branch_labels = None
depends_on = None


def upgrade():
    """Backfill *projects_nodes* with one row per workbench node.

    Reads every project's JSON ``workbench`` and creates a matching
    ``projects_nodes`` entry, reusing the project's creation and last-change
    timestamps as the new rows' created/modified values.
    """
    projects_table = sa.table(
        "projects",
        sa.column("uuid"),
        sa.column("workbench"),
        sa.column("creation_date"),
        sa.column("last_change_date"),
    )
    projects_nodes_table = sa.table(
        "projects_nodes",
        sa.column("project_uuid"),
        sa.column("node_id"),
        sa.column("created"),
        sa.column("modified"),
    )

    connection = op.get_bind()

    # Collect all rows first so we can issue ONE bulk INSERT instead of a
    # round-trip per node (projects may contain many nodes).
    rows = [
        {
            "project_uuid": project_uuid,
            "node_id": node_id,
            "created": creation_date,
            "modified": last_change_date,
        }
        for project_uuid, workbench, creation_date, last_change_date in connection.execute(
            projects_table.select()
        )
        if workbench  # guard: workbench may be NULL/empty for legacy projects
        for node_id in workbench
    ]
    if rows:
        op.bulk_insert(projects_nodes_table, rows)


def downgrade():
    """Undo :func:`upgrade` by wiping every row in *projects_nodes*."""
    nodes_table = sa.table(
        "projects_nodes",
        sa.column("project_uuid"),
        sa.column("node_id"),
    )
    bind = op.get_bind()
    delete_stmt = nodes_table.delete()
    bind.execute(delete_stmt)
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from typing import TypeVar

from aiopg.sa.result import RowProxy

ModelType = TypeVar("ModelType")


class FromRowMixin:
    """Adds an alternate constructor that builds an instance from an
    aiopg ``RowProxy``: each column of the row becomes a keyword argument
    of the subclass's ``__init__``/dataclass constructor."""

    @classmethod
    def from_row(cls: type[ModelType], row: RowProxy) -> ModelType:
        # materialize the row mapping as a plain dict, then expand it
        column_values = {name: value for name, value in row.items()}
        return cls(**column_values)
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
import datetime
import uuid
from dataclasses import asdict, dataclass, field
from dataclasses import asdict, dataclass, field, fields
from typing import Any

import sqlalchemy
from aiopg.sa.connection import SAConnection

from .errors import ForeignKeyViolation, UniqueViolation
from .models.projects_nodes import projects_nodes
from .utils_models import FromRowMixin


#
Expand Down Expand Up @@ -34,12 +36,16 @@ class ProjectNodesDuplicateNode(BaseProjectNodesError):

@dataclass(frozen=True, slots=True, kw_only=True)
class ProjectNodeCreate:
required_resources: dict = field(default_factory=dict)
node_id: uuid.UUID
required_resources: dict[str, Any] = field(default_factory=dict)

@staticmethod
def get_field_names(*, exclude: set[str]) -> set[str]:
return {f.name for f in fields(ProjectNodeCreate) if f.name not in exclude}


@dataclass(frozen=True, slots=True, kw_only=True)
class ProjectNode(ProjectNodeCreate):
node_id: uuid.UUID
class ProjectNode(ProjectNodeCreate, FromRowMixin):
created: datetime.datetime
modified: datetime.datetime

Expand All @@ -52,9 +58,8 @@ async def add(
self,
connection: SAConnection,
*,
node_id: uuid.UUID,
node: ProjectNodeCreate,
) -> ProjectNode:
nodes: list[ProjectNodeCreate],
) -> list[ProjectNode]:
"""creates a new entry in *projects_nodes* and *projects_to_projects_nodes* tables

NOTE: Do not use this in an asyncio.gather call as this will fail!
Expand All @@ -65,51 +70,62 @@ async def add(
ProjectsNodesNodeNotFound: in case the node does not exist

"""
async with connection.begin():
try:
result = await connection.execute(
projects_nodes.insert()
.values(
project_uuid=f"{self.project_uuid}",
node_id=f"{node_id}",
if not nodes:
return []
insert_stmt = (
projects_nodes.insert()
.values(
[
{
"project_uuid": f"{self.project_uuid}",
**asdict(node),
)
.returning(
*[
c
for c in projects_nodes.c
if c is not projects_nodes.c.project_uuid
]
)
)
created_node_db = await result.first()
assert created_node_db # nosec
created_node = ProjectNode(**dict(created_node_db.items()))

return created_node
except ForeignKeyViolation as exc:
# this happens when the project does not exist, as we first check the node exists
raise ProjectNodesProjectNotFound(
f"Project {self.project_uuid} not found"
) from exc
except UniqueViolation as exc:
# this happens if the node already exists on creation
raise ProjectNodesDuplicateNode(
f"Project node {node_id} already exists"
) from exc
}
for node in nodes
]
)
.returning(
*[
c
for c in projects_nodes.columns
if c is not projects_nodes.c.project_uuid
]
)
)

try:
result = await connection.execute(insert_stmt)
assert result # nosec
rows = await result.fetchall()
assert rows is not None # nosec
return [ProjectNode.from_row(r) for r in rows]
except ForeignKeyViolation as exc:
# this happens when the project does not exist, as we first check the node exists
raise ProjectNodesProjectNotFound(
f"Project {self.project_uuid} not found"
) from exc
except UniqueViolation as exc:
# this happens if the node already exists on creation
raise ProjectNodesDuplicateNode(
f"Project node already exists: {exc}"
) from exc

async def list(self, connection: SAConnection) -> list[ProjectNode]:
"""list the nodes in the current project

NOTE: Do not use this in an asyncio.gather call as this will fail!
"""
list_stmt = sqlalchemy.select(
*[c for c in projects_nodes.c if c is not projects_nodes.c.project_uuid]
*[
c
for c in projects_nodes.columns
if c is not projects_nodes.c.project_uuid
]
).where(projects_nodes.c.project_uuid == f"{self.project_uuid}")
nodes = [
ProjectNode(**dict(row.items()))
async for row in connection.execute(list_stmt)
]
result = await connection.execute(list_stmt)
assert result # nosec
rows = await result.fetchall()
assert rows is not None # nosec
nodes = [ProjectNode.from_row(row) for row in rows]
return nodes

async def get(self, connection: SAConnection, *, node_id: uuid.UUID) -> ProjectNode:
Expand All @@ -134,7 +150,7 @@ async def get(self, connection: SAConnection, *, node_id: uuid.UUID) -> ProjectN
if row is None:
raise ProjectNodesNodeNotFound(f"Node with {node_id} not found")
assert row # nosec
return ProjectNode(**dict(row.items()))
return ProjectNode.from_row(row)

async def update(
self, connection: SAConnection, *, node_id: uuid.UUID, **values
Expand All @@ -158,11 +174,11 @@ async def update(
)
)
result = await connection.execute(update_stmt)
updated_entry = await result.first()
if not updated_entry:
row = await result.first()
if not row:
raise ProjectNodesNodeNotFound(f"Node with {node_id} not found")
assert updated_entry # nosec
return ProjectNode(**dict(updated_entry.items()))
assert row # nosec
return ProjectNode.from_row(row)

async def delete(self, connection: SAConnection, *, node_id: uuid.UUID) -> None:
"""delete a node in the current project
Expand Down
Loading