Skip to content

Commit c5ae4fb

Browse files
committed
Moved the sidecar package to simcore_service_sidecar.
Added RabbitMQ-update code ported from another project.
1 parent 74648ed commit c5ae4fb

16 files changed

+274
-78
lines changed

services/sidecar/setup.py

+31-6
Original file line numberDiff line numberDiff line change
@@ -4,30 +4,55 @@
44

55
from setuptools import find_packages, setup
66

7-
here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
7+
# Fail fast under an unsupported interpreter (e.g. forgot to activate the
# virtualenv).  NOTE: the previous check `major != 3 and minor != 6` was
# buggy — it accepted Python 3.5 (major == 3 short-circuits the `and`) and
# Python 2.6 (minor == 6).  Tuple comparison rejects everything below 3.6.
if sys.version_info < (3, 6):
    raise RuntimeError(
        "Expected ~=3.6, got %s. Did you forget to activate virtualenv?"
        % str(sys.version_info)
    )

# Directory containing this setup.py, whether run as a script or imported.
current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
814

915

1016
def read_reqs(reqs_path: Path):
    """Return the requirement specifiers listed in *reqs_path*.

    Lines starting with '#' (comments) or '-' (pip options such as `-r`)
    are skipped; everything else is returned verbatim.
    """
    pattern = re.compile(r"(^[^#-][\w]+[-~>=<.\w]+)", re.MULTILINE)
    return pattern.findall(reqs_path.read_text())
1218

19+
# Long description and version are sourced from files next to this setup.py.
readme = (current_dir / "README.md").read_text()
version = (current_dir / "VERSION").read_text().strip()

# Runtime requirements: pins from requirements/_base.txt plus sibling
# packages — presumably installed from this repository; TODO confirm.
install_requirements = read_reqs(current_dir / "requirements" / "_base.txt") + [
    "s3wrapper==0.1.0",
    "simcore-sdk==0.1.0",
    "simcore-service-storage-sdk==0.1.0",
    "simcore-service-library",
]

# Test-only requirements, exposed below via tests_require / extras_require.
test_requirements = read_reqs(current_dir / "requirements" / "_test.txt")
2230

2331

2432
setup(
    name="simcore-service-sidecar",
    version=version,
    author="oSparclers",
    description="Platform's sidecar",
    # classifiers must be a list: a set has no stable order and is rejected
    # by some packaging tooling (the original used a set literal).
    classifiers=[
        "Development Status :: 1 - Planning",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3.6",
    ],
    long_description=readme,
    license="MIT license",
    python_requires="~=3.6",
    packages=find_packages(where="src"),
    package_dir={"": "src"},
    include_package_data=True,
    install_requires=install_requirements,
    test_suite="tests",
    tests_require=test_requirements,
    # `pip install .[test]` pulls in the test dependencies
    extras_require={"test": test_requirements},
    entry_points={
        "console_scripts": [
            "simcore-service-sidecar = simcore_service_sidecar.__main__:main",
        ],
    },
)

services/sidecar/src/sidecar/__init__.py

-2
This file was deleted.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
""" Python package for the simcore_service_sidecar.

Re-exports the package version at the top level.
"""
from .__version__ import __version__
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
""" Main application entry point
2+
`python -m simcore_service_sidecar ...`
3+
Why does this file exist, and why __main__? For more info, read:
4+
- https://www.python.org/dev/peps/pep-0338/
5+
- https://docs.python.org/3/using/cmdline.html#cmdoption-m
6+
"""
7+
8+
import logging
9+
10+
import click
11+
12+
from .utils import wrap_async_call
13+
14+
log = logging.getLogger(__name__)
15+
16+
@click.command()  # note: decorator must be CALLED — bare @click.command breaks on click < 8
@click.option("--job_id", default="0", help="The job ID")
@click.option("--user_id", default="0", help="The user ID")
@click.option("--project_id", default="0", help="The project ID")
@click.option("--node_id", default=None, help="The node ID or nothing")
def main(job_id: str, user_id: str, project_id: str, node_id: str):
    """Run the sidecar once: inspect the given job and process its task."""
    # Imported lazily — presumably to defer the heavy runtime stack until the
    # command actually runs; TODO confirm (e.g. circular-import avoidance).
    from simcore_service_sidecar.core import SIDECAR

    log.info(
        "STARTING task processing for user %s, project %s, node %s",
        user_id,
        project_id,
        node_id,
    )
    try:
        next_task_nodes = wrap_async_call(
            SIDECAR.inspect(job_id, user_id, project_id, node_id=node_id)
        )
    except Exception:  # pylint: disable=broad-except
        log.exception("Uncaught exception")
        # Do not claim completion after a failure (the original logged
        # COMPLETED unconditionally and left next_task_nodes unbound).
        return
    log.info(
        "COMPLETED task processing for user %s, project %s, node %s (next: %s)",
        user_id,
        project_id,
        node_id,
        next_task_nodes,
    )


if __name__ == "__main__":
    main()
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
""" Current version of the simcore_service_sidecar application
"""
import pkg_resources

# Version comes from the installed distribution metadata (setup.py reads it
# from the VERSION file at build time).
__version__ = pkg_resources.get_distribution("simcore_service_sidecar").version

# NOTE(review): assumes a plain MAJOR.MINOR.PATCH version string — a dev/rc
# suffix would make this unpack fail; confirm the release versioning scheme.
major, minor, patch = __version__.split(".")

# API version tracks the package version; routes are prefixed with the major.
api_version = __version__
api_version_prefix: str = f"v{major}"

services/sidecar/src/sidecar/core.py renamed to services/sidecar/src/simcore_service_sidecar/core.py

+65-69
Original file line numberDiff line numberDiff line change
@@ -6,36 +6,27 @@
66
from contextlib import contextmanager
77
from datetime import datetime
88
from pathlib import Path
9-
from typing import Dict, List, Union
9+
from typing import Dict, List, Union, Any
1010

11+
import aio_pika
1112
import aiofiles
13+
import attr
1214
import docker
13-
import pika
1415
from celery.utils.log import get_task_logger
1516
from sqlalchemy import and_, exc
1617

1718
from servicelib.utils import logged_gather
1819
from simcore_sdk import node_data, node_ports
19-
from simcore_sdk.models.pipeline_models import (
20-
RUNNING,
21-
SUCCESS,
22-
ComputationalPipeline,
23-
ComputationalTask,
24-
)
20+
from simcore_sdk.models.pipeline_models import (RUNNING, SUCCESS,
21+
ComputationalPipeline,
22+
ComputationalTask)
2523
from simcore_sdk.node_ports import log as node_port_log
2624
from simcore_sdk.node_ports.dbmanager import DBManager
2725

2826
from . import config
29-
from .utils import (
30-
DbSettings,
31-
DockerSettings,
32-
ExecutorSettings,
33-
RabbitSettings,
34-
S3Settings,
35-
find_entry_point,
36-
is_node_ready,
37-
safe_channel,
38-
)
27+
from .rabbitmq import RabbitMQ
28+
from .utils import (DbSettings, DockerSettings, ExecutorSettings, S3Settings,
29+
find_entry_point, is_node_ready, safe_channel)
3930

4031
log = get_task_logger(__name__)
4132
log.setLevel(config.SIDECAR_LOGLEVEL)
@@ -57,35 +48,17 @@ def session_scope(session_factory):
5748
finally:
5849
session.close()
5950

60-
51+
@attr.s(auto_attribs=True)
6152
class Sidecar: # pylint: disable=too-many-instance-attributes
62-
def __init__(self):
63-
# publish subscribe config
64-
self._pika = RabbitSettings()
65-
66-
# docker client config
67-
self._docker = DockerSettings()
68-
69-
# object storage config
70-
self._s3 = S3Settings()
71-
72-
# db config
73-
self._db = DbSettings() # keeps single db engine: sidecar.utils_{id}
74-
self._db_manager = (
75-
None # lazy init because still not configured. SEE _get_node_ports
76-
)
77-
78-
# current task
79-
self._task = None
80-
81-
# current user id
82-
self._user_id: str = None
83-
84-
# stack name
85-
self._stack_name: str = None
86-
87-
# executor options
88-
self._executor = ExecutorSettings()
53+
_rabbit_mq: RabbitMQ
54+
_docker: DockerSettings = DockerSettings()
55+
_s3: S3Settings = S3Settings()
56+
_db: DbSettings = DbSettings() # keeps single db engine: sidecar.utils_{id}
57+
_db_manager: Any = None # lazy init because still not configured. SEE _get_node_ports
58+
_task: ComputationalTask = None # current task
59+
_user_id: str = None # current user id
60+
_stack_name: str = None # stack name
61+
_executor: ExecutorSettings = ExecutorSettings() # executor options
8962

9063
async def _get_node_ports(self):
9164
if self._db_manager is None:
@@ -211,27 +184,51 @@ async def _post_progress(self, channel, progress):
211184
async def log_file_processor(self, log_file: Path) -> None:
212185
"""checks both container logs and the log_file if any
213186
"""
214-
try:
215-
TIME_BETWEEN_LOGS_S: int = 2
216-
time_logs_sent = time.monotonic()
217-
accumulated_logs = []
218-
async with aiofiles.open(log_file, mode="r") as fp:
219-
async for line in fp:
220-
now = time.monotonic()
221-
accumulated_logs.append(line)
222-
if (now - time_logs_sent) < TIME_BETWEEN_LOGS_S:
223-
continue
224-
# send logs to rabbitMQ
225-
# TODO: NEEDS to shield??
226-
with safe_channel(self._pika) as (channel, _):
227-
await self._post_log(channel, msg=accumulated_logs)
228-
time_logs_sent = now
229-
accumulated_logs = []
230-
except asyncio.CancelledError:
231-
# the task is complete let's send the last logs
232-
if accumulated_logs:
233-
with safe_channel(self._pika) as (channel, _):
234-
await self._post_log(channel, msg=accumulated_logs)
187+
# async def parse_line(line: str) -> None:
188+
# # TODO: This should be 'settings', a regex for every service
189+
# if line.lower().startswith("[progress]"):
190+
# progress = line.lower().lstrip(
191+
# "[progress]").rstrip("%").strip()
192+
# await self._post_progress(channel, progress)
193+
# log.debug('PROGRESS %s', progress)
194+
# elif "percent done" in line.lower():
195+
# progress = line.lower().rstrip("percent done")
196+
# try:
197+
# float_progress = float(progress) / 100.0
198+
# progress = str(float_progress)
199+
# await self._post_progress(channel, progress)
200+
# log.debug('PROGRESS %s', progress)
201+
# except ValueError:
202+
# log.exception("Could not extract progress from solver")
203+
# else:
204+
# # just send as log
205+
# await self._post_log(channel, msg=line)
206+
207+
208+
209+
210+
# try:
211+
# import pdb; pdb.set_trace()
212+
# TIME_BETWEEN_LOGS_S: int = 2
213+
# time_logs_sent = time.monotonic()
214+
# accumulated_logs = []
215+
# async with aiofiles.open(log_file, mode="r") as fp:
216+
# async for line in fp:
217+
# now = time.monotonic()
218+
# accumulated_logs.append(line)
219+
# if (now - time_logs_sent) < TIME_BETWEEN_LOGS_S:
220+
# continue
221+
# # send logs to rabbitMQ
222+
# # TODO: NEEDS to shield??
223+
# with safe_channel(self._pika) as (channel, _):
224+
# await self._post_log(channel, msg=accumulated_logs)
225+
# time_logs_sent = now
226+
# accumulated_logs = []
227+
# except asyncio.CancelledError:
228+
# # the task is complete let's send the last logs
229+
# if accumulated_logs:
230+
# with safe_channel(self._pika) as (channel, _):
231+
# await self._post_log(channel, msg=accumulated_logs)
235232

236233
# async def _bg_job(self, log_file):
237234
# log.debug('Bck job started %s:node %s:internal id %s from container',
@@ -568,7 +565,7 @@ async def postprocess(self):
568565
finally:
569566
_session.close()
570567

571-
async def inspect(self, celery_task, user_id: str, project_id: str, node_id: str):
568+
async def inspect(self, job_request_id: int, user_id: str, project_id: str, node_id: str):
572569
log.debug(
573570
"ENTERING inspect with user %s pipeline:node %s: %s",
574571
user_id,
@@ -657,4 +654,3 @@ async def inspect(self, celery_task, user_id: str, project_id: str, node_id: str
657654
SIDECAR = Sidecar()
658655

659656
__all__ = ["SIDECAR"]
660-
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import logging
2+
3+
import aio_pika
4+
import attr
5+
import tenacity
6+
7+
from servicelib.rabbitmq_utils import RabbitMQRetryPolicyUponInitialization
8+
from simcore_sdk.config.rabbit import Config, eval_broker
9+
10+
# Name the logger after the module, not the file path: getLogger(__file__)
# produced an absolute-path logger name, inconsistent with the rest of the
# codebase which uses getLogger(__name__).
log = logging.getLogger(__name__)
11+
12+
@attr.s(auto_attribs=True)
class RabbitMQ:
    """Async RabbitMQ client for the sidecar.

    Owns one robust connection/channel and declares two fan-out exchanges
    (logs and progress).  Call :meth:`connect` before using the exchanges.
    """

    # attr.Factory gives each instance its own Config — a plain `Config()`
    # default would be evaluated once and shared by every RabbitMQ instance.
    _config: Config = attr.Factory(Config)
    _connection: aio_pika.RobustConnection = None
    _channel: aio_pika.Channel = None
    logs_exchange: aio_pika.Exchange = None
    progress_exchange: aio_pika.Exchange = None

    async def connect(self):
        """Open the connection and declare the log/progress fan-out exchanges.

        Blocks (with retries) until the broker answers, then connects
        robustly so the connection auto-recovers on broker restarts.
        """
        url = eval_broker(self._config)
        await wait_till_rabbit_responsive(url)

        self._connection = await aio_pika.connect_robust(
            url,
            client_properties={"connection_name": "sidecar connection"},
        )

        self._channel = await self._connection.channel()
        self.logs_exchange = await self._channel.declare_exchange(
            self._config.log_channel, aio_pika.ExchangeType.FANOUT, auto_delete=True
        )
        self.progress_exchange = await self._channel.declare_exchange(
            self._config.progress_channel, aio_pika.ExchangeType.FANOUT, auto_delete=True
        )

    async def post_message(self):
        # The original had an EMPTY body here (a SyntaxError as committed);
        # raise explicitly until the publishing logic is ported.
        raise NotImplementedError("post_message is not implemented yet")
41+
42+
@tenacity.retry(**RabbitMQRetryPolicyUponInitialization().kwargs)
async def wait_till_rabbit_responsive(url: str):
    """Probe the broker at *url* until it accepts a connection.

    Retries per RabbitMQRetryPolicyUponInitialization; returns True once a
    throwaway connection can be opened and closed cleanly.
    """
    probe = await aio_pika.connect(url)
    await probe.close()
    return True

services/sidecar/src/sidecar/tasks.py renamed to services/sidecar/src/simcore_service_sidecar/tasks.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ async def async_pipeline(self, user_id: str, project_id: str, node_id: str) -> L
1313
from .core import SIDECAR
1414

1515
log.info("STARTING task processing for user %s, project %s, node %s", user_id, project_id, node_id)
16-
next_task_nodes = await SIDECAR.inspect(self, user_id, project_id, node_id)
16+
next_task_nodes = await SIDECAR.inspect(self.request.id, user_id, project_id, node_id)
1717
log.info("COMPLETED task processing for user %s, project %s, node %s", user_id, project_id, node_id)
1818
return next_task_nodes
1919

0 commit comments

Comments
 (0)