Skip to content

Commit 723d234

Browse files
authored
Include update info in logging output (#664)
* Make LoggerAdapter behavior identical to merge_extra=True
* Add update info to logging `extra`
1 parent 0b327b0 commit 723d234

File tree

2 files changed

+52
-11
lines changed

2 files changed

+52
-11
lines changed

temporalio/workflow.py

+28-8
Original file line numberDiff line numberDiff line change
@@ -500,6 +500,14 @@ class UpdateInfo:
500500
name: str
501501
"""Update type name."""
502502

503+
@property
504+
def _logger_details(self) -> Mapping[str, Any]:
505+
"""Data to be included in string appended to default logging output."""
506+
return {
507+
"update_id": self.id,
508+
"update_name": self.name,
509+
}
510+
503511

504512
class _Runtime(ABC):
505513
@staticmethod
@@ -1211,6 +1219,10 @@ class LoggerAdapter(logging.LoggerAdapter):
12111219
use by others. Default is False.
12121220
log_during_replay: Boolean for whether logs should occur during replay.
12131221
Default is False.
1222+
1223+
Values added to ``extra`` are merged with the ``extra`` dictionary from a
1224+
logging call, with values from the logging call taking precedence. I.e. the
1225+
behavior is that of `merge_extra=True` in Python >= 3.13.
12141226
"""
12151227

12161228
def __init__(
@@ -1232,20 +1244,28 @@ def process(
12321244
or self.workflow_info_on_extra
12331245
or self.full_workflow_info_on_extra
12341246
):
1247+
extra: Dict[str, Any] = {}
1248+
msg_extra: Dict[str, Any] = {}
12351249
runtime = _Runtime.maybe_current()
12361250
if runtime:
1251+
workflow_details = runtime.logger_details
12371252
if self.workflow_info_on_message:
1238-
msg = f"{msg} ({runtime.logger_details})"
1253+
msg_extra.update(workflow_details)
12391254
if self.workflow_info_on_extra:
1240-
# Extra can be absent or None, this handles both
1241-
extra = kwargs.get("extra", None) or {}
1242-
extra["temporal_workflow"] = runtime.logger_details
1243-
kwargs["extra"] = extra
1255+
extra["temporal_workflow"] = workflow_details
12441256
if self.full_workflow_info_on_extra:
1245-
# Extra can be absent or None, this handles both
1246-
extra = kwargs.get("extra", None) or {}
12471257
extra["workflow_info"] = runtime.workflow_info()
1248-
kwargs["extra"] = extra
1258+
update_info = current_update_info()
1259+
if update_info:
1260+
update_details = update_info._logger_details
1261+
if self.workflow_info_on_message:
1262+
msg_extra.update(update_details)
1263+
if self.workflow_info_on_extra:
1264+
extra.setdefault("temporal_workflow", {}).update(update_details)
1265+
1266+
kwargs["extra"] = {**extra, **(kwargs.get("extra") or {})}
1267+
if msg_extra:
1268+
msg = f"{msg} ({msg_extra})"
12491269
return (msg, kwargs)
12501270

12511271
def isEnabledFor(self, level: int) -> bool:

tests/worker/test_workflow.py

+24-3
Original file line numberDiff line numberDiff line change
@@ -1908,6 +1908,10 @@ def my_signal(self, value: str) -> None:
19081908
self._last_signal = value
19091909
workflow.logger.info(f"Signal: {value}")
19101910

1911+
@workflow.update
1912+
def my_update(self, value: str) -> None:
1913+
workflow.logger.info(f"Update: {value}")
1914+
19111915
@workflow.query
19121916
def last_signal(self) -> str:
19131917
return self._last_signal
@@ -1955,14 +1959,22 @@ async def test_workflow_logging(client: Client, env: WorkflowEnvironment):
19551959
id=f"workflow-{uuid.uuid4()}",
19561960
task_queue=worker.task_queue,
19571961
)
1958-
# Send a couple signals
1962+
# Send some signals and updates
19591963
await handle.signal(LoggingWorkflow.my_signal, "signal 1")
19601964
await handle.signal(LoggingWorkflow.my_signal, "signal 2")
1965+
await handle.execute_update(
1966+
LoggingWorkflow.my_update, "update 1", id="update-1"
1967+
)
1968+
await handle.execute_update(
1969+
LoggingWorkflow.my_update, "update 2", id="update-2"
1970+
)
19611971
assert "signal 2" == await handle.query(LoggingWorkflow.last_signal)
19621972

1963-
# Confirm two logs happened
1973+
# Confirm logs were produced
19641974
assert capturer.find_log("Signal: signal 1 ({'attempt':")
19651975
assert capturer.find_log("Signal: signal 2")
1976+
assert capturer.find_log("Update: update 1")
1977+
assert capturer.find_log("Update: update 2")
19661978
assert not capturer.find_log("Signal: signal 3")
19671979
# Also make sure it has some workflow info and correct funcName
19681980
record = capturer.find_log("Signal: signal 1")
@@ -1974,6 +1986,15 @@ async def test_workflow_logging(client: Client, env: WorkflowEnvironment):
19741986
)
19751987
# Since we enabled full info, make sure it's there
19761988
assert isinstance(record.__dict__["workflow_info"], workflow.Info)
1989+
# Check the log emitted by the update execution.
1990+
record = capturer.find_log("Update: update 1")
1991+
assert (
1992+
record
1993+
and record.__dict__["temporal_workflow"]["update_id"] == "update-1"
1994+
and record.__dict__["temporal_workflow"]["update_name"] == "my_update"
1995+
and "'update_id': 'update-1'" in record.message
1996+
and "'update_name': 'my_update'" in record.message
1997+
)
19771998

19781999
# Clear queue and start a new one with more signals
19792000
capturer.log_queue.queue.clear()
@@ -1983,7 +2004,7 @@ async def test_workflow_logging(client: Client, env: WorkflowEnvironment):
19832004
task_queue=worker.task_queue,
19842005
max_cached_workflows=0,
19852006
) as worker:
1986-
# Send a couple signals
2007+
# Send signals and updates
19872008
await handle.signal(LoggingWorkflow.my_signal, "signal 3")
19882009
await handle.signal(LoggingWorkflow.my_signal, "finish")
19892010
await handle.result()

0 commit comments

Comments (0)