Skip to content

Commit 83afedf

Browse files
committed
DFP-3901: Fixed lint issue
1 parent a353180 commit 83afedf

File tree

2 files changed: +16 −18 lines changed

Diff for: spark_pipeline_framework/transformers/framework_exception_handler/v1/framework_exception_handler.py

+11-13
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,5 @@
1-
from typing import Dict, Any, Optional, Union, List, Callable
1+
from typing import Dict, Any, Optional, Type, Union, List, Callable
22

3-
from spark_pipeline_framework.logger.log_level import LogLevel
43
from spark_pipeline_framework.utilities.capture_parameters import capture_parameters
54
from pyspark.ml import Transformer
65
from pyspark.sql.dataframe import DataFrame
@@ -18,7 +17,7 @@ def __init__(
1817
self,
1918
*,
2019
raise_on_exception: Optional[Union[bool, Callable[[DataFrame], bool]]] = True,
21-
error_exception: Optional[Exception] = Exception,
20+
error_exception: Type[BaseException] = BaseException,
2221
stages: Union[List[Transformer], Callable[[], List[Transformer]]],
2322
exception_stages: Optional[
2423
Union[List[Transformer], Callable[[], List[Transformer]]]
@@ -50,18 +49,18 @@ def __init__(
5049
raise_on_exception
5150
)
5251

53-
self.error_exception: Optional[Exception] = error_exception
52+
self.error_exception: Type[BaseException] = error_exception
5453
self.stages: Union[List[Transformer], Callable[[], List[Transformer]]] = stages
55-
self.exception_stages: Optional[
56-
Union[List[Transformer], Callable[[], List[Transformer]]]
54+
self.exception_stages: Union[
55+
List[Transformer], Callable[[], List[Transformer]]
5756
] = (exception_stages or [])
5857

5958
self.loop_id: Optional[str] = None
6059

6160
kwargs = self._input_kwargs
6261
self.setParams(**kwargs)
6362

64-
async def _transform_async(self, df):
63+
async def _transform_async(self, df: DataFrame) -> DataFrame:
6564
"""
6665
Executes the transformation pipeline asynchronously.
6766
@@ -81,8 +80,8 @@ async def run_pipeline(
8180
df: DataFrame,
8281
stages: Union[List[Transformer], Callable[[], List[Transformer]]],
8382
progress_logger: Optional[ProgressLogger],
84-
):
85-
stages: List[Transformer] = stages if not callable(stages) else stages()
83+
) -> None:
84+
stages = stages if not callable(stages) else stages()
8685
nonlocal stage_name
8786

8887
for stage in stages:
@@ -112,7 +111,6 @@ async def run_pipeline(
112111
progress_logger.write_to_log(
113112
self.getName() or "FrameworkExceptionHandlerTransformer",
114113
f"Failed while running steps with error: {e}. Run execution steps: {isinstance(e, self.error_exception)}",
115-
log_level=LogLevel.INFO,
116114
)
117115

118116
try:
@@ -140,8 +138,8 @@ def as_dict(self) -> Dict[str, Any]:
140138
else str(self.stages)
141139
),
142140
"exception_stages": (
143-
[s.as_dict() for s in self.else_stages] # type: ignore
144-
if self.else_stages and not callable(self.else_stages)
145-
else str(self.else_stages)
141+
[s.as_dict() for s in self.exception_stages] # type: ignore
142+
if self.exception_stages and not callable(self.exception_stages)
143+
else str(self.exception_stages)
146144
),
147145
}

Diff for: spark_pipeline_framework/transformers/framework_exception_handler/v1/test/test_framework_exception_handler.py

+5-5
Original file line number | Diff line number | Diff line change
@@ -16,13 +16,13 @@
1616

1717

1818
def test_framework_exception_handle(spark_session: SparkSession) -> None:
19-
19+
2020
# create a dataframe with the test data
2121
data_dir: Path = Path(__file__).parent.joinpath("./")
2222
df: DataFrame = create_empty_dataframe(spark_session=spark_session)
2323
invalid_view: str = "invalid_view"
2424
valid_view = "view"
25-
25+
2626
with ProgressLogger() as progress_logger:
2727
FrameworkExceptionHandlerTransformer(
2828
name="Exception Handler Test",
@@ -42,12 +42,12 @@ def test_framework_exception_handle(spark_session: SparkSession) -> None:
4242
)
4343
],
4444
raise_on_exception=False,
45-
progress_logger=progress_logger
45+
progress_logger=progress_logger,
4646
).transform(df)
4747
result_df: DataFrame = spark_session.table(valid_view)
48-
48+
4949
# Assert that the exception-handling stage has successfully run
5050
assert result_df.count() == 3
51-
51+
5252
# Verify that the invalid view was NOT created, confirming that the original stage failed
5353
assert not spark_session.catalog.tableExists(invalid_view)

0 commit comments

Comments (0)