@@ -7,6 +7,8 @@

import pydash
import pytest
+
+from azure.ai.ml.constants._job import PipelineConstants
from test_configs.dsl_pipeline import data_binding_expression
from test_utilities.utils import omit_with_wildcard, prepare_dsl_curated

@@ -2596,3 +2598,25 @@ def my_pipeline() -> Output(type="uri_folder", description="new description", mode="upload"):
            'description': 'new description', 'job_output_type': 'uri_folder', 'mode': 'Upload'
        }}
        assert pipeline_job._to_rest_object().as_dict()["properties"]["outputs"] == expected_outputs
+
+    def test_dsl_pipeline_run_settings(self) -> None:
+        hello_world_component_yaml = "./tests/test_configs/components/helloworld_component.yml"
+        hello_world_component_func = load_component(source=hello_world_component_yaml)
+
+        @dsl.pipeline()
+        def my_pipeline() -> Output(type="uri_folder", description="new description", mode="upload"):
+            node = hello_world_component_func(component_in_path=Input(path="path/on/ds"), component_in_number=10)
+            return {"output": node.outputs.component_out_path}
+
+        pipeline_job: PipelineJob = my_pipeline()
+        pipeline_job.settings.default_compute = "cpu-cluster"
+        pipeline_job.settings.continue_on_step_failure = True
+        pipeline_job.settings.continue_run_on_failed_optional_input = False
+
+        assert pipeline_job._to_rest_object().properties.settings == {
+            PipelineConstants.DEFAULT_COMPUTE: "cpu-cluster",
+            PipelineConstants.CONTINUE_ON_STEP_FAILURE: True,
+            PipelineConstants.CONTINUE_RUN_ON_FAILED_OPTIONAL_INPUT: False,
+            "_source": "DSL"
+        }
+