|
66 | 66 |
|
67 | 67 | class BasePipeline(ABC):
|
68 | 68 | """
|
| 69 | + Generic BasePipeline abstract class meant to wrap inference objects and |
| 70 | + model-specific Pipeline objects. Any pipeline that inherits from Pipeline |
| 71 | + should handle all model-specific input/output pre/post processing, while BasePipeline |
| 72 | + is meant to serve as a generic wrapper. Inputs and outputs of BasePipelines should |
| 73 | + be serialized as pydantic Models. |
| 74 | +
|
| 75 | + BasePipelines should not be instantiated by their constructors, but rather the |
| 76 | + `BasePipeline.create()` method. The task name given to `create` will be used to |
| 77 | + load the appropriate pipeline. The pipeline should inherit from `BasePipeline` and |
| 78 | + implement the `__call__`, `input_schema`, and `output_schema` abstract methods. |
| 79 | +
|
| 80 | + Finally, the class definition should be decorated by the `BasePipeline.register` |
| 81 | + function. This defines the task name and task aliases for the pipeline and |
| 82 | + ensures that it will be accessible by `BasePipeline.create`. The implemented |
| 83 | + `BasePipeline` subclass must be imported at runtime to be accessible. |
| 84 | +
|
| 85 | + Example: |
| 86 | + @BasePipeline.register(task="base_example") |
| 87 | + class BasePipelineExample(BasePipeline): |
| 88 | + def __init__(self, base_specific, **kwargs): |
| 89 | + self._base_specific = base_specific |
| 90 | + self.model_pipeline = Pipeline.create(task="..") |
| 91 | + super().__init__(**kwargs) |
| 92 | + # implementation of abstract methods |
| 93 | +
|
69 | 94 | :param alias: optional name to give this pipeline instance, useful when
|
70 | 95 | inferencing with multiple models. Default is None
|
71 | 96 | :param logger: An optional item that can be either a DeepSparse Logger object,
|
@@ -97,13 +122,19 @@ def __init__(
|
97 | 122 |
|
98 | 123 | @abstractmethod
|
99 | 124 | def __call__(self, *args, **kwargs) -> BaseModel:
|
| 125 | + """ |
| 126 | + Runner function needed to stitch together any parsing, preprocessing, engine, |
| 127 | + and post-processing steps. |
| 128 | +
| 129 | + :returns: a pydantic model instance that complies with this pipeline's output schema |
| 130 | + """ |
100 | 131 | raise NotImplementedError()
|
101 | 132 |
|
102 | 133 | @staticmethod
|
103 | 134 | def _get_task_constructor(task: str) -> Type["BasePipeline"]:
|
104 | 135 | """
|
105 | 136 | This function retrieves the class previously registered via
|
106 |
| - `BasePipeline.register` for `task`. |
| 137 | + `BasePipeline.register` or `Pipeline.register` for `task`. |
107 | 138 |
|
108 | 139 | If `task` starts with "import:", it is treated as a module to be imported,
|
109 | 140 | and retrieves the task via the `TASK` attribute of the imported module.
|
@@ -199,7 +230,7 @@ def register(
|
199 | 230 | """
|
200 | 231 | Pipeline implementer class decorator that registers the pipeline
|
201 | 232 | task name and its aliases as valid tasks that can be used to load
|
202 |
| - the pipeline through `BasePipeline.create()`. |
| 233 | + the pipeline through `BasePipeline.create()` or `Pipeline.create()`. |
203 | 234 |
|
204 | 235 | Multiple pipelines may not have the same task name. An error will
|
205 | 236 | be raised if two different pipelines attempt to register the same task name
|
@@ -387,7 +418,8 @@ class Pipeline(BasePipeline):
|
387 | 418 | """
|
388 | 419 | Generic Pipeline abstract class meant to wrap inference engine objects to include
|
389 | 420 | data pre/post-processing. Inputs and outputs of pipelines should be serialized
|
390 |
| - as pydantic Models. |
| 421 | + as pydantic Models. See the BasePipeline above for additional parameters provided |
| 422 | + during inference. |
391 | 423 |
|
392 | 424 | Pipelines should not be instantiated by their constructors, but rather the
|
393 | 425 | `Pipeline.create()` method. The task name given to `create` will be used to
|
|
0 commit comments