Commit 24947a1

add and update docstring
1 parent 92570b6

1 file changed: src/deepsparse/pipeline.py (+35 -3 lines)
@@ -66,6 +66,31 @@
 
 class BasePipeline(ABC):
     """
+    Generic BasePipeline abstract class meant to wrap inference objects, including
+    model-specific Pipeline objects. Any pipeline inheriting from Pipeline should
+    handle all model-specific input/output pre/post processing, while BasePipeline
+    serves as a generic wrapper. Inputs and outputs of BasePipelines should
+    be serialized as pydantic Models.
+
+    BasePipelines should not be instantiated by their constructors, but rather the
+    `BasePipeline.create()` method. The task name given to `create` will be used to
+    load the appropriate pipeline. The pipeline should inherit from `BasePipeline` and
+    implement the `__call__`, `input_schema`, and `output_schema` abstract methods.
+
+    Finally, the class definition should be decorated by the `BasePipeline.register`
+    function. This defines the task name and task aliases for the pipeline and
+    ensures that it will be accessible by `BasePipeline.create`. The implemented
+    `BasePipeline` subclass must be imported at runtime to be accessible.
+
+    Example:
+    @BasePipeline.register(task="base_example")
+    class BasePipelineExample(BasePipeline):
+        def __init__(self, base_specific, **kwargs):
+            self._base_specific = base_specific
+            self.model_pipeline = Pipeline.create(task="..")
+            super().__init__(**kwargs)
+        # implementation of abstract methods
+
     :param alias: optional name to give this pipeline instance, useful when
         inferencing with multiple models. Default is None
     :param logger: An optional item that can be either a DeepSparse Logger object,
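Put together, the registration pattern this new docstring describes looks like the sketch below. This is a hedged illustration, not part of the commit: the schema classes, field names, and the property-based implementations of `input_schema`/`output_schema` are assumptions, and the wrapped task name stays elided ("..") exactly as in the commit's own example.

from pydantic import BaseModel

from deepsparse.pipeline import BasePipeline, Pipeline


class ExampleInput(BaseModel):
    # hypothetical input schema; real pipelines define model-specific fields
    text: str


class ExampleOutput(BaseModel):
    # hypothetical output schema
    label: str


@BasePipeline.register(task="base_example")
class BasePipelineExample(BasePipeline):
    def __init__(self, base_specific, **kwargs):
        self._base_specific = base_specific
        # the wrapped task name is elided ("..") in the commit's own example
        self.model_pipeline = Pipeline.create(task="..")
        super().__init__(**kwargs)

    def __call__(self, *args, **kwargs) -> BaseModel:
        # stitch parsing, preprocessing, the wrapped pipeline, and
        # post-processing together; the return value must comply to
        # the output schema
        inputs = ExampleInput(**kwargs)
        result = self.model_pipeline(inputs.text)  # call signature is illustrative
        return ExampleOutput(label=str(result))

    @property
    def input_schema(self):
        # assumed here to be exposed as a property returning the pydantic class
        return ExampleInput

    @property
    def output_schema(self):
        return ExampleOutput


# Because the subclass is registered and imported, it is reachable by task name:
# pipeline = BasePipeline.create(task="base_example", base_specific=...)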
@@ -97,13 +122,19 @@ def __init__(
 
     @abstractmethod
     def __call__(self, *args, **kwargs) -> BaseModel:
+        """
+        Runner function needed to stitch together any parsing, preprocessing, engine,
+        and post-processing steps.
+
+        :returns: pydantic model class that outputs of this pipeline must comply to
+        """
         raise NotImplementedError()
 
     @staticmethod
     def _get_task_constructor(task: str) -> Type["BasePipeline"]:
         """
         This function retrieves the class previously registered via
-        `BasePipeline.register` for `task`.
+        `BasePipeline.register` or `Pipeline.register` for `task`.
 
         If `task` starts with "import:", it is treated as a module to be imported,
         and retrieves the task via the `TASK` attribute of the imported module.
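The "import:" convention described in this hunk implies a usage pattern like the following sketch; the module name `my_pipelines` and the task string are hypothetical, shown only to illustrate the `TASK`-attribute lookup.

# my_pipelines.py -- hypothetical user module
from deepsparse.pipeline import BasePipeline

TASK = "my_custom_task"  # attribute read by _get_task_constructor


@BasePipeline.register(task=TASK)
class MyCustomPipeline(BasePipeline):
    ...  # __call__, input_schema, output_schema as sketched earlier


# Elsewhere, prefixing the task with "import:" imports the module (which
# triggers registration) and then resolves the task via its TASK attribute:
# pipeline = BasePipeline.create(task="import:my_pipelines")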
@@ -199,7 +230,7 @@ def register(
         """
         Pipeline implementer class decorator that registers the pipeline
         task name and its aliases as valid tasks that can be used to load
-        the pipeline through `BasePipeline.create()`.
+        the pipeline through `BasePipeline.create()` or `Pipeline.create()`
 
         Multiple pipelines may not have the same task name. An error will
         be raised if two different pipelines attempt to register the same task name
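As a sketch of the aliasing and collision rules described here: the `task_aliases` keyword is an assumption (only `task` appears in this diff), and the task names are hypothetical.

from deepsparse.pipeline import BasePipeline


# `task_aliases` is an assumed parameter name, not confirmed by this diff
@BasePipeline.register(task="example_task", task_aliases=["example-task"])
class ExamplePipeline(BasePipeline):
    ...


# Re-registering an already-taken task name is documented to raise an error
# rather than silently overwrite the existing pipeline:
#
# @BasePipeline.register(task="example_task")  # would raise
# class CompetingPipeline(BasePipeline):
#     ...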
@@ -387,7 +418,8 @@ class Pipeline(BasePipeline):
     """
     Generic Pipeline abstract class meant to wrap inference engine objects to include
     data pre/post-processing. Inputs and outputs of pipelines should be serialized
-    as pydantic Models.
+    as pydantic Models. See the BasePipeline above for additional parameters provided
+    during inference.
 
     Pipelines should not be instantiated by their constructors, but rather the
     `Pipeline.create()` method. The task name given to `create` will be used to
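The cross-reference added in this hunk points at the `alias` and `logger` parameters documented on BasePipeline. A minimal usage sketch, assuming a `text_classification` task and the keyword names from that docstring:

from deepsparse.pipeline import Pipeline

# `alias` distinguishes this instance when inferencing with multiple models;
# the task name and keyword usage here are illustrative
pipeline = Pipeline.create(
    task="text_classification",
    alias="sentiment_pipeline",
)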
