@@ -69,7 +69,7 @@ def _call(
69
69
)
70
70
71
71
# Streaming for NeMo Guardrails is not supported in sync calls.
72
- if self.model_kwargs.get("streaming"):
72
+ if self.model_kwargs and self.model_kwargs.get("streaming"):
73
73
raise Exception(
74
74
"Streaming mode not supported for HuggingFacePipeline in NeMo Guardrails!"
75
75
)
@@ -100,7 +100,7 @@ async def _acall(
100
100
)
101
101
102
102
# Handle streaming, if the flag is set
103
- if self.model_kwargs.get("streaming"):
103
+ if self.model_kwargs and self.model_kwargs.get("streaming"):
104
104
# Retrieve the streamer object, needs to be set in model_kwargs
105
105
streamer = self.model_kwargs.get("streamer")
106
106
if not streamer:
@@ -153,7 +153,18 @@ async def _acall(self, *args, **kwargs):
153
153
154
154
def discover_langchain_providers():
155
155
"""Automatically discover all LLM providers from LangChain."""
156
- _providers.update(llms.type_to_cls_dict)
156
+ # To deal with deprecated stuff and avoid warnings, we compose the type_to_cls_dict here
157
+ if hasattr(llms, "get_type_to_cls_dict"):
158
+ type_to_cls_dict = {
159
+ k: v()
160
+ for k, v in llms.get_type_to_cls_dict().items()
161
+ # Exclude deprecated ones
162
+ if k not in ["mlflow-chat", "databricks-chat"]
163
+ }
164
+ else :
165
+ type_to_cls_dict = llms.type_to_cls_dict
166
+
167
+ _providers.update(type_to_cls_dict)
157
168
158
169
# We make sure we have OpenAI from the right package.
159
170
if "openai" in _providers:
0 commit comments