@@ -225,7 +225,7 @@ def _init_chat_completion_model(model_name: str, provider_name: str, kwargs: Dic
         raise
 
 
-def _init_text_completion_model(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> BaseLLM:
+def _init_text_completion_model(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> BaseLLM | None:
     """Initialize a text completion model.
 
     Args:
@@ -234,22 +234,24 @@ def _init_text_completion_model(model_name: str, provider_name: str, kwargs: Dic
         kwargs: Additional arguments to pass to the model initialization
 
     Returns:
-        An initialized text completion model
-
-    Raises:
-        RuntimeError: If the provider is not found
+        An initialized text completion model, or None if the provider is not found
     """
-    provider_cls = _get_text_completion_provider(provider_name)
+    try:
+        provider_cls = _get_text_completion_provider(provider_name)
+    except RuntimeError:
+        return None
+
     if provider_cls is None:
-        raise ValueError()
+        return None
+
     kwargs = _update_model_kwargs(provider_cls, model_name, kwargs)
     # remove stream_usage parameter as it's not supported by text completion APIs
     # (e.g., OpenAI's AsyncCompletions.create() doesn't accept this parameter)
     kwargs.pop("stream_usage", None)
     return provider_cls(**kwargs)
 
 
-def _init_community_chat_models(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> BaseChatModel:
+def _init_community_chat_models(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> BaseChatModel | None:
     """Initialize community chat models.
 
     Args:
@@ -264,14 +266,19 @@ def _init_community_chat_models(model_name: str, provider_name: str, kwargs: Dic
         ImportError: If langchain_community is not installed
         ModelInitializationError: If model initialization fails
     """
-    provider_cls = _get_chat_completion_provider(provider_name)
+    try:
+        provider_cls = _get_chat_completion_provider(provider_name)
+    except RuntimeError:
+        return None
+
     if provider_cls is None:
-        raise ValueError()
+        return None
+
     kwargs = _update_model_kwargs(provider_cls, model_name, kwargs)
     return provider_cls(**kwargs)
 
 
-def _init_gpt35_turbo_instruct(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> BaseLLM:
+def _init_gpt35_turbo_instruct(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> BaseLLM | None:
     """Initialize GPT-3.5 Turbo Instruct model.
 
     Currently init_chat_model from langchain infers this as a chat model.
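After this change the three initializers signal "provider not found" by returning None instead of raising, which keeps that case distinct from real initialization failures (those still raise, e.g. ModelInitializationError). Below is a minimal sketch of how a caller could chain them, assuming it lives in the same module as the functions in this diff; the dispatcher name `_init_any_model`, the fallback ordering, and the final RuntimeError are illustrative assumptions, not part of this PR.

```python
# Hypothetical sketch, not part of this diff: chain the None-returning
# initializers and treat None as "not handled here, try the next one".
from typing import Any, Dict, Union

from langchain_core.language_models import BaseChatModel, BaseLLM


def _init_any_model(model_name: str, provider_name: str, kwargs: Dict[str, Any]) -> Union[BaseChatModel, BaseLLM]:
    # Ordering is an assumption; each initializer shares the same signature.
    initializers = (
        _init_gpt35_turbo_instruct,
        _init_text_completion_model,
        _init_community_chat_models,
    )
    for init in initializers:
        # Pass a copy so one initializer's kwargs mutations (e.g. pop) don't
        # leak into the next attempt.
        model = init(model_name, provider_name, dict(kwargs))
        if model is not None:
            return model
    raise RuntimeError(f"No provider found for {provider_name!r}")
```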