Skip to content

Commit

Permalink
Merge pull request #12733 from RasaHQ/fix-langchain-user-warning
Browse files Browse the repository at this point in the history
"Fix" langchain user warning
  • Loading branch information
tmbo authored Aug 16, 2023
2 parents 7150b15 + 329dda0 commit 444fc60
Showing 1 changed file with 11 additions and 1 deletion.
12 changes: 11 additions & 1 deletion rasa/shared/utils/llm.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from typing import Any, Dict, Optional, Text, Type
import warnings

import structlog
from langchain.embeddings.base import Embeddings
Expand Down Expand Up @@ -147,7 +148,16 @@ def llm_factory(
# need to create a copy as the langchain function modifies the
# config in place...
structlogger.debug("llmfactory.create.llm", config=config)
return load_llm_from_config(config.copy())
# langchain issues a user warning when using chat models. at the same time
# it doesn't provide a way to instantiate a chat model directly using the
# config. so for now, we need to suppress the warning here. Original
# warning:
# packages/langchain/llms/openai.py:189: UserWarning: You are trying to
# use a chat model. This way of initializing it is no longer supported.
# Instead, please use: `from langchain.chat_models import ChatOpenAI`
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=UserWarning)
return load_llm_from_config(config.copy())


def embedder_factory(
Expand Down

0 comments on commit 444fc60

Please sign in to comment.