From 3231a8c51c54d19b9c7cfb698fb8ebaead01903c Mon Sep 17 00:00:00 2001
From: Yeuoly <45712896+Yeuoly@users.noreply.github.com>
Date: Fri, 8 Mar 2024 14:50:51 +0800
Subject: [PATCH] fix: image tokenizer (#2752)

---
 .../model_providers/anthropic/llm/llm.py      | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/api/core/model_runtime/model_providers/anthropic/llm/llm.py b/api/core/model_runtime/model_providers/anthropic/llm/llm.py
index 6f9f41ca44d258..ad74179353da1c 100644
--- a/api/core/model_runtime/model_providers/anthropic/llm/llm.py
+++ b/api/core/model_runtime/model_providers/anthropic/llm/llm.py
@@ -424,8 +424,25 @@ def _convert_one_message_to_text(self, message: PromptMessage) -> str:
 
         if isinstance(message, UserPromptMessage):
             message_text = f"{human_prompt} {content}"
+            if not isinstance(message.content, list):
+                message_text = f"{ai_prompt} {content}"
+            else:
+                message_text = ""
+                for sub_message in message.content:
+                    if sub_message.type == PromptMessageContentType.TEXT:
+                        message_text += f"{human_prompt} {sub_message.data}"
+                    elif sub_message.type == PromptMessageContentType.IMAGE:
+                        message_text += f"{human_prompt} [IMAGE]"
         elif isinstance(message, AssistantPromptMessage):
-            message_text = f"{ai_prompt} {content}"
+            if not isinstance(message.content, list):
+                message_text = f"{ai_prompt} {content}"
+            else:
+                message_text = ""
+                for sub_message in message.content:
+                    if sub_message.type == PromptMessageContentType.TEXT:
+                        message_text += f"{ai_prompt} {sub_message.data}"
+                    elif sub_message.type == PromptMessageContentType.IMAGE:
+                        message_text += f"{ai_prompt} [IMAGE]"
         elif isinstance(message, SystemPromptMessage):
             message_text = content
         else:
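
For reviewers, a minimal, self-contained sketch of the flattening behavior this patch adds, assuming simplified stand-in types (ContentType, SubMessage) and an Anthropic-style role prefix rather than Dify's real PromptMessageContent classes: list-type message content is walked sub-message by sub-message, text parts are concatenated, and image parts are replaced with a fixed [IMAGE] placeholder so the string-based tokenizer never receives raw image data.

# Illustrative sketch only: ContentType and SubMessage are simplified
# stand-ins for Dify's prompt message content classes, not the real API.
from dataclasses import dataclass
from enum import Enum

HUMAN_PROMPT = "\n\nHuman:"  # assumed Anthropic-style role prefix


class ContentType(Enum):
    TEXT = "text"
    IMAGE = "image"


@dataclass
class SubMessage:
    type: ContentType
    data: str


def flatten_content(sub_messages: list[SubMessage], prefix: str = HUMAN_PROMPT) -> str:
    """Mirror the loop added by this patch: concatenate text parts and
    substitute a fixed [IMAGE] placeholder for image parts, so the
    resulting string can be fed to a text tokenizer for counting."""
    text = ""
    for sub_message in sub_messages:
        if sub_message.type == ContentType.TEXT:
            text += f"{prefix} {sub_message.data}"
        elif sub_message.type == ContentType.IMAGE:
            text += f"{prefix} [IMAGE]"
    return text


if __name__ == "__main__":
    mixed = [
        SubMessage(ContentType.TEXT, "Describe this picture."),
        SubMessage(ContentType.IMAGE, "data:image/png;base64,..."),
    ]
    print(repr(flatten_content(mixed)))
    # '\n\nHuman: Describe this picture.\n\nHuman: [IMAGE]'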