Skip to content

Commit

Permalink
fix: zhipuai chatglm turbo prompts must be user, assistant in sequence (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
takatost authored Jan 4, 2024
1 parent 91ff07f commit 0c5892b
Showing 1 changed file with 29 additions and 5 deletions.
34 changes: 29 additions & 5 deletions api/core/model_runtime/model_providers/zhipuai/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@
Union
)

from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, AssistantPromptMessage, \
SystemPromptMessage
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, UserPromptMessage, \
AssistantPromptMessage, \
SystemPromptMessage, PromptMessageRole
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, \
LLMResultChunkDelta
from core.model_runtime.errors.validate import CredentialsValidateFailedError
Expand Down Expand Up @@ -111,16 +112,39 @@ def _generate(self, model: str, credentials_kwargs: dict,
if len(prompt_messages) == 0:
raise ValueError('At least one message is required')

if prompt_messages[0].role.value == 'system':
if prompt_messages[0].role == PromptMessageRole.SYSTEM:
if not prompt_messages[0].content:
prompt_messages = prompt_messages[1:]

# resolve zhipuai model not support system message and user message, assistant message must be in sequence
new_prompt_messages = []
for prompt_message in prompt_messages:
copy_prompt_message = prompt_message.copy()
if copy_prompt_message.role in [PromptMessageRole.USER, PromptMessageRole.SYSTEM, PromptMessageRole.TOOL]:
if not isinstance(copy_prompt_message.content, str):
# not support image message
continue

if new_prompt_messages and new_prompt_messages[-1].role == PromptMessageRole.USER:
new_prompt_messages[-1].content += "\n\n" + copy_prompt_message.content
else:
if copy_prompt_message.role == PromptMessageRole.USER:
new_prompt_messages.append(copy_prompt_message)
else:
new_prompt_message = UserPromptMessage(content=copy_prompt_message.content)
new_prompt_messages.append(new_prompt_message)
else:
if new_prompt_messages and new_prompt_messages[-1].role == PromptMessageRole.ASSISTANT:
new_prompt_messages[-1].content += "\n\n" + copy_prompt_message.content
else:
new_prompt_messages.append(copy_prompt_message)

params = {
'model': model,
'prompt': [{
'role': prompt_message.role.value if prompt_message.role.value != 'system' else 'user',
'role': prompt_message.role.value,
'content': prompt_message.content
} for prompt_message in prompt_messages],
} for prompt_message in new_prompt_messages],
**model_parameters
}

Expand Down

0 comments on commit 0c5892b

Please sign in to comment.