Skip to content

Commit

Permalink
Feat/advanced prompt enhancement (#1340)
Browse files Browse the repository at this point in the history
  • Loading branch information
GarfieldDai authored Oct 13, 2023
1 parent 3efaa71 commit 695841a
Show file tree
Hide file tree
Showing 4 changed files with 35 additions and 22 deletions.
3 changes: 1 addition & 2 deletions api/controllers/console/app/advanced_prompt_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ def get(self):
parser.add_argument('model_name', type=str, required=True, location='args')
args = parser.parse_args()

service = AdvancedPromptTemplateService()
return service.get_prompt(args)
return AdvancedPromptTemplateService.get_prompt(args)

api.add_resource(AdvancedPromptTemplateList, '/app/prompt-templates')
12 changes: 8 additions & 4 deletions api/core/prompt/advanced_prompt_templates.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
"user_prefix": "Human",
"assistant_prefix": "Assistant"
}
}
},
"stop": ["Human:"]
}

CHAT_APP_CHAT_PROMPT_CONFIG = {
Expand All @@ -37,7 +38,8 @@
"prompt": {
"text": "{{#pre_prompt#}}"
}
}
},
"stop": ["Human:"]
}

BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
Expand All @@ -49,7 +51,8 @@
"user_prefix": "用户",
"assistant_prefix": "助手"
}
}
},
"stop": ["用户:"]
}

BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG = {
Expand All @@ -75,5 +78,6 @@
"prompt": {
"text": "{{#pre_prompt#}}"
}
}
},
"stop": ["用户:"]
}
39 changes: 23 additions & 16 deletions api/services/advanced_prompt_template_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,51 +6,58 @@

class AdvancedPromptTemplateService:

def get_prompt(self, args: dict) -> dict:
@classmethod
def get_prompt(cls, args: dict) -> dict:
app_mode = args['app_mode']
model_mode = args['model_mode']
model_name = args['model_name']
has_context = args['has_context']

if 'baichuan' in model_name:
return self.get_baichuan_prompt(app_mode, model_mode, has_context)
return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
else:
return self.get_common_prompt(app_mode, model_mode, has_context)
return cls.get_common_prompt(app_mode, model_mode, has_context)

@classmethod
def get_common_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
context_prompt = copy.deepcopy(CONTEXT)

def get_common_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
if app_mode == 'chat':
if model_mode == 'completion':
return self.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
elif model_mode == 'chat':
return self.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
elif app_mode == 'completion':
if model_mode == 'completion':
return self.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
elif model_mode == 'chat':
return self.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)

def get_completion_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
@classmethod
def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
if has_context == 'true':
prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']

return prompt_template


def get_chat_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
@classmethod
def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
if has_context == 'true':
prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']

return prompt_template

@classmethod
def get_baichuan_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)

def get_baichuan_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
if app_mode == 'chat':
if model_mode == 'completion':
return self.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
elif model_mode == 'chat':
return self.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
elif app_mode == 'completion':
if model_mode == 'completion':
return self.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
elif model_mode == 'chat':
return self.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
3 changes: 3 additions & 0 deletions api/services/app_model_config_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@ def validate_model_completion_params(cp: dict, model_name: str) -> dict:
cp["stop"] = []
elif not isinstance(cp["stop"], list):
raise ValueError("stop in model.completion_params must be of list type")

if len(cp["stop"]) > 4:
raise ValueError("stop sequences must be no more than 4")

# Filter out extra parameters
filtered_cp = {
Expand Down

0 comments on commit 695841a

Please sign in to comment.