advanced_prompt_template_service.py

import copy

from core.prompt.advanced_prompt_templates import (
    CHAT_APP_COMPLETION_PROMPT_CONFIG,
    CHAT_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG,
    CONTEXT,
    BAICHUAN_CONTEXT,
)


class AdvancedPromptTemplateService:

    def get_prompt(self, args: dict) -> dict:
        app_mode = args['app_mode']
        model_mode = args['model_mode']
        model_name = args['model_name']
        has_context = args['has_context']

        # Baichuan models use dedicated templates and a Baichuan-specific context prefix.
        if 'baichuan' in model_name:
            return self.get_baichuan_prompt(app_mode, model_mode, has_context)
        else:
            return self.get_common_prompt(app_mode, model_mode, has_context)

    def get_common_prompt(self, app_mode: str, model_mode: str, has_context: str) -> dict:
        if app_mode == 'chat':
            if model_mode == 'completion':
                return self.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
            elif model_mode == 'chat':
                return self.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
        elif app_mode == 'completion':
            if model_mode == 'completion':
                return self.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
            elif model_mode == 'chat':
                return self.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)

    def get_completion_prompt(self, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context block to the completion prompt text when context is enabled.
        if has_context == 'true':
            prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']

        return prompt_template

    def get_chat_prompt(self, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context block to the first message of the chat prompt when context is enabled.
        if has_context == 'true':
            prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']

        return prompt_template

    def get_baichuan_prompt(self, app_mode: str, model_mode: str, has_context: str) -> dict:
        if app_mode == 'chat':
            if model_mode == 'completion':
                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
            elif model_mode == 'chat':
                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
        elif app_mode == 'completion':
            if model_mode == 'completion':
                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
            elif model_mode == 'chat':
                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
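

Usage sketch (not part of the original file): the service keeps no state, so it can be instantiated directly and called with the args dict that get_prompt() reads. The concrete values below ('chat', 'completion', 'baichuan-13b-chat', 'true') are illustrative assumptions; note that has_context is compared against the string 'true', not a boolean.

# Illustrative example only: the argument values here are assumed for demonstration.
if __name__ == '__main__':
    service = AdvancedPromptTemplateService()
    prompt_config = service.get_prompt({
        'app_mode': 'chat',                 # 'chat' or 'completion'
        'model_mode': 'completion',         # 'chat' or 'completion'
        'model_name': 'baichuan-13b-chat',  # any name containing 'baichuan' selects the Baichuan templates
        'has_context': 'true',              # string flag, not a bool
    })
    print(prompt_config)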