Jelajahi Sumber

fix: prompt for baichuan text generation models (#1299)

takatost 1 tahun lalu
induk
melakukan
8480b0197b
1 mengubah file dengan 6 tambahan dan 0 penghapusan
  api/core/model_providers/models/llm/baichuan_model.py (+6, -0)

api/core/model_providers/models/llm/baichuan_model.py (+6, -0)

@@ -37,6 +37,12 @@ class BaichuanModel(BaseLLM):
         prompts = self._get_prompt_from_messages(messages)
         return self._client.generate([prompts], stop, callbacks)
 
+    def prompt_file_name(self, mode: str) -> str:
+        if mode == 'completion':
+            return 'baichuan_completion'
+        else:
+            return 'baichuan_chat'
+
     def get_num_tokens(self, messages: List[PromptMessage]) -> int:
         """
         get num tokens of prompt messages.