
fix: bedrock get_num_tokens prompt_messages parameter name err (#4932)

takatost committed 10 months ago
parent
commit
f44d1e62d2
1 changed file with 5 additions and 6 deletions

api/core/model_runtime/model_providers/bedrock/llm/llm.py (+5 -6)

@@ -358,26 +358,25 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
 
         return message_dict
 
-    def get_num_tokens(self, model: str, credentials: dict, messages: list[PromptMessage] | str,
+    def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[PromptMessage] | str,
                        tools: Optional[list[PromptMessageTool]] = None) -> int:
         """
         Get number of tokens for given prompt messages
 
         :param model: model name
         :param credentials: model credentials
-        :param messages: prompt messages or message string
+        :param prompt_messages: prompt messages or message string
         :param tools: tools for tool calling
         :return:
         """
         prefix = model.split('.')[0]
         model_name = model.split('.')[1]
-        if isinstance(messages, str):
-            prompt = messages
+        if isinstance(prompt_messages, str):
+            prompt = prompt_messages
         else:
-            prompt = self._convert_messages_to_prompt(messages, prefix, model_name)
+            prompt = self._convert_messages_to_prompt(prompt_messages, prefix, model_name)
 
         return self._get_num_tokens_by_gpt2(prompt)
-    
 
     def validate_credentials(self, model: str, credentials: dict) -> None:
         """