
fix: openllm completion start with prompt, remove it (#1303)

takatost 1 year ago
parent
commit
875dfbbf0e
1 changed file with 1 addition and 0 deletions
api/core/third_party/langchain/llms/openllm.py  (+1, −0)

@@ -66,6 +66,7 @@ class OpenLLM(LLM):
 
         json_response = response.json()
         completion = json_response["responses"][0]
+        completion = completion.lstrip(prompt)
 
         if stop is not None:
             completion = enforce_stop_tokens(completion, stop)
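
Note that Python's str.lstrip(prompt) strips any leading characters that occur anywhere in prompt, not the literal prompt prefix, so it can remove more than intended. A minimal prefix-safe sketch of the same idea (the helper name strip_prompt_prefix is hypothetical, not part of this commit):

    # Editorial sketch, not from the commit: drop the echoed prompt only when
    # the completion actually starts with it, avoiding the character-set
    # behaviour of str.lstrip().
    def strip_prompt_prefix(completion: str, prompt: str) -> str:
        if completion.startswith(prompt):
            return completion[len(prompt):]
        return completion

    # Example: the backend echoes the prompt before its answer.
    strip_prompt_prefix("Q: 2+2?\nA: 4", "Q: 2+2?\n")  # -> "A: 4"

On Python 3.9 and later, completion.removeprefix(prompt) gives the same result in one call.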