
fix: max tokens of OpenAI gpt-3.5-turbo-instruct to 4097 (#1338)

takatost
parent commit 9822f687f7

api/core/model_providers/models/llm/openai_model.py (+1 / -1)

@@ -33,7 +33,7 @@ MODEL_MAX_TOKENS = {
     'gpt-4': 8192,
     'gpt-4-32k': 32768,
     'gpt-3.5-turbo': 4096,
-    'gpt-3.5-turbo-instruct': 8192,
+    'gpt-3.5-turbo-instruct': 4097,
     'gpt-3.5-turbo-16k': 16384,
     'text-davinci-003': 4097,
 }

api/core/model_providers/providers/openai_provider.py (+1 / -1)

@@ -144,7 +144,7 @@ class OpenAIProvider(BaseModelProvider):
             'gpt-4': 8192,
             'gpt-4-32k': 32768,
             'gpt-3.5-turbo': 4096,
-            'gpt-3.5-turbo-instruct': 8192,
+            'gpt-3.5-turbo-instruct': 4097,
             'gpt-3.5-turbo-16k': 16384,
             'text-davinci-003': 4097,
         }
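
For context, the values in these dicts are per-model context-window sizes; the fix brings gpt-3.5-turbo-instruct in line with the 4097-token limit already listed for text-davinci-003 rather than the 8192 it was incorrectly given. The sketch below is illustrative only and is not the repository's actual code: it shows how such a mapping is typically consumed to keep prompt plus completion within the window. The helper name clamp_completion_tokens and the caller-supplied prompt_tokens count are assumptions for the example.

```python
# Illustrative sketch (not taken from this repository): using a context-window
# map like MODEL_MAX_TOKENS to cap the requested completion length.

MODEL_MAX_TOKENS = {
    'gpt-4': 8192,
    'gpt-4-32k': 32768,
    'gpt-3.5-turbo': 4096,
    'gpt-3.5-turbo-instruct': 4097,   # value after this fix
    'gpt-3.5-turbo-16k': 16384,
    'text-davinci-003': 4097,
}

def clamp_completion_tokens(model: str, prompt_tokens: int, requested: int) -> int:
    """Return a max_tokens value that keeps prompt + completion inside the
    model's context window. Counting prompt_tokens (e.g. with a tokenizer)
    is left to the caller in this sketch."""
    context_window = MODEL_MAX_TOKENS.get(model, 4096)  # conservative default
    remaining = context_window - prompt_tokens
    if remaining <= 0:
        raise ValueError(
            f"Prompt ({prompt_tokens} tokens) already exceeds the "
            f"{context_window}-token context window of {model}."
        )
    return min(requested, remaining)

# With the old 8192 entry, a 5000-token prompt to gpt-3.5-turbo-instruct would
# have passed this local check and then failed at the OpenAI API; with 4097 it
# is rejected before the request is sent.
print(clamp_completion_tokens('gpt-3.5-turbo-instruct', 1000, 2048))  # -> 2048
```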