소스 검색

fix azure chatgpt o1 parameter error (#10067)

Charlie.Wei 5 달 전
부모
커밋
f6fecb957e
1개의 변경된 파일에 13개의 추가 그리고 18개의 삭제
  1. 13 18
      api/core/model_runtime/model_providers/azure_openai/_constant.py

+ 13 - 18
api/core/model_runtime/model_providers/azure_openai/_constant.py

@@ -37,6 +37,17 @@ def _get_max_tokens(default: int, min_val: int, max_val: int) -> ParameterRule:
     return rule
 
 
+def _get_o1_max_tokens(default: int, min_val: int, max_val: int) -> ParameterRule:  # builds the token-limit rule for o1-family models; added by this commit to fix the o1 parameter error
+    rule = ParameterRule(
+        name="max_completion_tokens",  # o1 models expect max_completion_tokens instead of max_tokens -- NOTE(review): inferred from commit title; confirm against Azure OpenAI API reference
+        **PARAMETER_RULE_TEMPLATE[DefaultParameterName.MAX_TOKENS],  # reuse the shared MAX_TOKENS template; only the parameter name differs
+    )
+    rule.default = default  # per-model bounds override the template values
+    rule.min = min_val
+    rule.max = max_val
+    return rule
+
+
 class AzureBaseModel(BaseModel):
     base_model_name: str
     entity: AIModelEntity
@@ -1098,14 +1109,6 @@ LLM_BASE_MODELS = [
                 ModelPropertyKey.CONTEXT_SIZE: 128000,
             },
             parameter_rules=[
-                ParameterRule(
-                    name="temperature",
-                    **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE],
-                ),
-                ParameterRule(
-                    name="top_p",
-                    **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P],
-                ),
                 ParameterRule(
                     name="response_format",
                     label=I18nObject(zh_Hans="回复格式", en_US="response_format"),
@@ -1116,7 +1119,7 @@ LLM_BASE_MODELS = [
                     required=False,
                     options=["text", "json_object"],
                 ),
-                _get_max_tokens(default=512, min_val=1, max_val=32768),
+                _get_o1_max_tokens(default=512, min_val=1, max_val=32768),
             ],
             pricing=PriceConfig(
                 input=15.00,
@@ -1143,14 +1146,6 @@ LLM_BASE_MODELS = [
                 ModelPropertyKey.CONTEXT_SIZE: 128000,
             },
             parameter_rules=[
-                ParameterRule(
-                    name="temperature",
-                    **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TEMPERATURE],
-                ),
-                ParameterRule(
-                    name="top_p",
-                    **PARAMETER_RULE_TEMPLATE[DefaultParameterName.TOP_P],
-                ),
                 ParameterRule(
                     name="response_format",
                     label=I18nObject(zh_Hans="回复格式", en_US="response_format"),
@@ -1161,7 +1156,7 @@ LLM_BASE_MODELS = [
                     required=False,
                     options=["text", "json_object"],
                 ),
-                _get_max_tokens(default=512, min_val=1, max_val=65536),
+                _get_o1_max_tokens(default=512, min_val=1, max_val=65536),
             ],
             pricing=PriceConfig(
                 input=3.00,