
feat(ark): support doubao 1.5 series of models (#12935)

sino committed 3 months ago · commit d167d5b1be

+ 39 - 18
api/core/model_runtime/model_providers/volcengine_maas/llm/models.py

@@ -18,72 +18,93 @@ class ModelConfig(BaseModel):
 
 
 configs: dict[str, ModelConfig] = {
+    "Doubao-1.5-vision-pro-32k": ModelConfig(
+        properties=ModelProperties(context_size=32768, max_tokens=12288, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.VISION],
+    ),
+    "Doubao-1.5-pro-32k": ModelConfig(
+        properties=ModelProperties(context_size=32768, max_tokens=12288, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
+    ),
+    "Doubao-1.5-lite-32k": ModelConfig(
+        properties=ModelProperties(context_size=32768, max_tokens=12288, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
+    ),
+    "Doubao-1.5-pro-256k": ModelConfig(
+        properties=ModelProperties(context_size=262144, max_tokens=12288, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
+    ),
     "Doubao-vision-pro-32k": ModelConfig(
         properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.VISION],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.VISION],
     ),
     "Doubao-vision-lite-32k": ModelConfig(
         properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.VISION],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.VISION],
     ),
     "Doubao-pro-4k": ModelConfig(
         properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Doubao-lite-4k": ModelConfig(
         properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Doubao-pro-32k": ModelConfig(
         properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Doubao-lite-32k": ModelConfig(
         properties=ModelProperties(context_size=32768, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Doubao-pro-256k": ModelConfig(
         properties=ModelProperties(context_size=262144, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[],
+        features=[ModelFeature.AGENT_THOUGHT],
     ),
     "Doubao-pro-128k": ModelConfig(
         properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Doubao-lite-128k": ModelConfig(
-        properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT), features=[]
+        properties=ModelProperties(context_size=131072, max_tokens=4096, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
     ),
     "Skylark2-pro-4k": ModelConfig(
-        properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT), features=[]
+        properties=ModelProperties(context_size=4096, max_tokens=4096, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
     ),
     "Llama3-8B": ModelConfig(
-        properties=ModelProperties(context_size=8192, max_tokens=8192, mode=LLMMode.CHAT), features=[]
+        properties=ModelProperties(context_size=8192, max_tokens=8192, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
     ),
     "Llama3-70B": ModelConfig(
-        properties=ModelProperties(context_size=8192, max_tokens=8192, mode=LLMMode.CHAT), features=[]
+        properties=ModelProperties(context_size=8192, max_tokens=8192, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
     ),
     "Moonshot-v1-8k": ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Moonshot-v1-32k": ModelConfig(
         properties=ModelProperties(context_size=32768, max_tokens=16384, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Moonshot-v1-128k": ModelConfig(
         properties=ModelProperties(context_size=131072, max_tokens=65536, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "GLM3-130B": ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "GLM3-130B-Fin": ModelConfig(
         properties=ModelProperties(context_size=8192, max_tokens=4096, mode=LLMMode.CHAT),
-        features=[ModelFeature.TOOL_CALL],
+        features=[ModelFeature.AGENT_THOUGHT, ModelFeature.TOOL_CALL],
     ),
     "Mistral-7B": ModelConfig(
-        properties=ModelProperties(context_size=8192, max_tokens=2048, mode=LLMMode.CHAT), features=[]
+        properties=ModelProperties(context_size=8192, max_tokens=2048, mode=LLMMode.CHAT),
+        features=[ModelFeature.AGENT_THOUGHT],
     ),
 }
 

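For context, the new 1.5-series entries are plain additions to the module-level `configs` dict, so existing lookup code keeps working unchanged. Below is a minimal sketch (not part of this commit) of reading one of the new entries; the import path is inferred from the file location shown above and may differ in the actual package layout.

    # Illustrative only -- not part of this commit. The import path is inferred
    # from the file path shown in the diff and may differ in the real package.
    from core.model_runtime.model_providers.volcengine_maas.llm.models import configs

    config = configs.get("Doubao-1.5-pro-256k")
    if config is not None:
        # The 1.5 series raises max_tokens from 4096 to 12288 while keeping CHAT mode.
        print(config.properties.context_size)  # 262144
        print(config.properties.max_tokens)    # 12288
        print(config.features)                 # includes ModelFeature.AGENT_THOUGHT
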
+ 24 - 0
api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.yaml

@@ -118,6 +118,30 @@ model_credential_schema:
       type: select
       required: true
       options:
+        - label:
+            en_US: Doubao-1.5-vision-pro-32k
+          value: Doubao-1.5-vision-pro-32k
+          show_on:
+            - variable: __model_type
+              value: llm
+        - label:
+            en_US: Doubao-1.5-pro-32k
+          value: Doubao-1.5-pro-32k
+          show_on:
+            - variable: __model_type
+              value: llm
+        - label:
+            en_US: Doubao-1.5-lite-32k
+          value: Doubao-1.5-lite-32k
+          show_on:
+            - variable: __model_type
+              value: llm
+        - label:
+            en_US: Doubao-1.5-pro-256k
+          value: Doubao-1.5-pro-256k
+          show_on:
+            - variable: __model_type
+              value: llm
         - label:
             en_US: Doubao-vision-pro-32k
           value: Doubao-vision-pro-32k
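Each new endpoint needs both a `ModelConfig` entry in models.py and a matching select option in volcengine_maas.yaml, gated on `__model_type: llm`. The sketch below (not part of this commit) cross-checks the two files so an option without a config is caught early; it only relies on the keys visible in the diff (`value`, `show_on`, `variable`) and does not assume the names of intermediate YAML keys, and the import path is again inferred from the file location.

    # Illustrative consistency check -- not part of this commit.
    import yaml

    # Import path inferred from the diff's file location; adjust to the real package layout.
    from core.model_runtime.model_providers.volcengine_maas.llm.models import configs

    def iter_llm_option_values(node):
        # Walk the parsed YAML and yield every option "value" gated on __model_type == llm,
        # without assuming the names of intermediate keys.
        if isinstance(node, dict):
            show_on = node.get("show_on") or []
            if "value" in node and any(
                isinstance(s, dict) and s.get("variable") == "__model_type" and s.get("value") == "llm"
                for s in show_on
            ):
                yield node["value"]
            for child in node.values():
                yield from iter_llm_option_values(child)
        elif isinstance(node, list):
            for child in node:
                yield from iter_llm_option_values(child)

    with open("api/core/model_runtime/model_providers/volcengine_maas/volcengine_maas.yaml") as f:
        schema = yaml.safe_load(f)

    missing = set(iter_llm_option_values(schema)) - set(configs)
    print("select options without a ModelConfig entry:", sorted(missing))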