Browse Source

feat: add deepseek-v2.5 for model provider siliconflow (#8639)

zhuhao 7 months ago
parent
commit
e34f04380d

+ 1 - 0
api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml

@@ -10,6 +10,7 @@
 - 01-ai/Yi-1.5-9B-Chat-16K
 - 01-ai/Yi-1.5-6B-Chat
 - THUDM/glm-4-9b-chat
+- deepseek-ai/DeepSeek-V2.5
 - deepseek-ai/DeepSeek-V2-Chat
 - deepseek-ai/DeepSeek-Coder-V2-Instruct
 - internlm/internlm2_5-7b-chat

+ 30 - 0
api/core/model_runtime/model_providers/siliconflow/llm/deepseek-v2.5.yaml

@@ -0,0 +1,30 @@
+model: deepseek-ai/DeepSeek-V2.5
+label:
+  en_US: deepseek-ai/DeepSeek-V2.5
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 32768
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: max_tokens
+    use_template: max_tokens
+    type: int
+    default: 512
+    min: 1
+    max: 4096
+    help:
+      zh_Hans: 指定生成结果长度的上限。如果生成结果被截断,可以调大该参数。
+      en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter.
+  - name: top_p
+    use_template: top_p
+  - name: frequency_penalty
+    use_template: frequency_penalty
+pricing:
+  input: '1.33'
+  output: '1.33'
+  unit: '0.000001'
+  currency: RMB