
Update deepseek model configuration (#12899)

JasonVV, 3 months ago
commit a7b9375877

+ 1 - 1
api/core/model_runtime/model_providers/deepseek/llm/deepseek-chat.yaml

@@ -10,7 +10,7 @@ features:
   - stream-tool-call
 model_properties:
   mode: chat
-  context_size: 128000
+  context_size: 64000
 parameter_rules:
   - name: temperature
     use_template: temperature

+ 1 - 1
api/core/model_runtime/model_providers/deepseek/llm/deepseek-coder.yaml

@@ -10,7 +10,7 @@ features:
   - stream-tool-call
 model_properties:
   mode: chat
-  context_size: 128000
+  context_size: 64000
 parameter_rules:
   - name: temperature
     use_template: temperature
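
Both hunks above lower `context_size`, the token window Dify allows for these models, from 128,000 to 64,000. As a rough illustration of how such a cap is consumed downstream, the sketch below checks whether a prompt plus its completion budget fits the new window; the helper name and token counts are hypothetical, not Dify's actual validation code.

    # Illustrative sketch only: enforce a context_size cap read from the model YAML.
    # The helper is hypothetical; Dify's real checks live in its model runtime.
    CONTEXT_SIZE = 64000   # new value in deepseek-chat.yaml and deepseek-coder.yaml

    def fits_context(prompt_tokens: int, max_tokens: int,
                     context_size: int = CONTEXT_SIZE) -> bool:
        """Return True if the prompt plus the requested completion fits the window."""
        return prompt_tokens + max_tokens <= context_size

    print(fits_context(50_000, 8_192))   # True:  58_192 <= 64_000
    print(fits_context(60_000, 8_192))   # False: 68_192 >  64_000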

+ 6 - 1
api/core/model_runtime/model_providers/deepseek/llm/deepseek-reasoner.yaml

@@ -7,10 +7,15 @@ features:
   - agent-thought
 model_properties:
   mode: chat
-  context_size: 128000
+  context_size: 64000
 parameter_rules:
   - name: max_tokens
     use_template: max_tokens
     min: 1
     max: 8192
     default: 4096
+pricing:
+  input: "4"
+  output: "16"
+  unit: "0.000001"
+  currency: RMB
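
The new `pricing` block adds per-token rates for deepseek-reasoner. Under the usual reading of these fields, where the listed price is scaled by `unit` to a per-token rate (so 4 RMB per million input tokens and 16 RMB per million output tokens), a rough cost estimate looks like the sketch below; the function is illustrative, not Dify's billing logic.

    from decimal import Decimal

    # Sketch of how the pricing block reads; names mirror the YAML above.
    INPUT_PRICE = Decimal("4")       # pricing.input
    OUTPUT_PRICE = Decimal("16")     # pricing.output
    UNIT = Decimal("0.000001")       # pricing.unit
    CURRENCY = "RMB"                 # pricing.currency

    def request_cost(prompt_tokens: int, completion_tokens: int) -> Decimal:
        """Estimate the cost of one deepseek-reasoner call."""
        return (INPUT_PRICE * prompt_tokens + OUTPUT_PRICE * completion_tokens) * UNIT

    # (4 * 10_000 + 16 * 2_000) * 0.000001 = 0.072
    print(request_cost(10_000, 2_000), CURRENCY)   # 0.072000 RMB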