
feat(model_providers): Add Groq DeepSeek-R1-Distill-Llama-70b (#13229)

Signed-off-by: -LAN- <laipz8200@outlook.com>
-LAN- 2 months ago
Parent
Commit
c6ddf6d6cc

+ 1 - 0
api/core/model_runtime/model_providers/groq/llm/_position.yaml

@@ -1,3 +1,4 @@
+- deepseek-r1-distill-llama-70b
 - llama-3.1-405b-reasoning
 - llama-3.3-70b-versatile
 - llama-3.1-70b-versatile

+ 36 - 0
api/core/model_runtime/model_providers/groq/llm/deepseek-r1-distill-llama-70b.yaml

@@ -0,0 +1,36 @@
+model: deepseek-r1-distill-llama-70b
+label:
+  en_US: DeepSeek R1 Distill Llama 70b
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 128000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: max_tokens
+    use_template: max_tokens
+    default: 512
+    min: 1
+    max: 8192
+  - name: response_format
+    label:
+      zh_Hans: 回复格式
+      en_US: Response Format
+    type: string
+    help:
+      zh_Hans: 指定模型必须输出的格式
+      en_US: specifying the format that the model must output
+    required: false
+    options:
+      - text
+      - json_object
+pricing:
+  input: '3.00'
+  output: '3.00'
+  unit: '0.000001'
+  currency: USD
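
For context, the new YAML entry declares a chat model exposed through Groq, which serves models over an OpenAI-compatible API. A minimal sketch of calling the newly added model directly (assuming the `openai` Python package and a GROQ_API_KEY environment variable; this is an illustration of the declared parameters, not how Dify's model runtime invokes it):

# Sketch: call deepseek-r1-distill-llama-70b via Groq's OpenAI-compatible endpoint.
# Assumes the `openai` client library and a GROQ_API_KEY environment variable.
import os

from openai import OpenAI

client = OpenAI(
    api_key=os.environ["GROQ_API_KEY"],
    base_url="https://api.groq.com/openai/v1",
)

response = client.chat.completions.create(
    model="deepseek-r1-distill-llama-70b",
    messages=[{"role": "user", "content": "Explain model distillation in one sentence."}],
    temperature=0.6,   # maps to parameter_rules: temperature
    top_p=0.95,        # maps to parameter_rules: top_p
    max_tokens=512,    # maps to parameter_rules: max_tokens (default 512, max 8192)
)

print(response.choices[0].message.content)

The parameter names mirror the `parameter_rules` section above; the specific values shown are arbitrary examples, not defaults from the commit.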