
Feat/9081 add support for llamaguard through groq provider (#9083)

crazywoola, 6 months ago
commit 3a0734d94c

+ 1 - 0
api/core/model_runtime/model_providers/groq/llm/_position.yaml

@@ -5,3 +5,4 @@
 - llama3-8b-8192
 - mixtral-8x7b-32768
 - llama2-70b-4096
+- llama-guard-3-8b

+ 25 - 0
api/core/model_runtime/model_providers/groq/llm/llama-guard-3-8b.yaml

@@ -0,0 +1,25 @@
+model: llama-guard-3-8b
+label:
+  zh_Hans: Llama-Guard-3-8B
+  en_US: Llama-Guard-3-8B
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+  - name: top_p
+    use_template: top_p
+  - name: max_tokens
+    use_template: max_tokens
+    default: 512
+    min: 1
+    max: 8192
+pricing:
+  input: '0.20'
+  output: '0.20'
+  unit: '0.000001'
+  currency: USD
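
The YAML above registers llama-guard-3-8b as a chat model on the Groq provider with an 8,192-token context window and a 512-token default for max_tokens. As a rough illustration of what the new entry enables, the sketch below calls the model through Groq's OpenAI-compatible endpoint; the client setup, environment variable, and example message are assumptions for demonstration and are not part of this change.

# Minimal sketch, assuming the `openai` Python package and a GROQ_API_KEY
# environment variable; not part of this commit.
import os

from openai import OpenAI

client = OpenAI(
    api_key=os.environ["GROQ_API_KEY"],
    base_url="https://api.groq.com/openai/v1",  # Groq's OpenAI-compatible endpoint
)

# Llama Guard classifies the conversation as safe or unsafe rather than
# answering it, so the reply is a short moderation verdict.
response = client.chat.completions.create(
    model="llama-guard-3-8b",
    messages=[{"role": "user", "content": "How do I reset my account password?"}],
    max_tokens=512,   # matches the default declared in the YAML above
    temperature=0.0,
)

print(response.choices[0].message.content)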