
Update Novita AI predefined models (#13045)

Jason committed 2 months ago
Commit d4be5ef9de
25 files changed: 614 additions, 56 deletions
  1. + 8 - 16  api/core/model_runtime/model_providers/novita/_assets/icon_l_en.svg
  2. + 2 - 9  api/core/model_runtime/model_providers/novita/_assets/icon_s_en.svg
  3. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/L3-8B-Stheno-v3.2.yaml
  4. + 40 - 0  api/core/model_runtime/model_providers/novita/llm/_position.yaml
  5. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/deepseek_v3.yaml
  6. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/l3-8b-lunaris.yaml
  7. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/l31-70b-euryale-v2.2.yaml
  8. + 2 - 2  api/core/model_runtime/model_providers/novita/llm/llama-3-8b-instruct.yaml
  9. + 3 - 3  api/core/model_runtime/model_providers/novita/llm/llama-3.1-70b-instruct.yaml
  10. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct-bf16.yaml
  11. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct-max.yaml
  12. + 3 - 3  api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct.yaml
  13. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/llama-3.2-11b-vision-instruct.yaml
  14. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/llama-3.2-1b-instruct.yaml
  15. + 5 - 5  api/core/model_runtime/model_providers/novita/llm/llama-3.2-3b-instruct.yaml
  16. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/llama-3.3-70b-instruct.yaml
  17. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/mistral-nemo.yaml
  18. + 2 - 2  api/core/model_runtime/model_providers/novita/llm/mythomax-l2-13b.yaml
  19. + 5 - 5  api/core/model_runtime/model_providers/novita/llm/openchat-7b.yaml
  20. + 5 - 5  api/core/model_runtime/model_providers/novita/llm/qwen-2-72b-instruct.yaml
  21. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/qwen-2-7b-instruct.yaml
  22. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/qwen-2-vl-72b-instruct.yaml
  23. + 41 - 0  api/core/model_runtime/model_providers/novita/llm/qwen-2.5-72b-instruct.yaml
  24. + 2 - 2  api/core/model_runtime/model_providers/novita/llm/wizardlm-2-8x22b.yaml
  25. + 4 - 4  api/core/model_runtime/model_providers/novita/novita.yaml

+ 8 - 16
api/core/model_runtime/model_providers/novita/_assets/icon_l_en.svg

@@ -1,19 +1,11 @@
-<svg width="162" height="36" viewBox="0 0 162 36" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path fill-rule="evenodd" clip-rule="evenodd" d="M2 0C0.895431 0 0 0.895432 0 2V29.1891C0 30.2937 0.895433 31.1891 2 31.1891H5.51171L16.0608 35.1377C16.7145 35.3824 17.4114 34.8991 17.4114 34.2012V11.3669C17.4114 10.533 16.894 9.78665 16.1131 9.49405L5.51171 5.52152H25.58V31.1891H29.0917C30.1963 31.1891 31.0917 30.2937 31.0917 29.1891V2C31.0917 0.895431 30.1963 0 29.0917 0H2ZM14.6022 23.7351C15.0558 23.956 15.4239 23.6812 15.4239 23.1185C15.4239 22.5557 15.0558 21.9204 14.6022 21.6995C14.1486 21.4775 13.7804 21.7545 13.7804 22.3161C13.7804 22.8777 14.1486 23.513 14.6022 23.7351Z" fill="white"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M2 0C0.895431 0 0 0.895432 0 2V29.1891C0 30.2937 0.895433 31.1891 2 31.1891H5.51171L16.0608 35.1377C16.7145 35.3824 17.4114 34.8991 17.4114 34.2012V11.3669C17.4114 10.533 16.894 9.78665 16.1131 9.49405L5.51171 5.52152H25.58V31.1891H29.0917C30.1963 31.1891 31.0917 30.2937 31.0917 29.1891V2C31.0917 0.895431 30.1963 0 29.0917 0H2ZM14.6022 23.7351C15.0558 23.956 15.4239 23.6812 15.4239 23.1185C15.4239 22.5557 15.0558 21.9204 14.6022 21.6995C14.1486 21.4775 13.7804 21.7545 13.7804 22.3161C13.7804 22.8777 14.1486 23.513 14.6022 23.7351Z" fill="url(#paint0_linear_1473_71)"/>
-<path d="M55.9397 27.8804H59.0566V19.0803C59.0566 14.9105 56.381 12.7172 52.8228 12.7172C51.0023 12.7172 49.3197 13.4483 48.2991 14.6668V12.9609H45.1546V27.8804H48.2991V19.5406C48.2991 16.8059 49.8162 15.3978 52.1332 15.3978C54.4226 15.3978 55.9397 16.8059 55.9397 19.5406V27.8804Z" fill="#11101A"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M69.7881 12.7172C74.1187 12.7172 77.539 15.7228 77.539 20.4071C77.539 25.0915 74.0083 28.1241 69.6502 28.1241C65.3196 28.1241 62.0372 25.0915 62.0372 20.4071C62.0372 15.7228 65.4575 12.7172 69.7881 12.7172ZM69.7342 15.3979C67.362 15.3979 65.2381 17.0225 65.2381 20.4071C65.2381 23.7918 67.2793 25.4435 69.6514 25.4435C71.996 25.4435 74.313 23.7918 74.313 20.4071C74.313 17.0225 72.0788 15.3979 69.7342 15.3979Z" fill="#11101A"/>
-<path d="M78.861 12.9609L84.6259 27.8804H88.3772L94.1697 12.9609H90.8321L86.5291 25.1185L82.2261 12.9609H78.861Z" fill="#11101A"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M100.13 9.00761C100.13 10.1178 99.2477 10.9842 98.1443 10.9842C97.0134 10.9842 96.1308 10.1178 96.1308 9.00761C96.1308 7.89745 97.0134 7.03098 98.1443 7.03098C99.2477 7.03098 100.13 7.89745 100.13 9.00761ZM99.6882 27.8804H96.5437V12.9609H99.6882V27.8804Z" fill="#11101A"/>
-<path d="M104.322 23.7376C104.322 26.7702 106.004 27.8804 108.708 27.8804H111.19V25.308H109.259C107.935 25.308 107.494 24.8477 107.494 23.7376V15.479H111.19V12.9609H107.494V9.25128H104.322V12.9609H102.529V15.479H104.322V23.7376Z" fill="#11101A"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M120.154 28.1241C116.209 28.1241 113.037 24.9561 113.037 20.353C113.037 15.7498 116.209 12.7172 120.209 12.7172C122.774 12.7172 124.539 13.9086 125.477 15.1271V12.9609H128.649V27.8804H125.477V25.6601C124.512 26.9327 122.691 28.1241 120.154 28.1241ZM120.87 25.4435C123.242 25.4435 125.476 23.6293 125.476 20.4071C125.476 17.212 123.242 15.3979 120.87 15.3979C118.526 15.3979 116.264 17.1308 116.264 20.353C116.264 23.5752 118.526 25.4435 120.87 25.4435Z" fill="#11101A"/>
-<path d="M136.043 26.0933C136.043 24.9832 135.16 24.1167 134.057 24.1167C132.926 24.1167 132.043 24.9832 132.043 26.0933C132.043 27.2035 132.926 28.07 134.057 28.07C135.16 28.07 136.043 27.2035 136.043 26.0933Z" fill="#11101A"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M145.502 28.1241C141.558 28.1241 138.386 24.9561 138.386 20.353C138.386 15.7498 141.558 12.7172 145.557 12.7172C148.123 12.7172 149.888 13.9086 150.826 15.1271V12.9609H153.998V27.8804H150.826V25.6601C149.86 26.9327 148.04 28.1241 145.502 28.1241ZM146.219 25.4435C148.591 25.4435 150.825 23.6293 150.825 20.4071C150.825 17.212 148.591 15.3979 146.219 15.3979C143.874 15.3979 141.612 17.1308 141.612 20.353C141.612 23.5752 143.874 25.4435 146.219 25.4435Z" fill="#11101A"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M161.722 9.00761C161.722 10.1178 160.84 10.9842 159.736 10.9842C158.605 10.9842 157.723 10.1178 157.723 9.00761C157.723 7.89745 158.605 7.03098 159.736 7.03098C160.84 7.03098 161.722 7.89745 161.722 9.00761ZM161.28 27.8804H158.136V12.9609H161.28V27.8804Z" fill="#11101A"/>
+<svg width="88" height="24" viewBox="0 0 88 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+<g clip-path="url(#clip0_1923_1287)">
+<path d="M24 18.8323V18.8326H14.3246L9.16716 13.6751V18.8326H0V18.8314L9.16716 9.66422V4H9.16774L24 18.8323Z" fill="black"/>
+</g>
+<path fill-rule="evenodd" clip-rule="evenodd" d="M73.2505 16.8061H76.5869V18.9145H73.9391C72.0857 18.9145 70.9202 17.8952 70.9202 15.9977V10.3921H69.0316V8.26609H70.9202L71.4677 5.47209H73.2329V8.26609H76.5869V10.3921H73.2505V16.8061ZM33.8133 4.85699L38.6679 15.681H38.809V4.85699H41.3333V18.9145H37.52L32.6654 8.09046H32.5243V18.9145H30V4.85699H33.8133ZM47.812 19.1254C44.7225 19.1254 42.7457 16.9641 42.7457 13.6079C42.7457 10.2517 44.6873 8.05518 47.812 8.05518C50.9367 8.05518 52.8429 10.1635 52.8429 13.6079C52.8429 17.0523 50.9014 19.1254 47.812 19.1254ZM47.812 17.017C49.1891 17.017 50.3363 16.5423 50.3715 15.1894V12.0265C50.3715 10.6383 49.2068 10.1635 47.812 10.1635C46.4172 10.1635 45.2171 10.6383 45.2171 12.0265V15.1894C45.2524 16.5599 46.4348 17.017 47.812 17.017ZM55.5444 8.24846L58.2979 16.6826H58.439L61.1926 8.24846H63.7346L59.9389 18.8968H56.7966L53.0186 8.24846H55.5429H55.5444ZM65.0419 8.26609H67.3722V18.9145H65.0419V8.26609ZM64.9001 4.85699H67.5126V6.86027H64.9001V4.85699ZM82.3064 19.143C79.4639 19.143 77.6458 16.9817 77.6458 13.6079C77.6458 10.2341 79.4286 8.07282 82.3064 8.07282C83.6483 8.07282 84.7425 8.59973 85.3958 9.58373H85.5369L85.9962 8.26609H87.7614V18.9145H85.9962L85.5369 17.6314H85.3958C84.6896 18.5625 83.5072 19.1423 82.3064 19.1423V19.143ZM82.7826 17.017C84.1774 17.017 85.3951 16.5776 85.4304 15.1894V12.0265C85.4304 10.603 84.159 10.1988 82.7297 10.1988C81.3004 10.1988 80.1172 10.6383 80.1172 12.0265V15.1894C80.1525 16.5952 81.3709 17.017 82.7826 17.017Z" fill="black"/>
 <defs>
-<linearGradient id="paint0_linear_1473_71" x1="31" y1="-2" x2="0.975591" y2="14.2625" gradientUnits="userSpaceOnUse">
-<stop stop-color="#2622FF"/>
-<stop offset="1" stop-color="#A717FF"/>
-</linearGradient>
+<clipPath id="clip0_1923_1287">
+<rect width="24" height="14.8326" fill="white" transform="translate(0 4)"/>
+</clipPath>
 </defs>
 </svg>

+ 2 - 9
api/core/model_runtime/model_providers/novita/_assets/icon_s_en.svg

@@ -1,10 +1,3 @@
-<svg width="32" height="36" viewBox="0 0 32 36" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path fill-rule="evenodd" clip-rule="evenodd" d="M2 0C0.895431 0 0 0.895432 0 2V29.1891C0 30.2937 0.895433 31.1891 2 31.1891H5.51171L16.0608 35.1377C16.7145 35.3824 17.4114 34.8991 17.4114 34.2012V11.3669C17.4114 10.533 16.894 9.78665 16.1131 9.49405L5.51171 5.52152H25.58V31.1891H29.0917C30.1963 31.1891 31.0917 30.2937 31.0917 29.1891V2C31.0917 0.895431 30.1963 0 29.0917 0H2ZM14.6022 23.7351C15.0558 23.956 15.4239 23.6812 15.4239 23.1185C15.4239 22.5557 15.0558 21.9204 14.6022 21.6995C14.1486 21.4775 13.7804 21.7545 13.7804 22.3161C13.7804 22.8777 14.1486 23.513 14.6022 23.7351Z" fill="white"/>
-<path fill-rule="evenodd" clip-rule="evenodd" d="M2 0C0.895431 0 0 0.895432 0 2V29.1891C0 30.2937 0.895433 31.1891 2 31.1891H5.51171L16.0608 35.1377C16.7145 35.3824 17.4114 34.8991 17.4114 34.2012V11.3669C17.4114 10.533 16.894 9.78665 16.1131 9.49405L5.51171 5.52152H25.58V31.1891H29.0917C30.1963 31.1891 31.0917 30.2937 31.0917 29.1891V2C31.0917 0.895431 30.1963 0 29.0917 0H2ZM14.6022 23.7351C15.0558 23.956 15.4239 23.6812 15.4239 23.1185C15.4239 22.5557 15.0558 21.9204 14.6022 21.6995C14.1486 21.4775 13.7804 21.7545 13.7804 22.3161C13.7804 22.8777 14.1486 23.513 14.6022 23.7351Z" fill="url(#paint0_linear_1473_97)"/>
-<defs>
-<linearGradient id="paint0_linear_1473_97" x1="31" y1="-2" x2="0.975591" y2="14.2625" gradientUnits="userSpaceOnUse">
-<stop stop-color="#2622FF"/>
-<stop offset="1" stop-color="#A717FF"/>
-</linearGradient>
-</defs>
+<svg width="24" height="15" viewBox="0 0 24 15" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M24 14.8323V14.8326H14.3246L9.16716 9.67507V14.8326H0V14.8314L9.16716 5.66422V0H9.16774L24 14.8323Z" fill="black"/>
 </svg>

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/L3-8B-Stheno-v3.2.yaml

@@ -0,0 +1,41 @@
+model: Sao10K/L3-8B-Stheno-v3.2
+label:
+  zh_Hans: Sao10K/L3-8B-Stheno-v3.2
+  en_US: Sao10K/L3-8B-Stheno-v3.2
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0005'
+  output: '0.0005'
+  unit: '0.0001'
+  currency: USD

+ 40 - 0
api/core/model_runtime/model_providers/novita/llm/_position.yaml

@@ -0,0 +1,40 @@
+# Deepseek Models
+- deepseek/deepseek_v3
+
+# LLaMA Models
+- meta-llama/llama-3.3-70b-instruct
+- meta-llama/llama-3.2-11b-vision-instruct
+- meta-llama/llama-3.2-3b-instruct
+- meta-llama/llama-3.2-1b-instruct
+- meta-llama/llama-3.1-70b-instruct
+- meta-llama/llama-3.1-8b-instruct
+- meta-llama/llama-3.1-8b-instruct-max
+- meta-llama/llama-3.1-8b-instruct-bf16
+- meta-llama/llama-3-70b-instruct
+- meta-llama/llama-3-8b-instruct
+
+# Mistral Models
+- mistralai/mistral-nemo
+- mistralai/mistral-7b-instruct
+
+# Qwen Models
+- qwen/qwen-2.5-72b-instruct
+- qwen/qwen-2-72b-instruct
+- qwen/qwen-2-vl-72b-instruct
+- qwen/qwen-2-7b-instruct
+
+# Other Models
+- sao10k/L3-8B-Stheno-v3.2
+- sao10k/l3-70b-euryale-v2.1
+- sao10k/l31-70b-euryale-v2.2
+- sao10k/l3-8b-lunaris
+- jondurbin/airoboros-l2-70b
+- cognitivecomputations/dolphin-mixtral-8x22b
+- google/gemma-2-9b-it
+- nousresearch/hermes-2-pro-llama-3-8b
+- sophosympatheia/midnight-rose-70b
+- gryphe/mythomax-l2-13b
+- nousresearch/nous-hermes-llama2-13b
+- openchat/openchat-7b
+- teknium/openhermes-2.5-mistral-7b
+- microsoft/wizardlm-2-8x22b

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/deepseek_v3.yaml

@@ -0,0 +1,41 @@
+model: deepseek/deepseek_v3
+label:
+  zh_Hans: deepseek/deepseek_v3
+  en_US: deepseek/deepseek_v3
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 64000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0089'
+  output: '0.0089'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/l3-8b-lunaris.yaml

@@ -0,0 +1,41 @@
+model: sao10k/l3-8b-lunaris
+label:
+  zh_Hans: sao10k/l3-8b-lunaris
+  en_US: sao10k/l3-8b-lunaris
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0005'
+  output: '0.0005'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/l31-70b-euryale-v2.2.yaml

@@ -0,0 +1,41 @@
+model: sao10k/l31-70b-euryale-v2.2
+label:
+  zh_Hans: sao10k/l31-70b-euryale-v2.2
+  en_US: sao10k/l31-70b-euryale-v2.2
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 16000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0148'
+  output: '0.0148'
+  unit: '0.0001'
+  currency: USD

+ 2 - 2
api/core/model_runtime/model_providers/novita/llm/llama-3-8b-instruct.yaml

@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.00063'
-  output: '0.00063'
+  input: '0.0004'
+  output: '0.0004'
   unit: '0.0001'
   currency: USD

+ 3 - 3
api/core/model_runtime/model_providers/novita/llm/llama-3.1-70b-instruct.yaml

@@ -7,7 +7,7 @@ features:
   - agent-thought
 model_properties:
   mode: chat
-  context_size: 8192
+  context_size: 32768
 parameter_rules:
   - name: temperature
     use_template: temperature
@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.0055'
-  output: '0.0076'
+  input: '0.0034'
+  output: '0.0039'
   unit: '0.0001'
   currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct-bf16.yaml

@@ -0,0 +1,41 @@
+model: meta-llama/llama-3.1-8b-instruct-bf16
+label:
+  zh_Hans: meta-llama/llama-3.1-8b-instruct-bf16
+  en_US: meta-llama/llama-3.1-8b-instruct-bf16
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0006'
+  output: '0.0006'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct-max.yaml

@@ -0,0 +1,41 @@
+model: meta-llama/llama-3.1-8b-instruct-max
+label:
+  zh_Hans: meta-llama/llama-3.1-8b-instruct-max
+  en_US: meta-llama/llama-3.1-8b-instruct-max
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 16384
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0005'
+  output: '0.0005'
+  unit: '0.0001'
+  currency: USD

+ 3 - 3
api/core/model_runtime/model_providers/novita/llm/llama-3.1-8b-instruct.yaml

@@ -7,7 +7,7 @@ features:
   - agent-thought
 model_properties:
   mode: chat
-  context_size: 8192
+  context_size: 16384
 parameter_rules:
   - name: temperature
     use_template: temperature
@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.001'
-  output: '0.001'
+  input: '0.0005'
+  output: '0.0005'
   unit: '0.0001'
   currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/llama-3.2-11b-vision-instruct.yaml

@@ -0,0 +1,41 @@
+model: meta-llama/llama-3.2-11b-vision-instruct
+label:
+  zh_Hans: meta-llama/llama-3.2-11b-vision-instruct
+  en_US: meta-llama/llama-3.2-11b-vision-instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 32768
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0006'
+  output: '0.0006'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/llama-3.2-1b-instruct.yaml

@@ -0,0 +1,41 @@
+model: meta-llama/llama-3.2-1b-instruct
+label:
+  zh_Hans: meta-llama/llama-3.2-1b-instruct
+  en_US: meta-llama/llama-3.2-1b-instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 131000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0002'
+  output: '0.0002'
+  unit: '0.0001'
+  currency: USD

+ 5 - 5
api/core/model_runtime/model_providers/novita/llm/llama-3.1-405b-instruct.yaml → api/core/model_runtime/model_providers/novita/llm/llama-3.2-3b-instruct.yaml

@@ -1,7 +1,7 @@
-model: meta-llama/llama-3.1-405b-instruct
+model: meta-llama/llama-3.2-3b-instruct
 label:
-  zh_Hans: meta-llama/llama-3.1-405b-instruct
-  en_US: meta-llama/llama-3.1-405b-instruct
+  zh_Hans: meta-llama/llama-3.2-3b-instruct
+  en_US: meta-llama/llama-3.2-3b-instruct
 model_type: llm
 features:
   - agent-thought
@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.03'
-  output: '0.05'
+  input: '0.0003'
+  output: '0.0005'
   unit: '0.0001'
   currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/llama-3.3-70b-instruct.yaml

@@ -0,0 +1,41 @@
+model: meta-llama/llama-3.3-70b-instruct
+label:
+  zh_Hans: meta-llama/llama-3.3-70b-instruct
+  en_US: meta-llama/llama-3.3-70b-instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 131072
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0039'
+  output: '0.0039'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/mistral-nemo.yaml

@@ -0,0 +1,41 @@
+model: mistralai/mistral-nemo
+label:
+  zh_Hans: mistralai/mistral-nemo
+  en_US: mistralai/mistral-nemo
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 131072
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0017'
+  output: '0.0017'
+  unit: '0.0001'
+  currency: USD

+ 2 - 2
api/core/model_runtime/model_providers/novita/llm/mythomax-l2-13b.yaml

@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.00119'
-  output: '0.00119'
+  input: '0.0009'
+  output: '0.0009'
   unit: '0.0001'
   currency: USD

+ 5 - 5
api/core/model_runtime/model_providers/novita/llm/lzlv_70b.yaml → api/core/model_runtime/model_providers/novita/llm/openchat-7b.yaml

@@ -1,7 +1,7 @@
-model: lzlv_70b
+model: openchat/openchat-7b
 label:
-  zh_Hans: lzlv_70b
-  en_US: lzlv_70b
+  zh_Hans: openchat/openchat-7b
+  en_US: openchat/openchat-7b
 model_type: llm
 features:
   - agent-thought
@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.0058'
-  output: '0.0078'
+  input: '0.0006'
+  output: '0.0006'
   unit: '0.0001'
   currency: USD

+ 5 - 5
api/core/model_runtime/model_providers/novita/llm/Nous-Hermes-2-Mixtral-8x7B-DPO.yaml → api/core/model_runtime/model_providers/novita/llm/qwen-2-72b-instruct.yaml

@@ -1,7 +1,7 @@
-model: Nous-Hermes-2-Mixtral-8x7B-DPO
+model: qwen/qwen-2-72b-instruct
 label:
-  zh_Hans: Nous-Hermes-2-Mixtral-8x7B-DPO
-  en_US: Nous-Hermes-2-Mixtral-8x7B-DPO
+  zh_Hans: qwen/qwen-2-72b-instruct
+  en_US: qwen/qwen-2-72b-instruct
 model_type: llm
 features:
   - agent-thought
@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.0027'
-  output: '0.0027'
+  input: '0.0034'
+  output: '0.0039'
   unit: '0.0001'
   currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/qwen-2-7b-instruct.yaml

@@ -0,0 +1,41 @@
+model: qwen/qwen-2-7b-instruct
+label:
+  zh_Hans: qwen/qwen-2-7b-instruct
+  en_US: qwen/qwen-2-7b-instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 32768
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.00054'
+  output: '0.00054'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/qwen-2-vl-72b-instruct.yaml

@@ -0,0 +1,41 @@
+model: qwen/qwen-2-vl-72b-instruct
+label:
+  zh_Hans: qwen/qwen-2-vl-72b-instruct
+  en_US: qwen/qwen-2-vl-72b-instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 32768
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0045'
+  output: '0.0045'
+  unit: '0.0001'
+  currency: USD

+ 41 - 0
api/core/model_runtime/model_providers/novita/llm/qwen-2.5-72b-instruct.yaml

@@ -0,0 +1,41 @@
+model: qwen/qwen-2.5-72b-instruct
+label:
+  zh_Hans: qwen/qwen-2.5-72b-instruct
+  en_US: qwen/qwen-2.5-72b-instruct
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 32000
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0
+    max: 2
+    default: 1
+  - name: top_p
+    use_template: top_p
+    min: 0
+    max: 1
+    default: 1
+  - name: max_tokens
+    use_template: max_tokens
+    min: 1
+    max: 2048
+    default: 512
+  - name: frequency_penalty
+    use_template: frequency_penalty
+    min: -2
+    max: 2
+    default: 0
+  - name: presence_penalty
+    use_template: presence_penalty
+    min: -2
+    max: 2
+    default: 0
+pricing:
+  input: '0.0038'
+  output: '0.004'
+  unit: '0.0001'
+  currency: USD

+ 2 - 2
api/core/model_runtime/model_providers/novita/llm/wizardlm-2-8x22b.yaml

@@ -35,7 +35,7 @@ parameter_rules:
     max: 2
     default: 0
 pricing:
-  input: '0.0064'
-  output: '0.0064'
+  input: '0.0062'
+  output: '0.0062'
   unit: '0.0001'
   currency: USD

+ 4 - 4
api/core/model_runtime/model_providers/novita/novita.yaml

@@ -1,6 +1,6 @@
 provider: novita
 label:
-  en_US: novita.ai
+  en_US: Novita AI
 description:
   en_US: An LLM API that matches various application scenarios with high cost-effectiveness.
   zh_Hans: 适配多种海外应用场景的高性价比 LLM API
@@ -11,10 +11,10 @@ icon_large:
 background: "#eadeff"
 help:
   title:
-    en_US: Get your API key from novita.ai
-    zh_Hans: 从 novita.ai 获取 API Key
+    en_US: Get your API key from Novita AI
+    zh_Hans: 从 Novita AI 获取 API Key
   url:
-    en_US: https://novita.ai/settings#key-management?utm_source=dify&utm_medium=ch&utm_campaign=api
+    en_US: https://novita.ai/settings/key-management?utm_source=dify&utm_medium=ch&utm_campaign=api
 supported_model_types:
   - llm
 configurate_methods: