improve: modernizing validation by migrating pydantic from 1.x to 2.x (#4592)

Bowen Liang · 10 months ago
commit f976740b57
87 changed files with 702 additions and 305 deletions
  1. + 2 - 2    api/controllers/console/feature.py
  2. + 1 - 1    api/controllers/web/feature.py
  3. + 7 - 7    api/core/agent/cot_agent_runner.py
  4. + 2 - 2    api/core/agent/fc_agent_runner.py
  5. + 9 - 9    api/core/app/apps/agent_chat/app_runner.py
  6. + 1 - 1    api/core/app/apps/base_app_queue_manager.py
  7. + 2 - 2    api/core/app/apps/base_app_runner.py
  8. + 10 - 10    api/core/app/apps/chat/app_runner.py
  9. + 8 - 8    api/core/app/apps/completion/app_runner.py
  10. + 2 - 2    api/core/app/apps/message_based_app_generator.py
  11. + 8 - 2    api/core/app/entities/app_invoke_entities.py
  12. + 28 - 27    api/core/app/entities/queue_entities.py
  13. + 5 - 7    api/core/app/entities/task_entities.py
  14. + 1 - 1    api/core/app/features/hosting_moderation/hosting_moderation.py
  15. + 1 - 1    api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
  16. + 1 - 1    api/core/callback_handler/agent_tool_callback_handler.py
  17. + 1 - 1    api/core/entities/message_entities.py
  18. + 4 - 1    api/core/entities/model_entities.py
  19. + 7 - 5    api/core/entities/provider_configuration.py
  20. + 7 - 1    api/core/entities/provider_entities.py
  21. + 1 - 1    api/core/extension/extensible.py
  22. + 3 - 3    api/core/helper/code_executor/code_executor.py
  23. + 1 - 1    api/core/helper/code_executor/code_node_provider.py
  24. + 1 - 3    api/core/helper/code_executor/template_transformer.py
  25. + 1 - 1    api/core/indexing_runner.py
  26. + 8 - 1    api/core/model_runtime/entities/message_entities.py
  27. + 2 - 4    api/core/model_runtime/entities/model_entities.py
  28. + 3 - 3    api/core/model_runtime/entities/provider_entities.py
  29. + 5 - 0    api/core/model_runtime/model_providers/__base/ai_model.py
  30. + 5 - 0    api/core/model_runtime/model_providers/__base/large_language_model.py
  31. + 5 - 0    api/core/model_runtime/model_providers/__base/moderation_model.py
  32. + 5 - 0    api/core/model_runtime/model_providers/__base/speech2text_model.py
  33. + 5 - 0    api/core/model_runtime/model_providers/__base/text2img_model.py
  34. + 5 - 0    api/core/model_runtime/model_providers/__base/text_embedding_model.py
  35. + 5 - 0    api/core/model_runtime/model_providers/__base/tts_model.py
  36. + 2 - 6    api/core/model_runtime/model_providers/model_provider_factory.py
  37. + 1 - 1    api/core/model_runtime/utils/encoders.py
  38. + 1 - 1    api/core/model_runtime/utils/helper.py
  39. + 2 - 2    api/core/moderation/api/api.py
  40. + 2 - 4    api/core/moderation/output_moderation.py
  41. + 2 - 2    api/core/prompt/entities/advanced_prompt_entities.py
  42. + 2 - 2    api/core/rag/datasource/vdb/milvus/milvus_vector.py
  43. + 2 - 2    api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py
  44. + 2 - 2    api/core/rag/datasource/vdb/pgvector/pgvector.py
  45. + 2 - 2    api/core/rag/datasource/vdb/qdrant/qdrant_vector.py
  46. + 2 - 2    api/core/rag/datasource/vdb/relyt/relyt_vector.py
  47. + 2 - 2    api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py
  48. + 3 - 3    api/core/rag/datasource/vdb/weaviate/weaviate_vector.py
  49. + 5 - 7    api/core/rag/extractor/blod/blod.py
  50. + 3 - 7    api/core/rag/extractor/entity/extract_setting.py
  51. + 1 - 1    api/core/tools/entities/api_entities.py
  52. + 10 - 2    api/core/tools/entities/tool_entities.py
  53. + 8 - 3    api/core/tools/provider/builtin/aippt/tools/aippt.py
  54. + 4 - 8    api/core/tools/provider/builtin/arxiv/tools/arxiv_search.py
  55. + 1 - 1    api/core/tools/provider/builtin/bing/tools/bing_web_search.py
  56. + 3 - 3    api/core/tools/provider/builtin/brave/tools/brave_search.py
  57. + 174 - 0    api/core/tools/provider/builtin/duckduckgo/tools/duckduckgo_search.py
  58. + 2 - 2    api/core/tools/provider/builtin/firecrawl/tools/crawl.yaml
  59. + 7 - 7    api/core/tools/provider/builtin/pubmed/tools/pubmed_search.py
  60. + 1 - 1    api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.py
  61. + 3 - 3    api/core/tools/provider/builtin/searxng/tools/searxng_search.py
  62. + 1 - 1    api/core/tools/provider/builtin/stability/tools/text2image.py
  63. + 2 - 2    api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.yaml
  64. + 12 - 12    api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml
  65. + 4 - 3    api/core/tools/provider/builtin/twilio/tools/send_message.py
  66. + 2 - 2    api/core/tools/provider/builtin/webscraper/tools/webscraper.yaml
  67. + 3 - 3    api/core/tools/tool/api_tool.py
  68. + 2 - 4    api/core/tools/tool/dataset_retriever/dataset_retriever_base_tool.py
  69. + 10 - 5    api/core/tools/tool/tool.py
  70. + 1 - 1    api/core/workflow/nodes/code/entities.py
  71. + 9 - 6    api/core/workflow/nodes/http_request/entities.py
  72. + 1 - 1    api/core/workflow/nodes/http_request/http_request_node.py
  73. + 1 - 1    api/core/workflow/nodes/iteration/entities.py
  74. + 3 - 3    api/core/workflow/nodes/knowledge_retrieval/entities.py
  75. + 10 - 8    api/core/workflow/nodes/parameter_extractor/entities.py
  76. + 2 - 2    api/core/workflow/nodes/question_classifier/entities.py
  77. + 12 - 9    api/core/workflow/nodes/tool/entities.py
  78. + 1 - 1    api/core/workflow/nodes/variable_aggregator/entities.py
  79. + 1 - 1    api/core/workflow/workflow_engine_manager.py
  80. + 1 - 1    api/events/event_handlers/deduct_quota_when_messaeg_created.py
  81. + 1 - 1    api/events/event_handlers/update_provider_last_used_at_when_messaeg_created.py
  82. + 195 - 48    api/poetry.lock
  83. + 3 - 1    api/pyproject.toml
  84. + 3 - 1    api/requirements.txt
  85. + 8 - 2    api/services/entities/model_provider_entities.py
  86. + 4 - 1    api/services/feature_service.py
  87. + 1 - 1    api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py

+ 2 - 2
api/controllers/console/feature.py

@@ -16,12 +16,12 @@ class FeatureApi(Resource):
     @account_initialization_required
     @cloud_utm_record
     def get(self):
-        return FeatureService.get_features(current_user.current_tenant_id).dict()
+        return FeatureService.get_features(current_user.current_tenant_id).model_dump()


 class SystemFeatureApi(Resource):
     def get(self):
-        return FeatureService.get_system_features().dict()
+        return FeatureService.get_system_features().model_dump()


 api.add_resource(FeatureApi, '/features')

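The two hunks above are the simplest instance of a change that repeats across most of the controllers and services in this commit: Pydantic v1's .dict() is deprecated in v2 and .model_dump() is the replacement. A minimal, self-contained sketch of the rename (FeatureModel and its fields are illustrative stand-ins, not taken from the Dify codebase):

    from pydantic import BaseModel


    class FeatureModel(BaseModel):
        # hypothetical fields, for illustration only
        sso_enforced_for_signin: bool = False
        members: int = 1


    feature = FeatureModel(members=3)

    # Pydantic 1.x spelling (still present in 2.x, but emits a DeprecationWarning):
    legacy = feature.dict()

    # Pydantic 2.x replacement used throughout this commit:
    current = feature.model_dump()

    assert legacy == current == {'sso_enforced_for_signin': False, 'members': 3}
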
+ 1 - 1
api/controllers/web/feature.py

@@ -6,7 +6,7 @@ from services.feature_service import FeatureService

 class SystemFeatureApi(Resource):
     def get(self):
-        return FeatureService.get_system_features().dict()
+        return FeatureService.get_system_features().model_dump()


 api.add_resource(SystemFeatureApi, '/system-features')

+ 7 - 7
api/core/agent/cot_agent_runner.py

@@ -43,9 +43,9 @@ class CotAgentRunner(BaseAgentRunner, ABC):
         self._init_react_state(query)

         # check model mode
-        if 'Observation' not in app_generate_entity.model_config.stop:
-            if app_generate_entity.model_config.provider not in self._ignore_observation_providers:
-                app_generate_entity.model_config.stop.append('Observation')
+        if 'Observation' not in app_generate_entity.model_conf.stop:
+            if app_generate_entity.model_conf.provider not in self._ignore_observation_providers:
+                app_generate_entity.model_conf.stop.append('Observation')

         app_config = self.app_config

@@ -109,9 +109,9 @@ class CotAgentRunner(BaseAgentRunner, ABC):
             # invoke model
             chunks: Generator[LLMResultChunk, None, None] = model_instance.invoke_llm(
                 prompt_messages=prompt_messages,
-                model_parameters=app_generate_entity.model_config.parameters,
+                model_parameters=app_generate_entity.model_conf.parameters,
                 tools=[],
-                stop=app_generate_entity.model_config.stop,
+                stop=app_generate_entity.model_conf.stop,
                 stream=True,
                 user=self.user_id,
                 callbacks=[],
@@ -141,8 +141,8 @@ class CotAgentRunner(BaseAgentRunner, ABC):
                 if isinstance(chunk, AgentScratchpadUnit.Action):
                     action = chunk
                     # detect action
-                    scratchpad.agent_response += json.dumps(chunk.dict())
-                    scratchpad.action_str = json.dumps(chunk.dict())
+                    scratchpad.agent_response += json.dumps(chunk.model_dump())
+                    scratchpad.action_str = json.dumps(chunk.model_dump())
                     scratchpad.action = action
                 else:
                     scratchpad.agent_response += chunk

+ 2 - 2
api/core/agent/fc_agent_runner.py

@@ -84,9 +84,9 @@ class FunctionCallAgentRunner(BaseAgentRunner):
             # invoke model
             chunks: Union[Generator[LLMResultChunk, None, None], LLMResult] = model_instance.invoke_llm(
                 prompt_messages=prompt_messages,
-                model_parameters=app_generate_entity.model_config.parameters,
+                model_parameters=app_generate_entity.model_conf.parameters,
                 tools=prompt_messages_tools,
-                stop=app_generate_entity.model_config.stop,
+                stop=app_generate_entity.model_conf.stop,
                 stream=self.stream_tool_call,
                 user=self.user_id,
                 callbacks=[],

+ 9 - 9
api/core/app/apps/agent_chat/app_runner.py

@@ -58,7 +58,7 @@ class AgentChatAppRunner(AppRunner):
         # Not Include: memory, external data, dataset context
         self.get_pre_calculate_rest_tokens(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -69,8 +69,8 @@ class AgentChatAppRunner(AppRunner):
         if application_generate_entity.conversation_id:
             # get memory of conversation (read-only)
             model_instance = ModelInstance(
-                provider_model_bundle=application_generate_entity.model_config.provider_model_bundle,
-                model=application_generate_entity.model_config.model
+                provider_model_bundle=application_generate_entity.model_conf.provider_model_bundle,
+                model=application_generate_entity.model_conf.model
             )

             memory = TokenBufferMemory(
@@ -83,7 +83,7 @@ class AgentChatAppRunner(AppRunner):
         #          memory(optional)
         prompt_messages, _ = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -152,7 +152,7 @@ class AgentChatAppRunner(AppRunner):
         #          memory(optional), external data, dataset context(optional)
         prompt_messages, _ = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -182,12 +182,12 @@ class AgentChatAppRunner(AppRunner):

         # init model instance
         model_instance = ModelInstance(
-            provider_model_bundle=application_generate_entity.model_config.provider_model_bundle,
-            model=application_generate_entity.model_config.model
+            provider_model_bundle=application_generate_entity.model_conf.provider_model_bundle,
+            model=application_generate_entity.model_conf.model
         )
         prompt_message, _ = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -225,7 +225,7 @@ class AgentChatAppRunner(AppRunner):
             application_generate_entity=application_generate_entity,
             conversation=conversation,
             app_config=app_config,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             config=agent_entity,
             queue_manager=queue_manager,
             message=message,

+ 1 - 1
api/core/app/apps/base_app_queue_manager.py

@@ -100,7 +100,7 @@ class AppQueueManager:
         :param pub_from:
         :return:
         """
-        self._check_for_sqlalchemy_models(event.dict())
+        self._check_for_sqlalchemy_models(event.model_dump())
         self._publish(event, pub_from)

     @abstractmethod

+ 2 - 2
api/core/app/apps/base_app_runner.py

@@ -218,7 +218,7 @@ class AppRunner:
             index = 0
             for token in text:
                 chunk = LLMResultChunk(
-                    model=app_generate_entity.model_config.model,
+                    model=app_generate_entity.model_conf.model,
                     prompt_messages=prompt_messages,
                     delta=LLMResultChunkDelta(
                         index=index,
@@ -237,7 +237,7 @@ class AppRunner:
         queue_manager.publish(
             QueueMessageEndEvent(
                 llm_result=LLMResult(
-                    model=app_generate_entity.model_config.model,
+                    model=app_generate_entity.model_conf.model,
                     prompt_messages=prompt_messages,
                     message=AssistantPromptMessage(content=text),
                     usage=usage if usage else LLMUsage.empty_usage()

+ 10 - 10
api/core/app/apps/chat/app_runner.py

@@ -54,7 +54,7 @@ class ChatAppRunner(AppRunner):
         # Not Include: memory, external data, dataset context
         self.get_pre_calculate_rest_tokens(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -65,8 +65,8 @@ class ChatAppRunner(AppRunner):
         if application_generate_entity.conversation_id:
             # get memory of conversation (read-only)
             model_instance = ModelInstance(
-                provider_model_bundle=application_generate_entity.model_config.provider_model_bundle,
-                model=application_generate_entity.model_config.model
+                provider_model_bundle=application_generate_entity.model_conf.provider_model_bundle,
+                model=application_generate_entity.model_conf.model
             )

             memory = TokenBufferMemory(
@@ -79,7 +79,7 @@ class ChatAppRunner(AppRunner):
         #          memory(optional)
         prompt_messages, stop = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -159,7 +159,7 @@ class ChatAppRunner(AppRunner):
                 app_id=app_record.id,
                 user_id=application_generate_entity.user_id,
                 tenant_id=app_record.tenant_id,
-                model_config=application_generate_entity.model_config,
+                model_config=application_generate_entity.model_conf,
                 config=app_config.dataset,
                 query=query,
                 invoke_from=application_generate_entity.invoke_from,
@@ -173,7 +173,7 @@ class ChatAppRunner(AppRunner):
         #          memory(optional), external data, dataset context(optional)
         prompt_messages, stop = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -194,21 +194,21 @@ class ChatAppRunner(AppRunner):

         # Re-calculate the max tokens if sum(prompt_token +  max_tokens) over model token limit
         self.recalc_llm_max_tokens(
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_messages=prompt_messages
         )

         # Invoke model
         model_instance = ModelInstance(
-            provider_model_bundle=application_generate_entity.model_config.provider_model_bundle,
-            model=application_generate_entity.model_config.model
+            provider_model_bundle=application_generate_entity.model_conf.provider_model_bundle,
+            model=application_generate_entity.model_conf.model
         )

         db.session.close()

         invoke_result = model_instance.invoke_llm(
             prompt_messages=prompt_messages,
-            model_parameters=application_generate_entity.model_config.parameters,
+            model_parameters=application_generate_entity.model_conf.parameters,
             stop=stop,
             stream=application_generate_entity.stream,
             user=application_generate_entity.user_id,

+ 8 - 8
api/core/app/apps/completion/app_runner.py

@@ -50,7 +50,7 @@ class CompletionAppRunner(AppRunner):
         # Not Include: memory, external data, dataset context
         self.get_pre_calculate_rest_tokens(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -61,7 +61,7 @@ class CompletionAppRunner(AppRunner):
         # Include: prompt template, inputs, query(optional), files(optional)
         prompt_messages, stop = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -119,7 +119,7 @@ class CompletionAppRunner(AppRunner):
                 app_id=app_record.id,
                 user_id=application_generate_entity.user_id,
                 tenant_id=app_record.tenant_id,
-                model_config=application_generate_entity.model_config,
+                model_config=application_generate_entity.model_conf,
                 config=dataset_config,
                 query=query,
                 invoke_from=application_generate_entity.invoke_from,
@@ -132,7 +132,7 @@ class CompletionAppRunner(AppRunner):
         #          memory(optional), external data, dataset context(optional)
         prompt_messages, stop = self.organize_prompt_messages(
             app_record=app_record,
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_template_entity=app_config.prompt_template,
             inputs=inputs,
             files=files,
@@ -152,21 +152,21 @@ class CompletionAppRunner(AppRunner):

         # Re-calculate the max tokens if sum(prompt_token +  max_tokens) over model token limit
         self.recalc_llm_max_tokens(
-            model_config=application_generate_entity.model_config,
+            model_config=application_generate_entity.model_conf,
             prompt_messages=prompt_messages
         )

         # Invoke model
         model_instance = ModelInstance(
-            provider_model_bundle=application_generate_entity.model_config.provider_model_bundle,
-            model=application_generate_entity.model_config.model
+            provider_model_bundle=application_generate_entity.model_conf.provider_model_bundle,
+            model=application_generate_entity.model_conf.model
         )

         db.session.close()

         invoke_result = model_instance.invoke_llm(
             prompt_messages=prompt_messages,
-            model_parameters=application_generate_entity.model_config.parameters,
+            model_parameters=application_generate_entity.model_conf.parameters,
             stop=stop,
             stream=application_generate_entity.stream,
             user=application_generate_entity.user_id,

+ 2 - 2
api/core/app/apps/message_based_app_generator.py

@@ -158,8 +158,8 @@ class MessageBasedAppGenerator(BaseAppGenerator):
             model_id = None
         else:
             app_model_config_id = app_config.app_model_config_id
-            model_provider = application_generate_entity.model_config.provider
-            model_id = application_generate_entity.model_config.model
+            model_provider = application_generate_entity.model_conf.provider
+            model_id = application_generate_entity.model_conf.model
             override_model_configs = None
             if app_config.app_model_config_from == EasyUIBasedAppModelConfigFrom.ARGS \
                     and app_config.app_mode in [AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION]:

+ 8 - 2
api/core/app/entities/app_invoke_entities.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Any, Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.app.app_config.entities import AppConfig, EasyUIBasedAppConfig, WorkflowUIBasedAppConfig
 from core.entities.provider_configuration import ProviderModelBundle
@@ -62,6 +62,9 @@ class ModelConfigWithCredentialsEntity(BaseModel):
     parameters: dict[str, Any] = {}
     stop: list[str] = []

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+

 class AppGenerateEntity(BaseModel):
     """
@@ -93,10 +96,13 @@ class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
     """
     # app config
     app_config: EasyUIBasedAppConfig
-    model_config: ModelConfigWithCredentialsEntity
+    model_conf: ModelConfigWithCredentialsEntity

     query: Optional[str] = None

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+

 class ChatAppGenerateEntity(EasyUIBasedAppGenerateEntity):
     """

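The model_config to model_conf rename above is forced by Pydantic v2 itself: model_config is now the reserved attribute that carries a model's configuration, and any field whose name starts with model_ additionally collides with the default protected namespace. A hedged sketch of both halves of the fix (GenerateEntity and its fields are illustrative, not the real entity classes):

    from pydantic import BaseModel, ConfigDict


    class GenerateEntity(BaseModel):
        # In v2 the reserved `model_config` attribute holds the configuration,
        # so a field that used to be called model_config has to be renamed
        # (this commit picks model_conf).
        model_conf: dict = {}
        model_id: str = 'gpt-4'  # illustrative field name

        # Fields starting with model_ trip the default `model_` protected
        # namespace and emit warnings; clearing it is what the repeated
        # ConfigDict(protected_namespaces=()) additions in this commit do.
        model_config = ConfigDict(protected_namespaces=())


    entity = GenerateEntity(model_conf={'temperature': 0.7})
    print(entity.model_conf, entity.model_id)
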
+ 28 - 27
api/core/app/entities/queue_entities.py

@@ -1,14 +1,14 @@
 from enum import Enum
 from typing import Any, Optional

-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator

 from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
 from core.workflow.entities.base_node_data_entities import BaseNodeData
 from core.workflow.entities.node_entities import NodeType


-class QueueEvent(Enum):
+class QueueEvent(str, Enum):
     """
     QueueEvent enum
     """
@@ -47,14 +47,14 @@ class QueueLLMChunkEvent(AppQueueEvent):
     """
     QueueLLMChunkEvent entity
     """
-    event = QueueEvent.LLM_CHUNK
+    event: QueueEvent = QueueEvent.LLM_CHUNK
     chunk: LLMResultChunk

 class QueueIterationStartEvent(AppQueueEvent):
     """
     QueueIterationStartEvent entity
     """
-    event = QueueEvent.ITERATION_START
+    event: QueueEvent = QueueEvent.ITERATION_START
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
@@ -68,16 +68,17 @@ class QueueIterationNextEvent(AppQueueEvent):
     """
     QueueIterationNextEvent entity
     """
-    event = QueueEvent.ITERATION_NEXT
+    event: QueueEvent = QueueEvent.ITERATION_NEXT

     index: int
     node_id: str
     node_type: NodeType

     node_run_index: int
-    output: Optional[Any] # output for the current iteration
+    output: Optional[Any] = None # output for the current iteration

-    @validator('output', pre=True, always=True)
+    @classmethod
+    @field_validator('output', mode='before')
     def set_output(cls, v):
         """
         Set output
@@ -92,7 +93,7 @@ class QueueIterationCompletedEvent(AppQueueEvent):
     """
     QueueIterationCompletedEvent entity
     """
-    event = QueueEvent.ITERATION_COMPLETED
+    event:QueueEvent = QueueEvent.ITERATION_COMPLETED

     node_id: str
     node_type: NodeType
@@ -104,7 +105,7 @@ class QueueTextChunkEvent(AppQueueEvent):
     """
     QueueTextChunkEvent entity
     """
-    event = QueueEvent.TEXT_CHUNK
+    event: QueueEvent = QueueEvent.TEXT_CHUNK
     text: str
     metadata: Optional[dict] = None

@@ -113,7 +114,7 @@ class QueueAgentMessageEvent(AppQueueEvent):
     """
     QueueMessageEvent entity
     """
-    event = QueueEvent.AGENT_MESSAGE
+    event: QueueEvent = QueueEvent.AGENT_MESSAGE
     chunk: LLMResultChunk

 
@@ -121,7 +122,7 @@ class QueueMessageReplaceEvent(AppQueueEvent):
     """
     QueueMessageReplaceEvent entity
     """
-    event = QueueEvent.MESSAGE_REPLACE
+    event: QueueEvent = QueueEvent.MESSAGE_REPLACE
     text: str


@@ -129,7 +130,7 @@ class QueueRetrieverResourcesEvent(AppQueueEvent):
     """
     QueueRetrieverResourcesEvent entity
     """
-    event = QueueEvent.RETRIEVER_RESOURCES
+    event: QueueEvent = QueueEvent.RETRIEVER_RESOURCES
     retriever_resources: list[dict]


@@ -137,7 +138,7 @@ class QueueAnnotationReplyEvent(AppQueueEvent):
     """
     QueueAnnotationReplyEvent entity
     """
-    event = QueueEvent.ANNOTATION_REPLY
+    event: QueueEvent = QueueEvent.ANNOTATION_REPLY
     message_annotation_id: str


@@ -145,7 +146,7 @@ class QueueMessageEndEvent(AppQueueEvent):
     """
     QueueMessageEndEvent entity
     """
-    event = QueueEvent.MESSAGE_END
+    event: QueueEvent = QueueEvent.MESSAGE_END
     llm_result: Optional[LLMResult] = None


@@ -153,28 +154,28 @@ class QueueAdvancedChatMessageEndEvent(AppQueueEvent):
     """
     QueueAdvancedChatMessageEndEvent entity
     """
-    event = QueueEvent.ADVANCED_CHAT_MESSAGE_END
+    event: QueueEvent = QueueEvent.ADVANCED_CHAT_MESSAGE_END


 class QueueWorkflowStartedEvent(AppQueueEvent):
     """
     QueueWorkflowStartedEvent entity
     """
-    event = QueueEvent.WORKFLOW_STARTED
+    event: QueueEvent = QueueEvent.WORKFLOW_STARTED


 class QueueWorkflowSucceededEvent(AppQueueEvent):
     """
     QueueWorkflowSucceededEvent entity
     """
-    event = QueueEvent.WORKFLOW_SUCCEEDED
+    event: QueueEvent = QueueEvent.WORKFLOW_SUCCEEDED


 class QueueWorkflowFailedEvent(AppQueueEvent):
     """
     QueueWorkflowFailedEvent entity
     """
-    event = QueueEvent.WORKFLOW_FAILED
+    event: QueueEvent = QueueEvent.WORKFLOW_FAILED
     error: str


@@ -182,7 +183,7 @@ class QueueNodeStartedEvent(AppQueueEvent):
     """
     QueueNodeStartedEvent entity
     """
-    event = QueueEvent.NODE_STARTED
+    event: QueueEvent = QueueEvent.NODE_STARTED

     node_id: str
     node_type: NodeType
@@ -195,7 +196,7 @@ class QueueNodeSucceededEvent(AppQueueEvent):
     """
     QueueNodeSucceededEvent entity
     """
-    event = QueueEvent.NODE_SUCCEEDED
+    event: QueueEvent = QueueEvent.NODE_SUCCEEDED

     node_id: str
     node_type: NodeType
@@ -213,7 +214,7 @@ class QueueNodeFailedEvent(AppQueueEvent):
     """
     QueueNodeFailedEvent entity
     """
-    event = QueueEvent.NODE_FAILED
+    event: QueueEvent = QueueEvent.NODE_FAILED

     node_id: str
     node_type: NodeType
@@ -230,7 +231,7 @@ class QueueAgentThoughtEvent(AppQueueEvent):
     """
     QueueAgentThoughtEvent entity
     """
-    event = QueueEvent.AGENT_THOUGHT
+    event: QueueEvent = QueueEvent.AGENT_THOUGHT
     agent_thought_id: str


@@ -238,7 +239,7 @@ class QueueMessageFileEvent(AppQueueEvent):
     """
     QueueAgentThoughtEvent entity
     """
-    event = QueueEvent.MESSAGE_FILE
+    event: QueueEvent = QueueEvent.MESSAGE_FILE
     message_file_id: str


@@ -246,15 +247,15 @@ class QueueErrorEvent(AppQueueEvent):
     """
     QueueErrorEvent entity
     """
-    event = QueueEvent.ERROR
-    error: Any
+    event: QueueEvent = QueueEvent.ERROR
+    error: Any = None


 class QueuePingEvent(AppQueueEvent):
     """
     QueuePingEvent entity
     """
-    event = QueueEvent.PING
+    event: QueueEvent = QueueEvent.PING


 class QueueStopEvent(AppQueueEvent):
@@ -270,7 +271,7 @@ class QueueStopEvent(AppQueueEvent):
         OUTPUT_MODERATION = "output-moderation"
         INPUT_MODERATION = "input-moderation"

-    event = QueueEvent.STOP
+    event: QueueEvent = QueueEvent.STOP
     stopped_by: StopBy



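Two v1-to-v2 changes recur throughout the queue entities above: bare class attributes such as event = QueueEvent.LLM_CHUNK must gain a type annotation, because v2 no longer infers a field from an un-annotated default (it raises a "non-annotated attribute" error instead), and @validator(..., pre=True, always=True) becomes @field_validator(..., mode='before'). A standalone sketch with a simplified event model (names are illustrative; the decorator order follows the Pydantic v2 docs, with @field_validator outermost):

    from enum import Enum
    from typing import Any, Optional

    from pydantic import BaseModel, field_validator


    class QueueEvent(str, Enum):   # the str mixin keeps the value JSON-friendly
        TEXT_CHUNK = 'text_chunk'


    class QueueTextChunkEvent(BaseModel):
        # v1 accepted a bare `event = QueueEvent.TEXT_CHUNK`; v2 requires the annotation.
        event: QueueEvent = QueueEvent.TEXT_CHUNK
        # Optional fields also need an explicit default in v2.
        output: Optional[Any] = None

        # v1 spelling in the diff above: @validator('output', pre=True, always=True)
        @field_validator('output', mode='before')
        @classmethod
        def set_output(cls, v):
            return v if v is not None else ''


    print(QueueTextChunkEvent(output=None).model_dump())
    # {'event': <QueueEvent.TEXT_CHUNK: 'text_chunk'>, 'output': ''}
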
+ 5 - 7
api/core/app/entities/task_entities.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Any, Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage
 from core.model_runtime.utils.encoders import jsonable_encoder
@@ -118,9 +118,7 @@ class ErrorStreamResponse(StreamResponse):
     """
     event: StreamEvent = StreamEvent.ERROR
     err: Exception
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)


 class MessageStreamResponse(StreamResponse):
@@ -360,7 +358,7 @@ class IterationNodeNextStreamResponse(StreamResponse):
         title: str
         index: int
         created_at: int
-        pre_iteration_output: Optional[Any]
+        pre_iteration_output: Optional[Any] = None
         extras: dict = {}

     event: StreamEvent = StreamEvent.ITERATION_NEXT
@@ -379,12 +377,12 @@ class IterationNodeCompletedStreamResponse(StreamResponse):
         node_id: str
         node_type: str
         title: str
-        outputs: Optional[dict]
+        outputs: Optional[dict] = None
         created_at: int
         extras: dict = None
         inputs: dict = None
         status: WorkflowNodeExecutionStatus
-        error: Optional[str]
+        error: Optional[str] = None
         elapsed_time: float
         total_tokens: int
         finished_at: int

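The class Config to model_config = ConfigDict(...) change above is the standard v2 replacement for the nested configuration class. A small sketch showing both spellings (ErrorEnvelope is an illustrative stand-in, not a class from the codebase):

    from pydantic import BaseModel, ConfigDict


    # Pydantic 1.x spelling:
    #
    # class ErrorEnvelope(BaseModel):
    #     err: Exception
    #
    #     class Config:
    #         arbitrary_types_allowed = True

    # Pydantic 2.x spelling, as used in the hunk above:
    class ErrorEnvelope(BaseModel):
        model_config = ConfigDict(arbitrary_types_allowed=True)

        # a non-pydantic type, accepted only because of the config above
        err: Exception


    envelope = ErrorEnvelope(err=ValueError('boom'))
    print(type(envelope.err).__name__)  # ValueError
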
+ 1 - 1
api/core/app/features/hosting_moderation/hosting_moderation.py

@@ -16,7 +16,7 @@ class HostingModerationFeature:
         :param prompt_messages: prompt messages
         :return:
         """
-        model_config = application_generate_entity.model_config
+        model_config = application_generate_entity.model_conf

         text = ""
         for prompt_message in prompt_messages:

+ 1 - 1
api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py

@@ -85,7 +85,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline, MessageCycleMan
         :param stream: stream
         """
         super().__init__(application_generate_entity, queue_manager, user, stream)
-        self._model_config = application_generate_entity.model_config
+        self._model_config = application_generate_entity.model_conf
         self._conversation = conversation
         self._message = message


+ 1 - 1
api/core/callback_handler/agent_tool_callback_handler.py

@@ -29,7 +29,7 @@ def print_text(
 class DifyAgentCallbackHandler(BaseModel):
     """Callback Handler that prints to std out."""
     color: Optional[str] = ''
-    current_loop = 1
+    current_loop: int = 1

     def __init__(self, color: Optional[str] = None) -> None:
         super().__init__()

+ 1 - 1
api/core/entities/message_entities.py

@@ -17,7 +17,7 @@ class PromptMessageFileType(enum.Enum):

 class PromptMessageFile(BaseModel):
     type: PromptMessageFileType
-    data: Any
+    data: Any = None


 class ImagePromptMessageFile(PromptMessageFile):

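The explicit = None added above (and in several other hunks of this commit, e.g. extensible.py, task_entities.py and code_executor.py) is needed because Pydantic v2 no longer treats Optional[X] or bare Any annotations as implicitly optional: any field without a default is required, whatever its type. A quick illustrative check (PromptFile and StrictPromptFile are stand-in names):

    from typing import Any, Optional

    from pydantic import BaseModel, ValidationError


    class PromptFile(BaseModel):
        type: str
        data: Any = None            # v1 treated a bare `data: Any` as optional
        note: Optional[str] = None  # v1 gave Optional[...] an implicit None default


    class StrictPromptFile(BaseModel):
        type: str
        data: Any                   # v2: no default, so the field is required


    PromptFile(type='image')        # fine: data and note fall back to None

    try:
        StrictPromptFile(type='image')
    except ValidationError as exc:
        print(exc.errors()[0]['type'])  # 'missing' -- data must now be supplied
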
+ 4 - 1
api/core/entities/model_entities.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.model_runtime.entities.common_entities import I18nObject
 from core.model_runtime.entities.model_entities import ModelType, ProviderModel
@@ -77,3 +77,6 @@ class DefaultModelEntity(BaseModel):
     model: str
     model_type: ModelType
     provider: DefaultModelProviderEntity
+
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())

+ 7 - 5
api/core/entities/provider_configuration.py

@@ -6,7 +6,7 @@ from collections.abc import Iterator
 from json import JSONDecodeError
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.entities.model_entities import ModelStatus, ModelWithProviderEntity, SimpleModelProviderEntity
 from core.entities.provider_entities import (
@@ -54,6 +54,9 @@ class ProviderConfiguration(BaseModel):
     custom_configuration: CustomConfiguration
     model_settings: list[ModelSettings]

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def __init__(self, **data):
         super().__init__(**data)

@@ -1019,7 +1022,6 @@ class ProviderModelBundle(BaseModel):
     provider_instance: ModelProvider
     model_type_instance: AIModel

-    class Config:
-        """Configuration for this pydantic object."""
-
-        arbitrary_types_allowed = True
+    # pydantic configs
+    model_config = ConfigDict(arbitrary_types_allowed=True,
+                              protected_namespaces=())

+ 7 - 1
api/core/entities/provider_entities.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.model_runtime.entities.model_entities import ModelType
 from models.provider import ProviderQuotaType
@@ -27,6 +27,9 @@ class RestrictModel(BaseModel):
     base_model_name: Optional[str] = None
     model_type: ModelType

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+

 class QuotaConfiguration(BaseModel):
     """
@@ -65,6 +68,9 @@ class CustomModelConfiguration(BaseModel):
     model_type: ModelType
     credentials: dict

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+

 class CustomConfiguration(BaseModel):
     """

+ 1 - 1
api/core/extension/extensible.py

@@ -16,7 +16,7 @@ class ExtensionModule(enum.Enum):


 class ModuleExtension(BaseModel):
-    extension_class: Any
+    extension_class: Any = None
     name: str
     label: Optional[dict] = None
     form_schema: Optional[list] = None

+ 3 - 3
api/core/helper/code_executor/code_executor.py

@@ -28,8 +28,8 @@ class CodeExecutionException(Exception):

 class CodeExecutionResponse(BaseModel):
     class Data(BaseModel):
-        stdout: Optional[str]
-        error: Optional[str]
+        stdout: Optional[str] = None
+        error: Optional[str] = None

     code: int
     message: str
@@ -88,7 +88,7 @@ class CodeExecutor:
         }

         if dependencies:
-            data['dependencies'] = [dependency.dict() for dependency in dependencies]
+            data['dependencies'] = [dependency.model_dump() for dependency in dependencies]

         try:
             response = post(str(url), json=data, headers=headers, timeout=CODE_EXECUTION_TIMEOUT)

+ 1 - 1
api/core/helper/code_executor/code_node_provider.py

@@ -25,7 +25,7 @@ class CodeNodeProvider(BaseModel):

     @classmethod
     def get_default_available_packages(cls) -> list[dict]:
-        return [p.dict() for p in CodeExecutor.list_dependencies(cls.get_language())]
+        return [p.model_dump() for p in CodeExecutor.list_dependencies(cls.get_language())]

     @classmethod
     def get_default_config(cls) -> dict:

+ 1 - 3
api/core/helper/code_executor/template_transformer.py

@@ -4,12 +4,10 @@ from abc import ABC, abstractmethod
 from base64 import b64encode
 from typing import Optional

-from pydantic import BaseModel
-
 from core.helper.code_executor.entities import CodeDependency


-class TemplateTransformer(ABC, BaseModel):
+class TemplateTransformer(ABC):
     _code_placeholder: str = '{{code}}'
     _inputs_placeholder: str = '{{inputs}}'
     _result_tag: str = '<<RESULT>>'

+ 1 - 1
api/core/indexing_runner.py

@@ -550,7 +550,7 @@ class IndexingRunner:
                 document_qa_list = self.format_split_text(response)
                 qa_documents = []
                 for result in document_qa_list:
-                    qa_document = Document(page_content=result['question'], metadata=document_node.metadata.copy())
+                    qa_document = Document(page_content=result['question'], metadata=document_node.metadata.model_copy())
                     doc_id = str(uuid.uuid4())
                     hash = helper.generate_text_hash(result['question'])
                     qa_document.metadata['answer'] = result['answer']

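Like .dict(), the v1 .copy() method on a model is deprecated in v2; .model_copy() is the replacement used above. A brief sketch on a stand-in model (DocMetadata is illustrative, not the actual metadata type in the RAG pipeline):

    from pydantic import BaseModel


    class DocMetadata(BaseModel):   # illustrative stand-in
        doc_id: str = ''
        answer: str = ''


    meta = DocMetadata(doc_id='42')

    clone = meta.model_copy()                # v1: meta.copy(); shallow copy of the fields
    deep_clone = meta.model_copy(deep=True)  # deep copy when nested models matter

    clone.answer = 'cached answer'
    assert meta.answer == ''                 # the original instance is untouched
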
+ 8 - 1
api/core/model_runtime/entities/message_entities.py

@@ -2,7 +2,7 @@ from abc import ABC
 from enum import Enum
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, field_validator


 class PromptMessageRole(Enum):
@@ -123,6 +123,13 @@ class AssistantPromptMessage(PromptMessage):
         type: str
         function: ToolCallFunction

+        @field_validator('id', mode='before')
+        def transform_id_to_str(cls, value) -> str:
+            if not isinstance(value, str):
+                return str(value)
+            else:
+                return value
+
     role: PromptMessageRole = PromptMessageRole.ASSISTANT
     tool_calls: list[ToolCall] = []


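The validator added above runs in mode='before', i.e. on the raw input before the id: str annotation is enforced, presumably to cope with tool-call ids that arrive as numbers. A runnable approximation of the same idea on a simplified model (ToolCall here is a stand-in, not the real AssistantPromptMessage.ToolCall):

    from pydantic import BaseModel, field_validator


    class ToolCall(BaseModel):
        id: str
        type: str = 'function'

        # mode='before' sees the raw value, so a numeric id can be coerced to a
        # string before the `id: str` annotation is validated.
        @field_validator('id', mode='before')
        @classmethod
        def transform_id_to_str(cls, value) -> str:
            return value if isinstance(value, str) else str(value)


    print(ToolCall(id=123).id)  # '123'; v2 would otherwise reject an int for a str field
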
+ 2 - 4
api/core/model_runtime/entities/model_entities.py

@@ -2,7 +2,7 @@ from decimal import Decimal
 from enum import Enum
 from typing import Any, Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.model_runtime.entities.common_entities import I18nObject

@@ -148,9 +148,7 @@ class ProviderModel(BaseModel):
     fetch_from: FetchFrom
     model_properties: dict[ModelPropertyKey, Any]
     deprecated: bool = False
-
-    class Config:
-        protected_namespaces = ()
+    model_config = ConfigDict(protected_namespaces=())


 class ParameterRule(BaseModel):

+ 3 - 3
api/core/model_runtime/entities/provider_entities.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.model_runtime.entities.common_entities import I18nObject
 from core.model_runtime.entities.model_entities import AIModelEntity, ModelType, ProviderModel
@@ -122,8 +122,8 @@ class ProviderEntity(BaseModel):
     provider_credential_schema: Optional[ProviderCredentialSchema] = None
     model_credential_schema: Optional[ModelCredentialSchema] = None

-    class Config:
-        protected_namespaces = ()
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())

     def to_simple_provider(self) -> SimpleProviderEntity:
         """

+ 5 - 0
api/core/model_runtime/model_providers/__base/ai_model.py

@@ -3,6 +3,8 @@ import os
 from abc import ABC, abstractmethod
 from typing import Optional

+from pydantic import ConfigDict
+
 from core.helper.position_helper import get_position_map, sort_by_position_map
 from core.model_runtime.entities.common_entities import I18nObject
 from core.model_runtime.entities.defaults import PARAMETER_RULE_TEMPLATE
@@ -28,6 +30,9 @@ class AIModel(ABC):
     model_schemas: list[AIModelEntity] = None
     started_at: float = 0

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     @abstractmethod
     def validate_credentials(self, model: str, credentials: dict) -> None:
         """

+ 5 - 0
api/core/model_runtime/model_providers/__base/large_language_model.py

@@ -6,6 +6,8 @@ from abc import abstractmethod
 from collections.abc import Generator
 from typing import Optional, Union

+from pydantic import ConfigDict
+
 from core.model_runtime.callbacks.base_callback import Callback
 from core.model_runtime.callbacks.logging_callback import LoggingCallback
 from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
@@ -34,6 +36,9 @@ class LargeLanguageModel(AIModel):
     """
     model_type: ModelType = ModelType.LLM

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def invoke(self, model: str, credentials: dict,
                prompt_messages: list[PromptMessage], model_parameters: Optional[dict] = None,
                tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,

+ 5 - 0
api/core/model_runtime/model_providers/__base/moderation_model.py

@@ -2,6 +2,8 @@ import time
 from abc import abstractmethod
 from typing import Optional
 
+from pydantic import ConfigDict
+
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.model_providers.__base.ai_model import AIModel
 
@@ -12,6 +14,9 @@ class ModerationModel(AIModel):
     """
     model_type: ModelType = ModelType.MODERATION
 
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def invoke(self, model: str, credentials: dict,
                text: str, user: Optional[str] = None) \
            -> bool:

+ 5 - 0
api/core/model_runtime/model_providers/__base/speech2text_model.py

@@ -2,6 +2,8 @@ import os
 from abc import abstractmethod
 from typing import IO, Optional
 
+from pydantic import ConfigDict
+
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.model_providers.__base.ai_model import AIModel
 
@@ -12,6 +14,9 @@ class Speech2TextModel(AIModel):
     """
     model_type: ModelType = ModelType.SPEECH2TEXT
 
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def invoke(self, model: str, credentials: dict,
                file: IO[bytes], user: Optional[str] = None) \
            -> str:

+ 5 - 0
api/core/model_runtime/model_providers/__base/text2img_model.py

@@ -1,6 +1,8 @@
 from abc import abstractmethod
 from typing import IO, Optional
 
+from pydantic import ConfigDict
+
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.model_providers.__base.ai_model import AIModel
 
@@ -11,6 +13,9 @@ class Text2ImageModel(AIModel):
     """
     model_type: ModelType = ModelType.TEXT2IMG
 
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def invoke(self, model: str, credentials: dict, prompt: str, 
                model_parameters: dict, user: Optional[str] = None) \
            -> list[IO[bytes]]:

+ 5 - 0
api/core/model_runtime/model_providers/__base/text_embedding_model.py

@@ -2,6 +2,8 @@ import time
 from abc import abstractmethod
 from typing import Optional
 
+from pydantic import ConfigDict
+
 from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
 from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult
 from core.model_runtime.model_providers.__base.ai_model import AIModel
@@ -13,6 +15,9 @@ class TextEmbeddingModel(AIModel):
     """
     model_type: ModelType = ModelType.TEXT_EMBEDDING
 
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def invoke(self, model: str, credentials: dict,
                texts: list[str], user: Optional[str] = None) \
            -> TextEmbeddingResult:

+ 5 - 0
api/core/model_runtime/model_providers/__base/tts_model.py

@@ -4,6 +4,8 @@ import uuid
 from abc import abstractmethod
 from typing import Optional
 
+from pydantic import ConfigDict
+
 from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
 from core.model_runtime.errors.invoke import InvokeBadRequestError
 from core.model_runtime.model_providers.__base.ai_model import AIModel
@@ -15,6 +17,9 @@ class TTSModel(AIModel):
     """
     model_type: ModelType = ModelType.TTS
 
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def invoke(self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, streaming: bool,
                user: Optional[str] = None):
        """

+ 2 - 6
api/core/model_runtime/model_providers/model_provider_factory.py

@@ -2,7 +2,7 @@ import logging
 import os
 from typing import Optional
 
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from core.helper.module_import_helper import load_single_subclass_from_source
 from core.helper.position_helper import get_position_map, sort_to_dict_by_position_map
@@ -19,11 +19,7 @@ class ModelProviderExtension(BaseModel):
     provider_instance: ModelProvider
     name: str
     position: Optional[int] = None
-
-    class Config:
-        """Configuration for this pydantic object."""
-
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
 
 class ModelProviderFactory:
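For context, arbitrary_types_allowed=True is the pydantic 2 way to keep fields whose types pydantic cannot introspect, such as the ModelProvider instance held above. A small, self-contained sketch (ProviderImpl and ExtensionSketch are invented names, not code from this repository):

from pydantic import BaseModel, ConfigDict


class ProviderImpl:  # a plain class with no pydantic schema of its own
    pass


class ExtensionSketch(BaseModel):
    # Without arbitrary_types_allowed, pydantic 2 fails at class-definition time
    # because it cannot build a validation schema for ProviderImpl.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    provider_instance: ProviderImpl
    name: str


print(ExtensionSketch(provider_instance=ProviderImpl(), name="openai"))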

+ 1 - 1
api/core/model_runtime/utils/encoders.py

@@ -12,9 +12,9 @@ from typing import Any, Optional, Union
 from uuid import UUID
 
 from pydantic import BaseModel
-from pydantic.color import Color
 from pydantic.networks import AnyUrl, NameEmail
 from pydantic.types import SecretBytes, SecretStr
+from pydantic_extra_types.color import Color
 
 from ._compat import PYDANTIC_V2, Url, _model_dump
 

+ 1 - 1
api/core/model_runtime/utils/helper.py

@@ -6,4 +6,4 @@ def dump_model(model: BaseModel) -> dict:
     if hasattr(pydantic, 'model_dump'):
         return pydantic.model_dump(model)
     else:
-        return model.dict()
+        return model.model_dump()
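The dict()-to-model_dump() change above is part of a blanket rename in pydantic 2: dict(), json() and copy() survive only as deprecated aliases of model_dump(), model_dump_json() and model_copy(). A quick illustrative sketch (the Timeout model here is hypothetical):

from pydantic import BaseModel


class Timeout(BaseModel):
    connect: int = 10
    read: int = 60


t = Timeout()
print(t.model_dump())                     # replaces t.dict()  -> {'connect': 10, 'read': 60}
print(t.model_dump_json())                # replaces t.json()
print(t.model_copy(update={"read": 30}))  # replaces t.copy()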

+ 2 - 2
api/core/moderation/api/api.py

@@ -51,7 +51,7 @@ class ApiModeration(Moderation):
                 query=query
             )
 
-            result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_INPUT, params.dict())
+            result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_INPUT, params.model_dump())
             return ModerationInputsResult(**result)
 
         return ModerationInputsResult(flagged=flagged, action=ModerationAction.DIRECT_OUTPUT, preset_response=preset_response)
@@ -66,7 +66,7 @@ class ApiModeration(Moderation):
                 text=text
             )
 
-            result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_OUTPUT, params.dict())
+            result = self._get_config_by_requestor(APIBasedExtensionPoint.APP_MODERATION_OUTPUT, params.model_dump())
             return ModerationOutputsResult(**result)
 
         return ModerationOutputsResult(flagged=flagged, action=ModerationAction.DIRECT_OUTPUT, preset_response=preset_response)

+ 2 - 4
api/core/moderation/output_moderation.py

@@ -4,7 +4,7 @@ import time
 from typing import Any, Optional
 
 from flask import Flask, current_app
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
 from core.app.entities.queue_entities import QueueMessageReplaceEvent
@@ -33,9 +33,7 @@ class OutputModeration(BaseModel):
     buffer: str = ''
     is_final_chunk: bool = False
     final_output: Optional[str] = None
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
     def should_direct_output(self):
         return self.final_output is not None

+ 2 - 2
api/core/prompt/entities/advanced_prompt_entities.py

@@ -11,7 +11,7 @@ class ChatModelMessage(BaseModel):
     """
     text: str
     role: PromptMessageRole
-    edition_type: Optional[Literal['basic', 'jinja2']]
+    edition_type: Optional[Literal['basic', 'jinja2']] = None
 
 
 class CompletionModelPromptTemplate(BaseModel):
@@ -19,7 +19,7 @@ class CompletionModelPromptTemplate(BaseModel):
     Completion Model Prompt Template.
     """
     text: str
-    edition_type: Optional[Literal['basic', 'jinja2']]
+    edition_type: Optional[Literal['basic', 'jinja2']] = None
 
 
 class MemoryConfig(BaseModel):
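The recurring "= None" additions in this PR come from a behaviour change: pydantic 1 silently gave Optional fields a default of None, while pydantic 2 treats a bare Optional annotation as a required field that merely accepts None. A compact sketch with an invented model:

from typing import Optional

from pydantic import BaseModel, ValidationError


class V2Style(BaseModel):
    required_opt: Optional[str]          # no default: required in pydantic 2
    defaulted_opt: Optional[str] = None  # explicit default restores the 1.x behaviour


print(V2Style(required_opt=None).defaulted_opt)  # None

try:
    V2Style()
except ValidationError as err:
    print(err.errors()[0]["type"])  # "missing": required_opt was not supplied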

+ 2 - 2
api/core/rag/datasource/vdb/milvus/milvus_vector.py

@@ -4,7 +4,7 @@ from typing import Any, Optional
 from uuid import uuid4
 
 from flask import current_app
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 from pymilvus import MilvusClient, MilvusException, connections
 
 from core.rag.datasource.entity.embedding import Embeddings
@@ -28,7 +28,7 @@ class MilvusConfig(BaseModel):
     batch_size: int = 100
     database: str = "default"
 
-    @root_validator()
+    @model_validator(mode='before')
     def validate_config(cls, values: dict) -> dict:
         if not values.get('host'):
             raise ValueError("config MILVUS_HOST is required")
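Here and in the other vector-store configs, the pydantic 1 @root_validator() becomes @model_validator(mode='before'), which runs on the raw input before any field validation. A minimal sketch of the same shape (HostConfig is a made-up name, not the project's class):

from pydantic import BaseModel, model_validator


class HostConfig(BaseModel):
    host: str
    port: int = 19530

    @model_validator(mode='before')
    @classmethod
    def validate_config(cls, values: dict) -> dict:
        # Before-mode: receives the raw input mapping, prior to field validation.
        if not values.get('host'):
            raise ValueError('config host is required')
        return values


print(HostConfig(host='127.0.0.1'))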

+ 2 - 2
api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py

@@ -6,7 +6,7 @@ from uuid import UUID, uuid4
 from flask import current_app
 from numpy import ndarray
 from pgvecto_rs.sqlalchemy import Vector
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 from sqlalchemy import Float, String, create_engine, insert, select, text
 from sqlalchemy import text as sql_text
 from sqlalchemy.dialects import postgresql
@@ -31,7 +31,7 @@ class PgvectoRSConfig(BaseModel):
     password: str
     database: str
 
-    @root_validator()
+    @model_validator(mode='before')
     def validate_config(cls, values: dict) -> dict:
         if not values['host']:
             raise ValueError("config PGVECTO_RS_HOST is required")

+ 2 - 2
api/core/rag/datasource/vdb/pgvector/pgvector.py

@@ -6,7 +6,7 @@ from typing import Any
 import psycopg2.extras
 import psycopg2.pool
 from flask import current_app
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 
 from core.rag.datasource.entity.embedding import Embeddings
 from core.rag.datasource.vdb.vector_base import BaseVector
@@ -24,7 +24,7 @@ class PGVectorConfig(BaseModel):
     password: str
     database: str
 
-    @root_validator()
+    @model_validator(mode='before')
     def validate_config(cls, values: dict) -> dict:
         if not values["host"]:
             raise ValueError("config PGVECTOR_HOST is required")

+ 2 - 2
api/core/rag/datasource/vdb/qdrant/qdrant_vector.py

@@ -40,9 +40,9 @@ if TYPE_CHECKING:
 
 class QdrantConfig(BaseModel):
     endpoint: str
-    api_key: Optional[str]
+    api_key: Optional[str] = None
     timeout: float = 20
-    root_path: Optional[str]
+    root_path: Optional[str] = None
     grpc_port: int = 6334
     prefer_grpc: bool = False
 

+ 2 - 2
api/core/rag/datasource/vdb/relyt/relyt_vector.py

@@ -3,7 +3,7 @@ import uuid
 from typing import Any, Optional
 
 from flask import current_app
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 from sqlalchemy import Column, Sequence, String, Table, create_engine, insert
 from sqlalchemy import text as sql_text
 from sqlalchemy.dialects.postgresql import JSON, TEXT
@@ -33,7 +33,7 @@ class RelytConfig(BaseModel):
     password: str
     database: str
 
-    @root_validator()
+    @model_validator(mode='before')
     def validate_config(cls, values: dict) -> dict:
         if not values['host']:
             raise ValueError("config RELYT_HOST is required")

+ 2 - 2
api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py

@@ -4,7 +4,7 @@ from typing import Any
 
 import sqlalchemy
 from flask import current_app
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 from sqlalchemy import JSON, TEXT, Column, DateTime, String, Table, create_engine, insert
 from sqlalchemy import text as sql_text
 from sqlalchemy.orm import Session, declarative_base
@@ -27,7 +27,7 @@ class TiDBVectorConfig(BaseModel):
     password: str
     database: str
 
-    @root_validator()
+    @model_validator(mode='before')
     def validate_config(cls, values: dict) -> dict:
         if not values['host']:
             raise ValueError("config TIDB_VECTOR_HOST is required")

+ 3 - 3
api/core/rag/datasource/vdb/weaviate/weaviate_vector.py

@@ -5,7 +5,7 @@ from typing import Any, Optional
 import requests
 import weaviate
 from flask import current_app
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, model_validator
 
 from core.rag.datasource.entity.embedding import Embeddings
 from core.rag.datasource.vdb.field import Field
@@ -19,10 +19,10 @@ from models.dataset import Dataset
 
 class WeaviateConfig(BaseModel):
     endpoint: str
-    api_key: Optional[str]
+    api_key: Optional[str] = None
     batch_size: int = 100
 
-    @root_validator()
+    @model_validator(mode='before')
     def validate_config(cls, values: dict) -> dict:
         if not values['endpoint']:
             raise ValueError("config WEAVIATE_ENDPOINT is required")

+ 5 - 7
api/core/rag/extractor/blod/blod.py

@@ -14,7 +14,7 @@ from io import BufferedReader, BytesIO
 from pathlib import PurePath
 from typing import Any, Optional, Union
 
-from pydantic import BaseModel, root_validator
+from pydantic import BaseModel, ConfigDict, model_validator
 
 PathLike = Union[str, PurePath]
 
@@ -29,7 +29,7 @@ class Blob(BaseModel):
     Inspired by: https://developer.mozilla.org/en-US/docs/Web/API/Blob
     """
 
-    data: Union[bytes, str, None]  # Raw data
+    data: Union[bytes, str, None] = None  # Raw data
     mimetype: Optional[str] = None  # Not to be confused with a file extension
     encoding: str = "utf-8"  # Use utf-8 as default encoding, if decoding to string
     # Location where the original content was found
@@ -37,17 +37,15 @@ class Blob(BaseModel):
     # Useful for situations where downstream code assumes it must work with file paths
     # rather than in-memory content.
     path: Optional[PathLike] = None
-
-    class Config:
-        arbitrary_types_allowed = True
-        frozen = True
+    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
 
     @property
     def source(self) -> Optional[str]:
         """The source location of the blob as string if known otherwise none."""
         return str(self.path) if self.path else None
 
-    @root_validator(pre=True)
+    @model_validator(mode="before")
+    @classmethod
     def check_blob_is_valid(cls, values: Mapping[str, Any]) -> Mapping[str, Any]:
         """Verify that either data or path is provided."""
         if "data" not in values and "path" not in values:

+ 3 - 7
api/core/rag/extractor/entity/extract_setting.py

@@ -1,4 +1,4 @@
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from models.dataset import Document
 from models.model import UploadFile
@@ -13,9 +13,7 @@ class NotionInfo(BaseModel):
     notion_page_type: str
     document: Document = None
     tenant_id: str
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
     def __init__(self, **data) -> None:
         super().__init__(**data)
@@ -29,9 +27,7 @@ class ExtractSetting(BaseModel):
     upload_file: UploadFile = None
     notion_info: NotionInfo = None
     document_model: str = None
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
     def __init__(self, **data) -> None:
         super().__init__(**data)

+ 1 - 1
api/core/tools/entities/api_entities.py

@@ -13,7 +13,7 @@ class UserTool(BaseModel):
     name: str # identifier
     label: I18nObject # label
     description: I18nObject
-    parameters: Optional[list[ToolParameter]]
+    parameters: Optional[list[ToolParameter]] = None
     labels: list[str] = None
 
 UserToolProviderTypeLiteral = Optional[Literal[

+ 10 - 2
api/core/tools/entities/tool_entities.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Any, Optional, Union, cast
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, field_validator
 
 from core.tools.entities.common_entities import I18nObject
 
@@ -116,6 +116,14 @@ class ToolParameterOption(BaseModel):
     value: str = Field(..., description="The value of the option")
     label: I18nObject = Field(..., description="The label of the option")
 
+    @classmethod
+    @field_validator('value', mode='before')
+    def transform_id_to_str(cls, value) -> str:
+        if isinstance(value, bool):
+            return str(value)
+        else:
+            return value
+
 
 class ToolParameter(BaseModel):
     class ToolParameterType(str, Enum):
@@ -278,7 +286,7 @@ class ToolRuntimeVariablePool(BaseModel):
             'conversation_id': self.conversation_id,
             'user_id': self.user_id,
             'tenant_id': self.tenant_id,
-            'pool': [variable.dict() for variable in self.pool],
+            'pool': [variable.model_dump() for variable in self.pool],
         }
     
     def set_text(self, tool_name: str, name: str, value: str) -> None:
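The new transform_id_to_str hook is a before-mode field validator: it sees the raw value and can coerce it before the str check runs. For reference, the pydantic 2 documentation stacks the decorators with @field_validator on the outside and @classmethod directly above the function; a minimal sketch of that documented form (OptionSketch is an invented name):

from pydantic import BaseModel, field_validator


class OptionSketch(BaseModel):
    value: str

    @field_validator('value', mode='before')
    @classmethod
    def coerce_bool_to_str(cls, v):
        # Runs on the raw input, so a bool can become a string
        # before the str field validation happens.
        return str(v) if isinstance(v, bool) else v


print(OptionSketch(value=True).value)  # "True"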

+ 8 - 3
api/core/tools/provider/builtin/aippt/tools/aippt.py

@@ -4,7 +4,7 @@ from hmac import new as hmac_new
 from json import loads as json_loads
 from threading import Lock
 from time import sleep, time
-from typing import Any
+from typing import Any, Optional
 
 from httpx import get, post
 from requests import get as requests_get
@@ -22,9 +22,9 @@ class AIPPTGenerateTool(BuiltinTool):
 
     _api_base_url = URL('https://co.aippt.cn/api')
     _api_token_cache = {}
-    _api_token_cache_lock = Lock()
+    _api_token_cache_lock:Optional[Lock] = None
     _style_cache = {}
-    _style_cache_lock = Lock()
+    _style_cache_lock:Optional[Lock] = None
 
     _task = {}
     _task_type_map = {
@@ -32,6 +32,11 @@ class AIPPTGenerateTool(BuiltinTool):
         'markdown': 7,
     }
 
+    def __init__(self, **kwargs: Any):
+        super().__init__(**kwargs)
+        self._api_token_cache_lock = Lock()
+        self._style_cache_lock = Lock()
+
     def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]:
         """
         Invokes the AIPPT generate tool with the given user ID and tool parameters.
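The commit moves the two class-level Lock() objects into __init__ behind Optional annotations. A different pattern pydantic itself offers for per-instance, non-validated state, shown here only as a hedged alternative (CacheSketch is a made-up class, not what the commit does), is a private attribute with a default_factory:

from threading import Lock
from typing import Any

from pydantic import BaseModel, PrivateAttr


class CacheSketch(BaseModel):
    # Leading-underscore names are private attributes, not validated fields;
    # the factory runs once per instance, so each object gets its own lock.
    _cache_lock: Any = PrivateAttr(default_factory=Lock)

    def read_token(self) -> str:
        with self._cache_lock:
            return "token"


print(CacheSketch().read_token())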

+ 4 - 8
api/core/tools/provider/builtin/arxiv/tools/arxiv_search.py

@@ -44,14 +44,10 @@ class ArxivAPIWrapper(BaseModel):
             arxiv.run("tree of thought llm)
     """
 
-    arxiv_search = arxiv.Search  #: :meta private:
-    arxiv_exceptions = (
-        arxiv.ArxivError,
-        arxiv.UnexpectedEmptyPageError,
-        arxiv.HTTPError,
-    )  # :meta private:
+    arxiv_search: type[arxiv.Search] = arxiv.Search  #: :meta private:
+    arxiv_http_error: tuple[type[Exception]] = (arxiv.ArxivError, arxiv.UnexpectedEmptyPageError, arxiv.HTTPError)
     top_k_results: int = 3
-    ARXIV_MAX_QUERY_LENGTH = 300
+    ARXIV_MAX_QUERY_LENGTH: int = 300
     load_max_docs: int = 100
     load_all_available_meta: bool = False
     doc_content_chars_max: Optional[int] = 4000
@@ -73,7 +69,7 @@ class ArxivAPIWrapper(BaseModel):
             results = self.arxiv_search(  # type: ignore
                 query[: self.ARXIV_MAX_QUERY_LENGTH], max_results=self.top_k_results
             ).results()
-        except self.arxiv_exceptions as ex:
+        except self.arxiv_http_error as ex:
             return f"Arxiv exception: {ex}"
         docs = [
             f"Published: {result.updated.date()}\n"

+ 1 - 1
api/core/tools/provider/builtin/bing/tools/bing_web_search.py

@@ -8,7 +8,7 @@ from core.tools.tool.builtin_tool import BuiltinTool
 
 
 class BingSearchTool(BuiltinTool):
-    url = 'https://api.bing.microsoft.com/v7.0/search'
+    url: str = 'https://api.bing.microsoft.com/v7.0/search'
 
     def _invoke_bing(self, 
                      user_id: str,

+ 3 - 3
api/core/tools/provider/builtin/brave/tools/brave_search.py

@@ -15,7 +15,7 @@ class BraveSearchWrapper(BaseModel):
     """The API key to use for the Brave search engine."""
     search_kwargs: dict = Field(default_factory=dict)
     """Additional keyword arguments to pass to the search request."""
-    base_url = "https://api.search.brave.com/res/v1/web/search"
+    base_url: str = "https://api.search.brave.com/res/v1/web/search"
     """The base URL for the Brave search engine."""
 
     def run(self, query: str) -> str:
@@ -58,8 +58,8 @@ class BraveSearchWrapper(BaseModel):
 class BraveSearch(BaseModel):
     """Tool that queries the BraveSearch."""
 
-    name = "brave_search"
-    description = (
+    name: str = "brave_search"
+    description: str = (
         "a search engine. "
         "useful for when you need to answer questions about current events."
         " input should be a search query."

+ 174 - 0
api/core/tools/provider/builtin/duckduckgo/tools/duckduckgo_search.py

@@ -0,0 +1,174 @@
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+
+class DuckDuckGoSearchAPIWrapper(BaseModel):
+    """Wrapper for DuckDuckGo Search API.
+
+    Free and does not require any setup.
+    """
+
+    region: Optional[str] = "wt-wt"
+    safesearch: str = "moderate"
+    time: Optional[str] = "y"
+    max_results: int = 5
+
+    def get_snippets(self, query: str) -> list[str]:
+        """Run query through DuckDuckGo and return concatenated results."""
+        from duckduckgo_search import DDGS
+
+        with DDGS() as ddgs:
+            results = ddgs.text(
+                query,
+                region=self.region,
+                safesearch=self.safesearch,
+                timelimit=self.time,
+            )
+            if results is None:
+                return ["No good DuckDuckGo Search Result was found"]
+            snippets = []
+            for i, res in enumerate(results, 1):
+                if res is not None:
+                    snippets.append(res["body"])
+                if len(snippets) == self.max_results:
+                    break
+        return snippets
+
+    def run(self, query: str) -> str:
+        snippets = self.get_snippets(query)
+        return " ".join(snippets)
+
+    def results(
+        self, query: str, num_results: int, backend: str = "api"
+    ) -> list[dict[str, str]]:
+        """Run query through DuckDuckGo and return metadata.
+
+        Args:
+            query: The query to search for.
+            num_results: The number of results to return.
+
+        Returns:
+            A list of dictionaries with the following keys:
+                snippet - The description of the result.
+                title - The title of the result.
+                link - The link to the result.
+        """
+        from duckduckgo_search import DDGS
+
+        with DDGS() as ddgs:
+            results = ddgs.text(
+                query,
+                region=self.region,
+                safesearch=self.safesearch,
+                timelimit=self.time,
+                backend=backend,
+            )
+            if results is None:
+                return [{"Result": "No good DuckDuckGo Search Result was found"}]
+
+            def to_metadata(result: dict) -> dict[str, str]:
+                if backend == "news":
+                    return {
+                        "date": result["date"],
+                        "title": result["title"],
+                        "snippet": result["body"],
+                        "source": result["source"],
+                        "link": result["url"],
+                    }
+                return {
+                    "snippet": result["body"],
+                    "title": result["title"],
+                    "link": result["href"],
+                }
+
+            formatted_results = []
+            for i, res in enumerate(results, 1):
+                if res is not None:
+                    formatted_results.append(to_metadata(res))
+                if len(formatted_results) == num_results:
+                    break
+        return formatted_results
+
+
+class DuckDuckGoSearchRun(BaseModel):
+    """Tool that queries the DuckDuckGo search API."""
+
+    name: str = "duckduckgo_search"
+    description: str = (
+        "A wrapper around DuckDuckGo Search. "
+        "Useful for when you need to answer questions about current events. "
+        "Input should be a search query."
+    )
+    api_wrapper: DuckDuckGoSearchAPIWrapper = Field(
+        default_factory=DuckDuckGoSearchAPIWrapper
+    )
+
+    def _run(
+        self,
+        query: str,
+    ) -> str:
+        """Use the tool."""
+        return self.api_wrapper.run(query)
+
+
+class DuckDuckGoSearchResults(BaseModel):
+    """Tool that queries the DuckDuckGo search API and gets back json."""
+
+    name: str = "DuckDuckGo Results JSON"
+    description: str = (
+        "A wrapper around Duck Duck Go Search. "
+        "Useful for when you need to answer questions about current events. "
+        "Input should be a search query. Output is a JSON array of the query results"
+    )
+    num_results: int = 4
+    api_wrapper: DuckDuckGoSearchAPIWrapper = Field(
+        default_factory=DuckDuckGoSearchAPIWrapper
+    )
+    backend: str = "api"
+
+    def _run(
+        self,
+        query: str,
+    ) -> str:
+        """Use the tool."""
+        res = self.api_wrapper.results(query, self.num_results, backend=self.backend)
+        res_strs = [", ".join([f"{k}: {v}" for k, v in d.items()]) for d in res]
+        return ", ".join([f"[{rs}]" for rs in res_strs])
+
+class DuckDuckGoInput(BaseModel):
+    query: str = Field(..., description="Search query.")
+
+class DuckDuckGoSearchTool(BuiltinTool):
+    """
+    Tool for performing a search using DuckDuckGo search engine.
+    """
+
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage | list[ToolInvokeMessage]:
+        """
+        Invoke the DuckDuckGo search tool.
+
+        Args:
+            user_id (str): The ID of the user invoking the tool.
+            tool_parameters (dict[str, Any]): The parameters for the tool invocation.
+
+        Returns:
+            ToolInvokeMessage | list[ToolInvokeMessage]: The result of the tool invocation.
+        """
+        query = tool_parameters.get('query', '')
+
+        if not query:
+            return self.create_text_message('Please input query')
+
+        tool = DuckDuckGoSearchRun(args_schema=DuckDuckGoInput)
+
+        result = tool._run(query)
+
+        return self.create_text_message(self.summary(user_id=user_id, content=result))
+    

+ 2 - 2
api/core/tools/provider/builtin/firecrawl/tools/crawl.yaml

@@ -69,10 +69,10 @@ parameters:
     options:
       - value: true
         label:
-          en_US: Yes
+          en_US: 'Yes'
           zh_Hans: 是
       - value: false
         label:
-          en_US: No
+          en_US: 'No'
           zh_Hans: 否
     default: false
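The quoting in these YAML option labels matters because YAML 1.1 resolvers, PyYAML included, read a bare Yes or No as a boolean rather than a string; quoting keeps the label text intact. A quick demonstration (assuming PyYAML, which is how I would expect these tool definitions to be parsed):

import yaml  # PyYAML follows YAML 1.1 boolean resolution

print(yaml.safe_load("label: Yes"))    # {'label': True}  - the string is gone
print(yaml.safe_load("label: 'Yes'"))  # {'label': 'Yes'} - quoting preserves it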

+ 7 - 7
api/core/tools/provider/builtin/pubmed/tools/pubmed_search.py

@@ -28,15 +28,15 @@ class PubMedAPIWrapper(BaseModel):
           if False: the `metadata` gets only the most informative fields.
     """
 
-    base_url_esearch = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?"
-    base_url_efetch = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?"
-    max_retry = 5
-    sleep_time = 0.2
+    base_url_esearch: str = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?"
+    base_url_efetch: str = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?"
+    max_retry: int = 5
+    sleep_time: float = 0.2
 
     # Default values for the parameters
     top_k_results: int = 3
     load_max_docs: int = 25
-    ARXIV_MAX_QUERY_LENGTH = 300
+    ARXIV_MAX_QUERY_LENGTH: int = 300
     doc_content_chars_max: int = 2000
     load_all_available_meta: bool = False
     email: str = "your_email@example.com"
@@ -160,8 +160,8 @@ class PubMedAPIWrapper(BaseModel):
 class PubmedQueryRun(BaseModel):
     """Tool that searches the PubMed API."""
 
-    name = "PubMed"
-    description = (
+    name: str = "PubMed"
+    description: str = (
         "A wrapper around PubMed.org "
         "Useful for when you need to answer questions about Physics, Mathematics, "
         "Computer Science, Quantitative Biology, Quantitative Finance, Statistics, "

+ 1 - 1
api/core/tools/provider/builtin/qrcode/tools/qrcode_generator.py

@@ -12,7 +12,7 @@ from core.tools.tool.builtin_tool import BuiltinTool
 
 
 class QRCodeGeneratorTool(BuiltinTool):
-    error_correction_levels = {
+    error_correction_levels: dict[str, int] = {
         'L': ERROR_CORRECT_L,  # <=7%
         'M': ERROR_CORRECT_M,  # <=15%
         'Q': ERROR_CORRECT_Q,  # <=25%

+ 3 - 3
api/core/tools/provider/builtin/searxng/tools/searxng_search.py

@@ -24,21 +24,21 @@ class SearXNGSearchTool(BuiltinTool):
     Tool for performing a search using SearXNG engine.
     """
 
-    SEARCH_TYPE = {
+    SEARCH_TYPE: dict[str, str] = {
         "page": "general",
         "news": "news",
         "image": "images",
         # "video": "videos",
         # "file": "files"
     }
-    LINK_FILED = {
+    LINK_FILED: dict[str, str] = {
         "page": "url",
         "news": "url",
         "image": "img_src",
         # "video": "iframe_src",
         # "file": "magnetlink"
     }
-    TEXT_FILED = {
+    TEXT_FILED: dict[str, str] = {
         "page": "content",
         "news": "content",
         "image": "img_src",

+ 1 - 1
api/core/tools/provider/builtin/stability/tools/text2image.py

@@ -11,7 +11,7 @@ class StableDiffusionTool(BuiltinTool, BaseStabilityAuthorization):
     """
     This class is responsible for providing the stable diffusion tool.
     """
-    model_endpoint_map = {
+    model_endpoint_map: dict[str, str] = {
         'sd3': 'https://api.stability.ai/v2beta/stable-image/generate/sd3',
         'sd3-turbo': 'https://api.stability.ai/v2beta/stable-image/generate/sd3',
         'core': 'https://api.stability.ai/v2beta/stable-image/generate/core',

+ 2 - 2
api/core/tools/provider/builtin/stackexchange/tools/searchStackExQuestions.yaml

@@ -98,11 +98,11 @@ parameters:
     options:
       - value: true
         label:
-          en_US: Yes
+          en_US: 'Yes'
           zh_Hans: 是
       - value: false
         label:
-          en_US: No
+          en_US: 'No'
           zh_Hans: 否
     default: true
   - name: pagesize

+ 12 - 12
api/core/tools/provider/builtin/tavily/tools/tavily_search.yaml

@@ -64,14 +64,14 @@ parameters:
     options:
       - value: true
         label:
-          en_US: Yes
+          en_US: 'Yes'
           zh_Hans: 是
-          pt_BR: Yes
+          pt_BR: 'Yes'
       - value: false
         label:
-          en_US: No
+          en_US: 'No'
           zh_Hans: 否
-          pt_BR: No
+          pt_BR: 'No'
     default: false
   - name: include_answer
     type: boolean
@@ -88,14 +88,14 @@ parameters:
     options:
       - value: true
         label:
-          en_US: Yes
+          en_US: 'Yes'
           zh_Hans: 是
-          pt_BR: Yes
+          pt_BR: 'Yes'
       - value: false
         label:
-          en_US: No
+          en_US: 'No'
           zh_Hans: 否
-          pt_BR: No
+          pt_BR: 'No'
     default: false
   - name: include_raw_content
     type: boolean
@@ -112,14 +112,14 @@ parameters:
     options:
       - value: true
         label:
-          en_US: Yes
+          en_US: 'Yes'
           zh_Hans: 是
-          pt_BR: Yes
+          pt_BR: 'Yes'
       - value: false
         label:
-          en_US: No
+          en_US: 'No'
           zh_Hans: 否
-          pt_BR: No
+          pt_BR: 'No'
     default: false
   - name: max_results
     type: number

+ 4 - 3
api/core/tools/provider/builtin/twilio/tools/send_message.py

@@ -1,6 +1,6 @@
 from typing import Any, Optional, Union
 
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
 
 from core.tools.entities.tool_entities import ToolInvokeMessage
 from core.tools.tool.builtin_tool import BuiltinTool
@@ -15,7 +15,7 @@ class TwilioAPIWrapper(BaseModel):
     named parameters to the constructor.
     """
 
-    client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
     account_sid: Optional[str] = None
     """Twilio account string identifier."""
     auth_token: Optional[str] = None
@@ -32,7 +32,8 @@ class TwilioAPIWrapper(BaseModel):
         must be empty.
     """
 
-    @validator("client", pre=True, always=True)
+    @classmethod
+    @field_validator('client', mode='before')
     def set_validator(cls, values: dict) -> dict:
         """Validate that api key and python package exists in environment."""
         try:

+ 2 - 2
api/core/tools/provider/builtin/webscraper/tools/webscraper.yaml

@@ -51,10 +51,10 @@ parameters:
     options:
       - value: true
         label:
-          en_US: Yes
+          en_US: 'Yes'
           zh_Hans: 是
       - value: false
         label:
-          en_US: No
+          en_US: 'No'
           zh_Hans: 否
     default: false

+ 3 - 3
api/core/tools/tool/api_tool.py

@@ -32,10 +32,10 @@ class ApiTool(Tool):
             :return: the new tool
         """
         return self.__class__(
-            identity=self.identity.copy() if self.identity else None,
+            identity=self.identity.model_copy() if self.identity else None,
             parameters=self.parameters.copy() if self.parameters else None,
-            description=self.description.copy() if self.description else None,
-            api_bundle=self.api_bundle.copy() if self.api_bundle else None,
+            description=self.description.model_copy() if self.description else None,
+            api_bundle=self.api_bundle.model_copy() if self.api_bundle else None,
             runtime=Tool.Runtime(**runtime)
         )
     

+ 2 - 4
api/core/tools/tool/dataset_retriever/dataset_retriever_base_tool.py

@@ -2,7 +2,7 @@ from abc import abstractmethod
 from typing import Any, Optional
 
 from msal_extensions.persistence import ABC
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler
 
@@ -17,9 +17,7 @@ class DatasetRetrieverBaseTool(BaseModel, ABC):
     hit_callbacks: list[DatasetIndexToolCallbackHandler] = []
     return_resource: bool
     retriever_from: str
-
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
     @abstractmethod
     def _run(

+ 10 - 5
api/core/tools/tool/tool.py

@@ -3,7 +3,8 @@ from copy import deepcopy
 from enum import Enum
 from typing import Any, Optional, Union
 
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, ConfigDict, field_validator
+from pydantic_core.core_schema import ValidationInfo
 
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.file.file_obj import FileVar
@@ -28,8 +29,12 @@ class Tool(BaseModel, ABC):
     description: ToolDescription = None
     is_team_authorization: bool = False
 
-    @validator('parameters', pre=True, always=True)
-    def set_parameters(cls, v, values):
+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
+    @classmethod
+    @field_validator('parameters', mode='before')
+    def set_parameters(cls, v, validation_info: ValidationInfo) -> list[ToolParameter]:
         return v or []
 
     class Runtime(BaseModel):
@@ -65,9 +70,9 @@ class Tool(BaseModel, ABC):
             :return: the new tool
         """
         return self.__class__(
-            identity=self.identity.copy() if self.identity else None,
+            identity=self.identity.model_copy() if self.identity else None,
             parameters=self.parameters.copy() if self.parameters else None,
-            description=self.description.copy() if self.description else None,
+            description=self.description.model_copy() if self.description else None,
             runtime=Tool.Runtime(**runtime),
         )
     

+ 1 - 1
api/core/workflow/nodes/code/entities.py

@@ -14,7 +14,7 @@ class CodeNodeData(BaseNodeData):
     """
     class Output(BaseModel):
         type: Literal['string', 'number', 'object', 'array[string]', 'array[number]', 'array[object]']
-        children: Optional[dict[str, 'Output']]
+        children: Optional[dict[str, 'Output']] = None
 
     variables: list[VariableSelector]
     code_language: Literal[CodeLanguage.PYTHON3, CodeLanguage.JAVASCRIPT]

+ 9 - 6
api/core/workflow/nodes/http_request/entities.py

@@ -1,7 +1,7 @@
 import os
 from typing import Literal, Optional, Union
 
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
 
 from core.workflow.entities.base_node_data_entities import BaseNodeData
 
@@ -14,15 +14,18 @@ class HttpRequestNodeData(BaseNodeData):
     Code Node Data.
     """
     class Authorization(BaseModel):
+        # TODO[pydantic]: The `Config` class inherits from another class, please create the `model_config` manually.
+        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
         class Config(BaseModel):
             type: Literal[None, 'basic', 'bearer', 'custom']
-            api_key: Union[None, str]
-            header: Union[None, str]
+            api_key: Union[None, str] = None
+            header: Union[None, str] = None
 
         type: Literal['no-auth', 'api-key']
         config: Optional[Config]
 
-        @validator('config', always=True, pre=True)
+        @classmethod
+        @field_validator('config', mode='before')
         def check_config(cls, v, values):
             """
             Check config, if type is no-auth, config should be None, otherwise it should be a dict.
@@ -37,7 +40,7 @@ class HttpRequestNodeData(BaseNodeData):
 
     class Body(BaseModel):
         type: Literal['none', 'form-data', 'x-www-form-urlencoded', 'raw-text', 'json']
-        data: Union[None, str]
+        data: Union[None, str] = None
 
     class Timeout(BaseModel):
         connect: Optional[int] = MAX_CONNECT_TIMEOUT
@@ -50,5 +53,5 @@ class HttpRequestNodeData(BaseNodeData):
     headers: str
     params: str
     body: Optional[Body]
-    timeout: Optional[Timeout]
+    timeout: Optional[Timeout] = None
     mask_authorization_header: Optional[bool] = True
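One more migration detail visible in this hunk: the pydantic 1 validators received a values dict of previously validated fields, while @field_validator hands that state over through a ValidationInfo object (info.data). A hedged sketch of the pattern, not the project's code:

from typing import Optional

from pydantic import BaseModel, ValidationInfo, field_validator


class AuthSketch(BaseModel):
    type: str                    # e.g. 'no-auth' or 'api-key'
    config: Optional[dict] = None

    @field_validator('config', mode='before')
    @classmethod
    def check_config(cls, v, info: ValidationInfo):
        # info.data carries the already-validated fields, which is what the
        # pydantic 1 "values" argument used to provide.
        if info.data.get('type') == 'no-auth':
            return None
        return v or {}


print(AuthSketch(type='no-auth', config={'api_key': 'x'}).config)  # None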

+ 1 - 1
api/core/workflow/nodes/http_request/http_request_node.py

@@ -39,7 +39,7 @@ class HttpRequestNode(BaseNode):
                     "type": "none"
                 },
                 "timeout": {
-                    **HTTP_REQUEST_DEFAULT_TIMEOUT.dict(),
+                    **HTTP_REQUEST_DEFAULT_TIMEOUT.model_dump(),
                     "max_connect_timeout": MAX_CONNECT_TIMEOUT,
                     "max_read_timeout": MAX_READ_TIMEOUT,
                     "max_write_timeout": MAX_WRITE_TIMEOUT,

+ 1 - 1
api/core/workflow/nodes/iteration/entities.py

@@ -7,7 +7,7 @@ class IterationNodeData(BaseIterationNodeData):
     """
     Iteration Node Data.
     """
-    parent_loop_id: Optional[str] # redundant field, not used currently
+    parent_loop_id: Optional[str] = None # redundant field, not used currently
     iterator_selector: list[str] # variable selector
     output_selector: list[str] # output selector
 

+ 3 - 3
api/core/workflow/nodes/knowledge_retrieval/entities.py

@@ -18,7 +18,7 @@ class MultipleRetrievalConfig(BaseModel):
     Multiple Retrieval Config.
     """
     top_k: int
-    score_threshold: Optional[float]
+    score_threshold: Optional[float] = None
     reranking_model: RerankingModelConfig
 
 
@@ -47,5 +47,5 @@ class KnowledgeRetrievalNodeData(BaseNodeData):
     query_variable_selector: list[str]
     dataset_ids: list[str]
     retrieval_mode: Literal['single', 'multiple']
-    multiple_retrieval_config: Optional[MultipleRetrievalConfig]
-    single_retrieval_config: Optional[SingleRetrievalConfig]
+    multiple_retrieval_config: Optional[MultipleRetrievalConfig] = None
+    single_retrieval_config: Optional[SingleRetrievalConfig] = None

+ 10 - 8
api/core/workflow/nodes/parameter_extractor/entities.py

@@ -1,6 +1,6 @@
 from typing import Any, Literal, Optional

-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator

 from core.prompt.entities.advanced_prompt_entities import MemoryConfig
 from core.workflow.entities.base_node_data_entities import BaseNodeData
@@ -21,12 +21,13 @@ class ParameterConfig(BaseModel):
     """
     name: str
     type: Literal['string', 'number', 'bool', 'select', 'array[string]', 'array[number]', 'array[object]']
-    options: Optional[list[str]]
+    options: Optional[list[str]] = None
     description: str
     required: bool

-    @validator('name', pre=True, always=True)
-    def validate_name(cls, value):
+    @classmethod
+    @field_validator('name', mode='before')
+    def validate_name(cls, value) -> str:
         if not value:
             raise ValueError('Parameter name is required')
         if value in ['__reason', '__is_success']:
@@ -40,12 +41,13 @@ class ParameterExtractorNodeData(BaseNodeData):
     model: ModelConfig
     query: list[str]
     parameters: list[ParameterConfig]
-    instruction: Optional[str]
-    memory: Optional[MemoryConfig]
+    instruction: Optional[str] = None
+    memory: Optional[MemoryConfig] = None
     reasoning_mode: Literal['function_call', 'prompt']

-    @validator('reasoning_mode', pre=True, always=True)
-    def set_reasoning_mode(cls, v):
+    @classmethod
+    @field_validator('reasoning_mode', mode='before')
+    def set_reasoning_mode(cls, v) -> str:
         return v or 'function_call'

     def get_parameter_json_schema(self) -> dict:
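Pydantic 1.x's `@validator(..., pre=True, always=True)` maps to 2.x's `@field_validator(..., mode='before')`, as in the two validators above. The sketch below uses the decorator order shown in the Pydantic 2.x docs (`@field_validator` outermost); note that v1's `always=True` has no direct flag in 2.x, so the missing-value case is usually covered by giving the field a default, optionally with `Field(validate_default=True)`. Class and field names are illustrative:

```python
from typing import Optional

from pydantic import BaseModel, field_validator


class ParameterConfigSketch(BaseModel):
    name: str
    options: Optional[list[str]] = None

    # v1: @validator('name', pre=True, always=True)
    # v2: mode='before' runs against the raw input, before type coercion.
    @field_validator('name', mode='before')
    @classmethod
    def validate_name(cls, value):
        if not value:
            raise ValueError('Parameter name is required')
        return value


print(ParameterConfigSketch(name='city').name)  # city
```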

+ 2 - 2
api/core/workflow/nodes/question_classifier/entities.py

@@ -32,5 +32,5 @@ class QuestionClassifierNodeData(BaseNodeData):
     type: str = 'question-classifier'
     model: ModelConfig
     classes: list[ClassConfig]
-    instruction: Optional[str]
-    memory: Optional[MemoryConfig]
+    instruction: Optional[str] = None
+    memory: Optional[MemoryConfig] = None

+ 12 - 9
api/core/workflow/nodes/tool/entities.py

@@ -1,6 +1,7 @@
 from typing import Any, Literal, Union

-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
+from pydantic_core.core_schema import ValidationInfo

 from core.workflow.entities.base_node_data_entities import BaseNodeData

@@ -13,13 +14,14 @@ class ToolEntity(BaseModel):
     tool_label: str # redundancy
     tool_configurations: dict[str, Any]

-    @validator('tool_configurations', pre=True, always=True)
-    def validate_tool_configurations(cls, value, values):
+    @classmethod
+    @field_validator('tool_configurations', mode='before')
+    def validate_tool_configurations(cls, value, values: ValidationInfo) -> dict[str, Any]:
         if not isinstance(value, dict):
             raise ValueError('tool_configurations must be a dictionary')

-        for key in values.get('tool_configurations', {}).keys():
-            value = values.get('tool_configurations', {}).get(key)
+        for key in values.data.get('tool_configurations', {}).keys():
+            value = values.data.get('tool_configurations', {}).get(key)
             if not isinstance(value, str | int | float | bool):
                 raise ValueError(f'{key} must be a string')

@@ -30,10 +32,11 @@ class ToolNodeData(BaseNodeData, ToolEntity):
         value: Union[Any, list[str]]
         type: Literal['mixed', 'variable', 'constant']

-        @validator('type', pre=True, always=True)
-        def check_type(cls, value, values):
+        @classmethod
+        @field_validator('type', mode='before')
+        def check_type(cls, value, validation_info: ValidationInfo):
             typ = value
-            value = values.get('value')
+            value = validation_info.data.get('value')
             if typ == 'mixed' and not isinstance(value, str):
                 raise ValueError('value must be a string')
             elif typ == 'variable':
@@ -45,7 +48,7 @@ class ToolNodeData(BaseNodeData, ToolEntity):
             elif typ == 'constant' and not isinstance(value, str | int | float | bool):
                 raise ValueError('value must be a string, int, float, or bool')
             return typ
-        
+
     """
     Tool Node Schema
     """

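In Pydantic 1.x a validator could take a `values` dict holding the fields validated so far; in 2.x the second argument is a `ValidationInfo` object and that dict lives on its `.data` attribute, which is what the two validators above switch to. A standalone sketch; the class name is hypothetical, and `ValidationInfo` is also re-exported from `pydantic` itself, which is the import path used here:

```python
from typing import Any

from pydantic import BaseModel, ValidationInfo, field_validator


class ToolInputSketch(BaseModel):
    value: Any
    type: str

    @field_validator('type', mode='before')
    @classmethod
    def check_type(cls, typ, info: ValidationInfo):
        # v1 passed a plain dict ("values"); v2 exposes it as info.data.
        # Fields are validated in declaration order, so 'value' is already present.
        value = info.data.get('value')
        if typ == 'mixed' and not isinstance(value, str):
            raise ValueError('value must be a string')
        return typ


print(ToolInputSketch(value='hello', type='mixed').type)  # mixed
```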
+ 1 - 1
api/core/workflow/nodes/variable_aggregator/entities.py

@@ -30,4 +30,4 @@ class VariableAssignerNodeData(BaseNodeData):
     type: str = 'variable-assigner'
     output_type: str
     variables: list[list[str]]
-    advanced_settings: Optional[AdvancedSettings]
+    advanced_settings: Optional[AdvancedSettings] = None

+ 1 - 1
api/core/workflow/workflow_engine_manager.py

@@ -592,7 +592,7 @@ class WorkflowEngineManager:
                         node_data=current_iteration_node.node_data,
                         inputs=workflow_run_state.current_iteration_state.inputs,
                         predecessor_node_id=predecessor_node_id,
-                        metadata=workflow_run_state.current_iteration_state.metadata.dict()
+                        metadata=workflow_run_state.current_iteration_state.metadata.model_dump()
                     )

         # add steps

+ 1 - 1
api/events/event_handlers/deduct_quota_when_messaeg_created.py

@@ -13,7 +13,7 @@ def handle(sender, **kwargs):
     if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity):
         return

-    model_config = application_generate_entity.model_config
+    model_config = application_generate_entity.model_conf
     provider_model_bundle = model_config.provider_model_bundle
     provider_configuration = provider_model_bundle.configuration


+ 1 - 1
api/events/event_handlers/update_provider_last_used_at_when_messaeg_created.py

@@ -16,6 +16,6 @@ def handle(sender, **kwargs):

     db.session.query(Provider).filter(
         Provider.tenant_id == application_generate_entity.app_config.tenant_id,
-        Provider.provider_name == application_generate_entity.model_config.provider
+        Provider.provider_name == application_generate_entity.model_conf.provider
     ).update({'last_used': datetime.now(timezone.utc).replace(tzinfo=None)})
     db.session.commit()
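Both event handlers above move from `application_generate_entity.model_config` to `.model_conf`. Pydantic 2.x claims the `model_config` attribute on every `BaseModel` for its own configuration, and `model_`-prefixed names in general now sit in a protected namespace, so the entity field had to be renamed. A minimal sketch of the renamed field; the class name and payload are illustrative:

```python
from pydantic import BaseModel, ConfigDict


class AppGenerateEntitySketch(BaseModel):
    # Renamed from "model_config": Pydantic 2.x uses that attribute on every
    # BaseModel for its own settings, so it is no longer free for data.
    model_conf: dict

    # Fields that merely start with "model_" trigger a protected-namespace
    # warning; this opt-out silences it for this model.
    model_config = ConfigDict(protected_namespaces=())


entity = AppGenerateEntitySketch(model_conf={"provider": "openai"})
print(entity.model_conf["provider"])  # openai
```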

+ 195 - 48
api/poetry.lock

@@ -199,6 +199,17 @@ files = [
 [package.extras]
 dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"]

+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
 [[package]]
 name = "anthropic"
 version = "0.23.1"
@@ -787,6 +798,23 @@ typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "
 uv = ["uv (>=0.1.18)"]
 virtualenv = ["virtualenv (>=20.0.35)"]

+[[package]]
+name = "bump-pydantic"
+version = "0.8.0"
+description = "Convert Pydantic from V1 to V2 ♻"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "bump_pydantic-0.8.0-py3-none-any.whl", hash = "sha256:6cbb4deb5869a69baa5a477f28f3e2d8fb09b687e114c018bd54470590ae7bf7"},
+    {file = "bump_pydantic-0.8.0.tar.gz", hash = "sha256:6092e61930e85619e74eeb04131b4387feda16f02d8bb2e3cf9507fa492c69e9"},
+]
+
+[package.dependencies]
+libcst = ">=0.4.2"
+rich = "*"
+typer = ">=0.7.0"
+typing-extensions = "*"
+
 [[package]]
 name = "cachetools"
 version = "5.3.3"
@@ -3856,6 +3884,46 @@ files = [
 [package.dependencies]
 six = "*"

+[[package]]
+name = "libcst"
+version = "1.4.0"
+description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.12 programs."
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "libcst-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:279b54568ea1f25add50ea4ba3d76d4f5835500c82f24d54daae4c5095b986aa"},
+    {file = "libcst-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3401dae41fe24565387a65baee3887e31a44e3e58066b0250bc3f3ccf85b1b5a"},
+    {file = "libcst-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1989fa12d3cd79118ebd29ebe2a6976d23d509b1a4226bc3d66fcb7cb50bd5d"},
+    {file = "libcst-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:addc6d585141a7677591868886f6bda0577529401a59d210aa8112114340e129"},
+    {file = "libcst-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17d71001cb25e94cfe8c3d997095741a8c4aa7a6d234c0f972bc42818c88dfaf"},
+    {file = "libcst-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:2d47de16d105e7dd5f4e01a428d9f4dc1e71efd74f79766daf54528ce37f23c3"},
+    {file = "libcst-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6227562fc5c9c1efd15dfe90b0971ae254461b8b6b23c1b617139b6003de1c1"},
+    {file = "libcst-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3399e6c95df89921511b44d8c5bf6a75bcbc2d51f1f6429763609ba005c10f6b"},
+    {file = "libcst-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48601e3e590e2d6a7ab8c019cf3937c70511a78d778ab3333764531253acdb33"},
+    {file = "libcst-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f42797309bb725f0f000510d5463175ccd7155395f09b5e7723971b0007a976d"},
+    {file = "libcst-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb4e42ea107a37bff7f9fdbee9532d39f9ea77b89caa5c5112b37057b12e0838"},
+    {file = "libcst-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:9d0cc3c5a2a51fa7e1d579a828c0a2e46b2170024fd8b1a0691c8a52f3abb2d9"},
+    {file = "libcst-1.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7ece51d935bc9bf60b528473d2e5cc67cbb88e2f8146297e40ee2c7d80be6f13"},
+    {file = "libcst-1.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:81653dea1cdfa4c6520a7c5ffb95fa4d220cbd242e446c7a06d42d8636bfcbba"},
+    {file = "libcst-1.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6abce0e66bba2babfadc20530fd3688f672d565674336595b4623cd800b91ef"},
+    {file = "libcst-1.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da9d7dc83801aba3b8d911f82dc1a375db0d508318bad79d9fb245374afe068"},
+    {file = "libcst-1.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c54aa66c86d8ece9c93156a2cf5ca512b0dce40142fe9e072c86af2bf892411"},
+    {file = "libcst-1.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:62e2682ee1567b6a89c91853865372bf34f178bfd237853d84df2b87b446e654"},
+    {file = "libcst-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8ecdba8934632b4dadacb666cd3816627a6ead831b806336972ccc4ba7ca0e9"},
+    {file = "libcst-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8e54c777b8d27339b70f304d16fc8bc8674ef1bd34ed05ea874bf4921eb5a313"},
+    {file = "libcst-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:061d6855ef30efe38b8a292b7e5d57c8e820e71fc9ec9846678b60a934b53bbb"},
+    {file = "libcst-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb0abf627ee14903d05d0ad9b2c6865f1b21eb4081e2c7bea1033f85db2b8bae"},
+    {file = "libcst-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d024f44059a853b4b852cfc04fec33e346659d851371e46fc8e7c19de24d3da9"},
+    {file = "libcst-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:3c6a8faab9da48c5b371557d0999b4ca51f4f2cbd37ee8c2c4df0ac01c781465"},
+    {file = "libcst-1.4.0.tar.gz", hash = "sha256:449e0b16604f054fa7f27c3ffe86ea7ef6c409836fe68fe4e752a1894175db00"},
+]
+
+[package.dependencies]
+pyyaml = ">=5.2"
+
+[package.extras]
+dev = ["Sphinx (>=5.1.1)", "black (==23.12.1)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==2.1.0)", "flake8 (==7.0.0)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.4)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<1.6)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.6.0)", "usort (==1.0.8.post1)"]
+
 [[package]]
 name = "llvmlite"
 version = "0.42.0"
@@ -5666,55 +5734,134 @@ files = [

 [[package]]
 name = "pydantic"
-version = "1.10.16"
-description = "Data validation and settings management using python type hints"
+version = "2.7.4"
+description = "Data validation using Python type hints"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pydantic-1.10.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a539ac40551b01a85e899829aa43ca8036707474af8d74b48be288d4d2d2846"},
-    {file = "pydantic-1.10.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a4fcc7b0b8038dbda2dda642cff024032dfae24a7960cc58e57a39eb1949b9b"},
-    {file = "pydantic-1.10.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4660dd697de1ae2d4305a85161312611f64d5360663a9ba026cd6ad9e3fe14c3"},
-    {file = "pydantic-1.10.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:900a787c574f903a97d0bf52a43ff3b6cf4fa0119674bcfc0e5fd1056d388ad9"},
-    {file = "pydantic-1.10.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d30192a63e6d3334c3f0c0506dd6ae9f1dce7b2f8845518915291393a5707a22"},
-    {file = "pydantic-1.10.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:16cf23ed599ca5ca937e37ba50ab114e6b5c387eb43a6cc533701605ad1be611"},
-    {file = "pydantic-1.10.16-cp310-cp310-win_amd64.whl", hash = "sha256:8d23111f41d1e19334edd51438fd57933f3eee7d9d2fa8cc3f5eda515a272055"},
-    {file = "pydantic-1.10.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef287b8d7fc0e86a8bd1f902c61aff6ba9479c50563242fe88ba39692e98e1e0"},
-    {file = "pydantic-1.10.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b9ded699bfd3b3912d796ff388b0c607e6d35d41053d37aaf8fd6082c660de9a"},
-    {file = "pydantic-1.10.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daeb199814333e4426c5e86d7fb610f4e230289f28cab90eb4de27330bef93cf"},
-    {file = "pydantic-1.10.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5973843f1fa99ec6c3ac8d1a8698ac9340b35e45cca6c3e5beb5c3bd1ef15de6"},
-    {file = "pydantic-1.10.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6b8a7788a8528a558828fe4a48783cafdcf2612d13c491594a8161dc721629c"},
-    {file = "pydantic-1.10.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8abaecf54dacc9d991dda93c3b880d41092a8924cde94eeb811d7d9ab55df7d8"},
-    {file = "pydantic-1.10.16-cp311-cp311-win_amd64.whl", hash = "sha256:ddc7b682fbd23f051edc419dc6977e11dd2dbdd0cef9d05f0e15d1387862d230"},
-    {file = "pydantic-1.10.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:067c2b5539f7839653ad8c3d1fc2f1343338da8677b7b2172abf3cd3fdc8f719"},
-    {file = "pydantic-1.10.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d1fc943583c046ecad0ff5d6281ee571b64e11b5503d9595febdce54f38b290"},
-    {file = "pydantic-1.10.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18548b30ccebe71d380b0886cc44ea5d80afbcc155e3518792f13677ad06097d"},
-    {file = "pydantic-1.10.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4e92292f9580fc5ea517618580fac24e9f6dc5657196e977c194a8e50e14f5a9"},
-    {file = "pydantic-1.10.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5da8bc4bb4f85b8c97cc7f11141fddbbd29eb25e843672e5807e19cc3d7c1b7f"},
-    {file = "pydantic-1.10.16-cp37-cp37m-win_amd64.whl", hash = "sha256:a04ee1ea34172b87707a6ecfcdb120d7656892206b7c4dbdb771a73e90179fcb"},
-    {file = "pydantic-1.10.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4fa86469fd46e732242c7acb83282d33f83591a7e06f840481327d5bf6d96112"},
-    {file = "pydantic-1.10.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:89c2783dc261726fe7a5ce1121bce29a2f7eb9b1e704c68df2b117604e3b346f"},
-    {file = "pydantic-1.10.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78e59fa919fa7a192f423d190d8660c35dd444efa9216662273f36826765424b"},
-    {file = "pydantic-1.10.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7e82a80068c77f4b074032e031e642530b6d45cb8121fc7c99faa31fb6c6b72"},
-    {file = "pydantic-1.10.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d82d5956cee27a30e26a5b88d00a6a2a15a4855e13c9baf50175976de0dc282c"},
-    {file = "pydantic-1.10.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b7b99424cc0970ff08deccb549b5a6ec1040c0b449eab91723e64df2bd8fdca"},
-    {file = "pydantic-1.10.16-cp38-cp38-win_amd64.whl", hash = "sha256:d97a35e1ba59442775201657171f601a2879e63517a55862a51f8d67cdfc0017"},
-    {file = "pydantic-1.10.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9d91f6866fd3e303c632207813ef6bc4d86055e21c5e5a0a311983a9ac5f0192"},
-    {file = "pydantic-1.10.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8d3c71d14c8bd26d2350c081908dbf59d5a6a8f9596d9ef2b09cc1e61c8662b"},
-    {file = "pydantic-1.10.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b73e6386b439b4881d79244e9fc1e32d1e31e8d784673f5d58a000550c94a6c0"},
-    {file = "pydantic-1.10.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f039881fb2ef86f6de6eacce6e71701b47500355738367413ccc1550b2a69cf"},
-    {file = "pydantic-1.10.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3895ddb26f22bdddee7e49741486aa7b389258c6f6771943e87fc00eabd79134"},
-    {file = "pydantic-1.10.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55b945da2756b5cef93d792521ad0d457fdf2f69fd5a2d10a27513f5281717dd"},
-    {file = "pydantic-1.10.16-cp39-cp39-win_amd64.whl", hash = "sha256:22dd265c77c3976a34be78409b128cb84629284dfd1b69d2fa1507a36f84dc8b"},
-    {file = "pydantic-1.10.16-py3-none-any.whl", hash = "sha256:aa2774ba5412fd1c5cb890d08e8b0a3bb5765898913ba1f61a65a4810f03cf29"},
-    {file = "pydantic-1.10.16.tar.gz", hash = "sha256:8bb388f6244809af69ee384900b10b677a69f1980fdc655ea419710cffcb5610"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.2.0"
-
-[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
+    {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
+    {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.4.0"
+pydantic-core = "2.18.4"
+typing-extensions = ">=4.6.1"
+
+[package.extras]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.18.4"
+description = "Core functionality for Pydantic validation and serialization"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"},
+    {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"},
+    {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"},
+    {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"},
+    {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"},
+    {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"},
+    {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"},
+    {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"},
+    {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"},
+    {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"},
+    {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"},
+    {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"},
+    {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"},
+    {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"},
+    {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"},
+    {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"},
+    {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"},
+    {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"},
+    {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"},
+    {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"},
+    {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
+[[package]]
+name = "pydantic-extra-types"
+version = "2.8.1"
+description = "Extra Pydantic types."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pydantic_extra_types-2.8.1-py3-none-any.whl", hash = "sha256:ca3fce71ee46bc1043bdf3d0e3c149a09ab162cb305c4ed8c501a5034a592dd6"},
+    {file = "pydantic_extra_types-2.8.1.tar.gz", hash = "sha256:c7cabe403234658207dcefed3489f2e8bfc8f4a8e305e7ab25ee29eceed65b39"},
+]
+
+[package.dependencies]
+pydantic = ">=2.5.2"
+
+[package.extras]
+all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2)", "python-ulid (>=1,<3)"]
+pendulum = ["pendulum (>=3.0.0,<4.0.0)"]
+phonenumbers = ["phonenumbers (>=8,<9)"]
+pycountry = ["pycountry (>=23)"]
+python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<3)"]

 [[package]]
 name = "pydub"
@@ -8788,4 +8935,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "509e7821d929dd859378b5b45285469dc6d6b358b2175df30188492fb8cea485"
+content-hash = "6845b0f3a5b5be84d32a9a79f23d389d1502cc70f5530becb313fa8b2268448f"

+ 3 - 1
api/pyproject.toml

@@ -167,7 +167,9 @@ azure-storage-blob = "12.13.0"
 azure-identity = "1.15.0"
 lxml = "5.1.0"
 xlrd = "~2.0.1"
-pydantic = "~1.10.0"
+pydantic = "~2.7.3"
+pydantic_extra_types = "~2.8.0"
+bump-pydantic = "~0.8.0"
 pgvecto-rs = "0.1.4"
 firecrawl-py = "0.0.5"
 oss2 = "2.18.5"

+ 3 - 1
api/requirements.txt

@@ -75,7 +75,9 @@ qrcode~=7.4.2
 azure-storage-blob==12.13.0
 azure-identity==1.15.0
 lxml==5.1.0
-pydantic~=1.10.0
+pydantic~=2.7.3
+pydantic_extra_types~=2.8.0
+bump-pydantic~=0.8.0
 pgvecto-rs==0.1.4
 firecrawl-py==0.0.5
 oss2==2.18.5

+ 8 - 2
api/services/entities/model_provider_entities.py

@@ -2,7 +2,7 @@ from enum import Enum
 from typing import Optional

 from flask import current_app
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from core.entities.model_entities import ModelWithProviderEntity, ProviderModelWithStatusEntity
 from core.entities.provider_entities import QuotaConfiguration
@@ -61,6 +61,9 @@ class ProviderResponse(BaseModel):
     custom_configuration: CustomConfigurationResponse
     system_configuration: SystemConfigurationResponse

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+
     def __init__(self, **data) -> None:
         super().__init__(**data)

@@ -139,6 +142,9 @@ class DefaultModelResponse(BaseModel):
     model_type: ModelType
     provider: SimpleProviderEntityResponse

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+

 class ModelWithProviderEntityResponse(ModelWithProviderEntity):
     """
@@ -147,4 +153,4 @@ class ModelWithProviderEntityResponse(ModelWithProviderEntity):
     provider: SimpleProviderEntityResponse

     def __init__(self, model: ModelWithProviderEntity) -> None:
-        super().__init__(**model.dict())
+        super().__init__(**model.model_dump())
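The `ConfigDict(protected_namespaces=())` lines added above (and in `feature_service.py` below) address another 2.x behaviour: Pydantic warns whenever a field name starts with the protected `model_` prefix, as `model_type` and `model_load_balancing_enabled` do. A small sketch of the warning and the per-model opt-out; class names are illustrative:

```python
import warnings

from pydantic import BaseModel, ConfigDict

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")

    class NoisyResponse(BaseModel):
        model_type: str  # clashes with the protected "model_" namespace

    print(any("protected namespace" in str(w.message) for w in caught))  # True


class QuietResponse(BaseModel):
    # The ConfigDict added by this commit disables the check for this model.
    model_config = ConfigDict(protected_namespaces=())

    model_type: str  # no warning
```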

+ 4 - 1
api/services/feature_service.py

@@ -1,5 +1,5 @@
 from flask import current_app
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict

 from services.billing_service import BillingService
 from services.enterprise.enterprise_service import EnterpriseService
@@ -31,6 +31,9 @@ class FeatureModel(BaseModel):
     can_replace_logo: bool = False
     model_load_balancing_enabled: bool = False

+    # pydantic configs
+    model_config = ConfigDict(protected_namespaces=())
+

 class SystemFeatureModel(BaseModel):
     sso_enforced_for_signin: bool = False

+ 1 - 1
api/tests/unit_tests/core/rag/datasource/vdb/milvus/test_milvus.py

@@ -17,7 +17,7 @@ def test_default_value():
         del config[key]
         with pytest.raises(ValidationError) as e:
             MilvusConfig(**config)
-        assert e.value.errors()[1]['msg'] == f'config MILVUS_{key.upper()} is required'
+        assert e.value.errors()[0]['msg'] == f'Value error, config MILVUS_{key.upper()} is required'

     config = MilvusConfig(**valid_config)
     assert config.secure is False
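The updated assertion reflects two Pydantic 2.x changes: a `ValueError` raised inside a validator is reported with a `Value error, ` prefix, and the list returned by `errors()` is shaped and ordered differently, so the relevant entry is now at index 0. A standalone sketch; this is a stand-in validator, not the real `MilvusConfig`:

```python
from pydantic import BaseModel, ValidationError, field_validator


class MilvusConfigSketch(BaseModel):
    host: str = ''

    @field_validator('host')
    @classmethod
    def check_host(cls, v):
        if not v:
            raise ValueError('config MILVUS_HOST is required')
        return v


try:
    MilvusConfigSketch(host='')
except ValidationError as e:
    # 2.x prefixes validator ValueErrors with "Value error, "
    print(e.errors()[0]['msg'])  # Value error, config MILVUS_HOST is required
```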