chore: bump minimum supported Python version to 3.11 (#10386)

Bowen Liang, 5 months ago
commit 6c8e208ef3
81 changed files, with 271 additions and 300 deletions.
  1. .github/actions/setup-poetry/action.yml (+1 -1)
  2. .github/workflows/api-tests.yml (+0 -1)
  3. .github/workflows/vdb-tests.yml (+0 -1)
  4. api/app.py (+5 -3)
  5. api/controllers/console/app/conversation.py (+2 -2)
  6. api/controllers/console/app/site.py (+3 -3)
  7. api/controllers/console/auth/activate.py (+1 -1)
  8. api/controllers/console/auth/oauth.py (+2 -2)
  9. api/controllers/console/datasets/data_source.py (+2 -2)
  10. api/controllers/console/datasets/datasets_document.py (+9 -9)
  11. api/controllers/console/datasets/datasets_segments.py (+2 -2)
  12. api/controllers/console/explore/completion.py (+3 -3)
  13. api/controllers/console/explore/installed_app.py (+2 -2)
  14. api/controllers/console/workspace/account.py (+2 -2)
  15. api/controllers/service_api/wraps.py (+2 -2)
  16. api/core/agent/base_agent_runner.py (+2 -2)
  17. api/core/app/app_config/entities.py (+2 -2)
  18. api/core/app/apps/message_based_app_generator.py (+2 -2)
  19. api/core/app/entities/queue_entities.py (+2 -2)
  20. api/core/app/task_pipeline/workflow_cycle_manage.py (+8 -8)
  21. api/core/entities/provider_configuration.py (+6 -6)
  22. api/core/file/enums.py (+6 -6)
  23. api/core/helper/code_executor/code_executor.py (+2 -2)
  24. api/core/indexing_runner.py (+14 -14)
  25. api/core/model_runtime/entities/message_entities.py (+3 -3)
  26. api/core/model_runtime/entities/model_entities.py (+2 -2)
  27. api/core/ops/entities/trace_entity.py (+2 -2)
  28. api/core/ops/langfuse_trace/entities/langfuse_trace_entity.py (+3 -3)
  29. api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py (+2 -2)
  30. api/core/prompt/simple_prompt_transform.py (+1 -1)
  31. api/core/rag/datasource/keyword/keyword_type.py (+2 -2)
  32. api/core/rag/datasource/vdb/vector_type.py (+2 -2)
  33. api/core/rag/extractor/word_extractor.py (+2 -2)
  34. api/core/rag/rerank/rerank_type.py (+2 -2)
  35. api/core/tools/entities/tool_entities.py (+2 -2)
  36. api/core/tools/provider/builtin/time/tools/current_time.py (+2 -2)
  37. api/core/tools/tool/tool.py (+2 -2)
  38. api/core/tools/tool_engine.py (+3 -3)
  39. api/core/variables/types.py (+2 -2)
  40. api/core/workflow/entities/node_entities.py (+2 -2)
  41. api/core/workflow/enums.py (+2 -2)
  42. api/core/workflow/graph_engine/entities/runtime_route_state.py (+3 -3)
  43. api/core/workflow/nodes/enums.py (+2 -2)
  44. api/core/workflow/nodes/iteration/entities.py (+2 -2)
  45. api/core/workflow/nodes/iteration/iteration_node.py (+6 -6)
  46. api/core/workflow/nodes/variable_assigner/node_data.py (+2 -2)
  47. api/events/event_handlers/create_document_index.py (+1 -1)
  48. api/events/event_handlers/update_provider_last_used_at_when_message_created.py (+2 -2)
  49. api/extensions/storage/azure_blob_storage.py (+2 -2)
  50. api/extensions/storage/storage_type.py (+2 -2)
  51. api/libs/oauth_data_source.py (+3 -3)
  52. api/models/account.py (+3 -3)
  53. api/models/dataset.py (+1 -1)
  54. api/models/enums.py (+4 -4)
  55. api/models/model.py (+2 -2)
  56. api/models/task.py (+4 -4)
  57. api/models/workflow.py (+2 -2)
  58. api/poetry.lock (+46 -75)
  59. api/pyproject.toml (+2 -2)
  60. api/services/account_service.py (+9 -9)
  61. api/services/annotation_service.py (+1 -1)
  62. api/services/app_dsl_service.py (+3 -3)
  63. api/services/app_service.py (+6 -6)
  64. api/services/auth/auth_type.py (+2 -2)
  65. api/services/conversation_service.py (+2 -2)
  66. api/services/dataset_service.py (+13 -13)
  67. api/services/external_knowledge_service.py (+2 -2)
  68. api/services/feature_service.py (+2 -2)
  69. api/services/file_service.py (+3 -3)
  70. api/services/model_load_balancing_service.py (+1 -1)
  71. api/services/recommend_app/recommend_app_type.py (+2 -2)
  72. api/services/workflow_service.py (+5 -5)
  73. api/tasks/add_document_to_index_task.py (+1 -1)
  74. api/tasks/annotation/enable_annotation_reply_task.py (+1 -1)
  75. api/tasks/batch_create_segment_to_index_task.py (+2 -2)
  76. api/tasks/create_segment_to_index_task.py (+3 -3)
  77. api/tasks/document_indexing_sync_task.py (+1 -1)
  78. api/tasks/document_indexing_task.py (+2 -2)
  79. api/tasks/document_indexing_update_task.py (+1 -1)
  80. api/tasks/enable_segment_to_index_task.py (+1 -1)
  81. api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py (+3 -3)

+ 1 - 1
.github/actions/setup-poetry/action.yml

@@ -4,7 +4,7 @@ inputs:
   python-version:
     description: Python version to use and the Poetry installed with
     required: true
-    default: '3.10'
+    default: '3.11'
   poetry-version:
     description: Poetry version to set up
     required: true

+ 0 - 1
.github/workflows/api-tests.yml

@@ -20,7 +20,6 @@ jobs:
     strategy:
       matrix:
         python-version:
-          - "3.10"
           - "3.11"
           - "3.12"
 

+ 0 - 1
.github/workflows/vdb-tests.yml

@@ -20,7 +20,6 @@ jobs:
     strategy:
       matrix:
         python-version:
-          - "3.10"
           - "3.11"
           - "3.12"
 

+ 5 - 3
api/app.py

@@ -1,6 +1,11 @@
 import os
 import sys
 
+python_version = sys.version_info
+if not ((3, 11) <= python_version < (3, 13)):
+    print(f"Python 3.11 or 3.12 is required, current version is {python_version.major}.{python_version.minor}")
+    raise SystemExit(1)
+
 from configs import dify_config
 
 if not dify_config.DEBUG:
@@ -30,9 +35,6 @@ from models import account, dataset, model, source, task, tool, tools, web  # no
 
 # DO NOT REMOVE ABOVE
 
-if sys.version_info[:2] == (3, 10):
-    print("Warning: Python 3.10 will not be supported in the next version.")
-
 
 warnings.simplefilter("ignore", ResourceWarning)
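Note: the version gate added at the top of api/app.py fails fast on unsupported interpreters, and it sits above the other imports so it runs before any module that might use 3.11-only syntax. A standalone sketch of the same check (the comments are mine, not part of the commit):

import sys

# sys.version_info is a named tuple, so lexicographic tuple comparison
# expresses the supported window: (3, 10, x) < (3, 11), and any 3.13
# release compares >= (3, 13) and is rejected.
if not ((3, 11) <= sys.version_info < (3, 13)):
    print(
        f"Python 3.11 or 3.12 is required, "
        f"current version is {sys.version_info.major}.{sys.version_info.minor}"
    )
    raise SystemExit(1)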
 

+ 2 - 2
api/controllers/console/app/conversation.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import pytz
 from flask_login import current_user
@@ -314,7 +314,7 @@ def _get_conversation(app_model, conversation_id):
         raise NotFound("Conversation Not Exists.")
 
     if not conversation.read_at:
-        conversation.read_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
         conversation.read_account_id = current_user.id
         db.session.commit()
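Note: the timezone.utc to UTC rewrite that repeats across most of the files below relies on datetime.UTC, added in Python 3.11 as an alias for datetime.timezone.utc, so runtime behavior is unchanged. A quick equivalence sketch (requires 3.11+):

from datetime import UTC, datetime, timezone

# datetime.UTC (new in 3.11) is the same singleton as timezone.utc.
assert UTC is timezone.utc

# Both spellings produce the naive-UTC timestamps this codebase stores.
stamp = datetime.now(UTC).replace(tzinfo=None)
assert stamp.tzinfo is None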
 

+ 3 - 3
api/controllers/console/app/site.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask_login import current_user
 from flask_restful import Resource, marshal_with, reqparse
@@ -75,7 +75,7 @@ class AppSite(Resource):
                 setattr(site, attr_name, value)
 
         site.updated_by = current_user.id
-        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return site
@@ -99,7 +99,7 @@ class AppSiteAccessTokenReset(Resource):
 
         site.code = Site.generate_code(16)
         site.updated_by = current_user.id
-        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return site

+ 1 - 1
api/controllers/console/auth/activate.py

@@ -65,7 +65,7 @@ class ActivateApi(Resource):
         account.timezone = args["timezone"]
         account.interface_theme = "light"
         account.status = AccountStatus.ACTIVE.value
-        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.commit()
 
         token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))

+ 2 - 2
api/controllers/console/auth/oauth.py

@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional
 
 import requests
@@ -106,7 +106,7 @@ class OAuthCallback(Resource):
 
         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
         try:

+ 2 - 2
api/controllers/console/datasets/data_source.py

@@ -83,7 +83,7 @@ class DataSourceApi(Resource):
         if action == "enable":
             if data_source_binding.disabled:
                 data_source_binding.disabled = False
-                data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(data_source_binding)
                 db.session.commit()
             else:
@@ -92,7 +92,7 @@ class DataSourceApi(Resource):
         if action == "disable":
             if not data_source_binding.disabled:
                 data_source_binding.disabled = True
-                data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(data_source_binding)
                 db.session.commit()
             else:

+ 9 - 9
api/controllers/console/datasets/datasets_document.py

@@ -1,6 +1,6 @@
 import logging
 from argparse import ArgumentTypeError
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask import request
 from flask_login import current_user
@@ -665,7 +665,7 @@ class DocumentProcessingApi(DocumentResource):
                 raise InvalidActionError("Document not in indexing state.")
 
             document.paused_by = current_user.id
-            document.paused_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.paused_at = datetime.now(UTC).replace(tzinfo=None)
             document.is_paused = True
             db.session.commit()
 
@@ -745,7 +745,7 @@ class DocumentMetadataApi(DocumentResource):
                     document.doc_metadata[key] = value
 
         document.doc_type = doc_type
-        document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        document.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return {"result": "success", "message": "Document metadata updated."}, 200
@@ -787,7 +787,7 @@ class DocumentStatusApi(DocumentResource):
             document.enabled = True
             document.disabled_at = None
             document.disabled_by = None
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
             # Set cache to prevent indexing the same document multiple times
@@ -804,9 +804,9 @@ class DocumentStatusApi(DocumentResource):
                 raise InvalidActionError("Document already disabled.")
 
             document.enabled = False
-            document.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.disabled_at = datetime.now(UTC).replace(tzinfo=None)
             document.disabled_by = current_user.id
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
             # Set cache to prevent indexing the same document multiple times
@@ -821,9 +821,9 @@ class DocumentStatusApi(DocumentResource):
                 raise InvalidActionError("Document already archived.")
 
             document.archived = True
-            document.archived_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.archived_at = datetime.now(UTC).replace(tzinfo=None)
             document.archived_by = current_user.id
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
             if document.enabled:
@@ -840,7 +840,7 @@ class DocumentStatusApi(DocumentResource):
             document.archived = False
             document.archived_at = None
             document.archived_by = None
-            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            document.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
             # Set cache to prevent indexing the same document multiple times

+ 2 - 2
api/controllers/console/datasets/datasets_segments.py

@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 import pandas as pd
 from flask import request
@@ -188,7 +188,7 @@ class DatasetDocumentSegmentApi(Resource):
                 raise InvalidActionError("Segment is already disabled.")
 
             segment.enabled = False
-            segment.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            segment.disabled_at = datetime.now(UTC).replace(tzinfo=None)
             segment.disabled_by = current_user.id
             db.session.commit()
 

+ 3 - 3
api/controllers/console/explore/completion.py

@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask_login import current_user
 from flask_restful import reqparse
@@ -46,7 +46,7 @@ class CompletionApi(InstalledAppResource):
         streaming = args["response_mode"] == "streaming"
         args["auto_generate_name"] = False
 
-        installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         try:
@@ -106,7 +106,7 @@ class ChatApi(InstalledAppResource):
 
         args["auto_generate_name"] = False
 
-        installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         try:

+ 2 - 2
api/controllers/console/explore/installed_app.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from flask_login import current_user
 from flask_restful import Resource, inputs, marshal_with, reqparse
@@ -81,7 +81,7 @@ class InstalledAppsListApi(Resource):
                 tenant_id=current_tenant_id,
                 app_owner_tenant_id=app.tenant_id,
                 is_pinned=False,
-                last_used_at=datetime.now(timezone.utc).replace(tzinfo=None),
+                last_used_at=datetime.now(UTC).replace(tzinfo=None),
             )
             db.session.add(new_installed_app)
             db.session.commit()

+ 2 - 2
api/controllers/console/workspace/account.py

@@ -60,7 +60,7 @@ class AccountInitApi(Resource):
                 raise InvalidInvitationCodeError()
 
             invitation_code.status = "used"
-            invitation_code.used_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             invitation_code.used_by_tenant_id = account.current_tenant_id
             invitation_code.used_by_account_id = account.id
 
@@ -68,7 +68,7 @@ class AccountInitApi(Resource):
         account.timezone = args["timezone"]
         account.interface_theme = "light"
         account.status = "active"
-        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.commit()
 
         return {"result": "success"}

+ 2 - 2
api/controllers/service_api/wraps.py

@@ -1,5 +1,5 @@
 from collections.abc import Callable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from functools import wraps
 from typing import Optional
@@ -198,7 +198,7 @@ def validate_and_get_api_token(scope=None):
     if not api_token:
         raise Unauthorized("Access token is invalid")
 
-    api_token.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+    api_token.last_used_at = datetime.now(UTC).replace(tzinfo=None)
     db.session.commit()
 
     return api_token

+ 2 - 2
api/core/agent/base_agent_runner.py

@@ -2,7 +2,7 @@ import json
 import logging
 import uuid
 from collections.abc import Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional, Union, cast
 
 from core.agent.entities import AgentEntity, AgentToolEntity
@@ -412,7 +412,7 @@ class BaseAgentRunner(AppRunner):
             .first()
         )
 
-        db_variables.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        db_variables.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db_variables.variables_str = json.dumps(jsonable_encoder(tool_variables.pool))
         db.session.commit()
         db.session.close()

+ 2 - 2
api/core/app/app_config/entities.py

@@ -1,5 +1,5 @@
 from collections.abc import Sequence
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel, Field, field_validator
@@ -88,7 +88,7 @@ class PromptTemplateEntity(BaseModel):
     advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None
 
 
-class VariableEntityType(str, Enum):
+class VariableEntityType(StrEnum):
     TEXT_INPUT = "text-input"
     SELECT = "select"
     PARAGRAPH = "paragraph"
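Note: the other recurring rewrite replaces the str/Enum mixin with enum.StrEnum, also new in 3.11. This is more than cosmetic: Python 3.11 changed __format__ for enums with mixed-in types so that f-strings and str.format() render the qualified member name instead of the value, whereas StrEnum keeps rendering the plain value. A minimal sketch (assuming 3.11+; the class names are illustrative, not from the commit):

from enum import Enum, StrEnum

class OldStyle(str, Enum):
    TEXT_INPUT = "text-input"

class NewStyle(StrEnum):
    TEXT_INPUT = "text-input"

# On 3.11+, the mixin stringifies to the qualified member name...
assert str(OldStyle.TEXT_INPUT) == "OldStyle.TEXT_INPUT"
assert f"{OldStyle.TEXT_INPUT}" == "OldStyle.TEXT_INPUT"
# ...while StrEnum keeps producing the raw value, which is what
# templating and serialization code generally expects:
assert str(NewStyle.TEXT_INPUT) == "text-input"
# Plain string comparison still works for both:
assert OldStyle.TEXT_INPUT == "text-input" == NewStyle.TEXT_INPUT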

+ 2 - 2
api/core/app/apps/message_based_app_generator.py

@@ -1,7 +1,7 @@
 import json
 import logging
 from collections.abc import Generator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional, Union
 
 from sqlalchemy import and_
@@ -200,7 +200,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
             db.session.commit()
             db.session.refresh(conversation)
         else:
-            conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
         message = Message(

+ 2 - 2
api/core/app/entities/queue_entities.py

@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel, field_validator
@@ -11,7 +11,7 @@ from core.workflow.nodes import NodeType
 from core.workflow.nodes.base import BaseNodeData
 
 
-class QueueEvent(str, Enum):
+class QueueEvent(StrEnum):
     """
     QueueEvent enum
     """

+ 8 - 8
api/core/app/task_pipeline/workflow_cycle_manage.py

@@ -1,7 +1,7 @@
 import json
 import time
 from collections.abc import Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, Optional, Union, cast
 
 from sqlalchemy.orm import Session
@@ -144,7 +144,7 @@ class WorkflowCycleManage:
         workflow_run.elapsed_time = time.perf_counter() - start_at
         workflow_run.total_tokens = total_tokens
         workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
         db.session.commit()
         db.session.refresh(workflow_run)
@@ -191,7 +191,7 @@ class WorkflowCycleManage:
         workflow_run.elapsed_time = time.perf_counter() - start_at
         workflow_run.total_tokens = total_tokens
         workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
         db.session.commit()
 
@@ -211,7 +211,7 @@ class WorkflowCycleManage:
         for workflow_node_execution in running_workflow_node_executions:
             workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
             workflow_node_execution.error = error
-            workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
             workflow_node_execution.elapsed_time = (
                 workflow_node_execution.finished_at - workflow_node_execution.created_at
             ).total_seconds()
@@ -262,7 +262,7 @@ class WorkflowCycleManage:
                     NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
                 }
             )
-            workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
 
             session.add(workflow_node_execution)
             session.commit()
@@ -285,7 +285,7 @@ class WorkflowCycleManage:
         execution_metadata = (
             json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
         )
-        finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        finished_at = datetime.now(UTC).replace(tzinfo=None)
         elapsed_time = (finished_at - event.start_at).total_seconds()
 
         db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
@@ -329,7 +329,7 @@ class WorkflowCycleManage:
         inputs = WorkflowEntry.handle_special_values(event.inputs)
         process_data = WorkflowEntry.handle_special_values(event.process_data)
         outputs = WorkflowEntry.handle_special_values(event.outputs)
-        finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        finished_at = datetime.now(UTC).replace(tzinfo=None)
         elapsed_time = (finished_at - event.start_at).total_seconds()
         execution_metadata = (
             json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
@@ -657,7 +657,7 @@ class WorkflowCycleManage:
                 if event.error is None
                 else WorkflowNodeExecutionStatus.FAILED,
                 error=None,
-                elapsed_time=(datetime.now(timezone.utc).replace(tzinfo=None) - event.start_at).total_seconds(),
+                elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
                 total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
                 execution_metadata=event.metadata,
                 finished_at=int(time.time()),

+ 6 - 6
api/core/entities/provider_configuration.py

@@ -240,7 +240,7 @@ class ProviderConfiguration(BaseModel):
         if provider_record:
             provider_record.encrypted_config = json.dumps(credentials)
             provider_record.is_valid = True
-            provider_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            provider_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             provider_record = Provider(
@@ -394,7 +394,7 @@ class ProviderConfiguration(BaseModel):
         if provider_model_record:
             provider_model_record.encrypted_config = json.dumps(credentials)
             provider_model_record.is_valid = True
-            provider_model_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            provider_model_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             provider_model_record = ProviderModel(
@@ -468,7 +468,7 @@ class ProviderConfiguration(BaseModel):
 
         if model_setting:
             model_setting.enabled = True
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             model_setting = ProviderModelSetting(
@@ -503,7 +503,7 @@ class ProviderConfiguration(BaseModel):
 
         if model_setting:
             model_setting.enabled = False
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             model_setting = ProviderModelSetting(
@@ -570,7 +570,7 @@ class ProviderConfiguration(BaseModel):
 
         if model_setting:
             model_setting.load_balancing_enabled = True
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             model_setting = ProviderModelSetting(
@@ -605,7 +605,7 @@ class ProviderConfiguration(BaseModel):
 
         if model_setting:
             model_setting.load_balancing_enabled = False
-            model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             model_setting = ProviderModelSetting(

+ 6 - 6
api/core/file/enums.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class FileType(str, Enum):
+class FileType(StrEnum):
     IMAGE = "image"
     DOCUMENT = "document"
     AUDIO = "audio"
@@ -16,7 +16,7 @@ class FileType(str, Enum):
         raise ValueError(f"No matching enum found for value '{value}'")
 
 
-class FileTransferMethod(str, Enum):
+class FileTransferMethod(StrEnum):
     REMOTE_URL = "remote_url"
     LOCAL_FILE = "local_file"
     TOOL_FILE = "tool_file"
@@ -29,7 +29,7 @@ class FileTransferMethod(str, Enum):
         raise ValueError(f"No matching enum found for value '{value}'")
 
 
-class FileBelongsTo(str, Enum):
+class FileBelongsTo(StrEnum):
     USER = "user"
     ASSISTANT = "assistant"
 
@@ -41,7 +41,7 @@ class FileBelongsTo(str, Enum):
         raise ValueError(f"No matching enum found for value '{value}'")
 
 
-class FileAttribute(str, Enum):
+class FileAttribute(StrEnum):
     TYPE = "type"
     SIZE = "size"
     NAME = "name"
@@ -51,5 +51,5 @@ class FileAttribute(str, Enum):
     EXTENSION = "extension"
 
 
-class ArrayFileAttribute(str, Enum):
+class ArrayFileAttribute(StrEnum):
     LENGTH = "length"

+ 2 - 2
api/core/helper/code_executor/code_executor.py

@@ -1,6 +1,6 @@
 import logging
 from collections.abc import Mapping
-from enum import Enum
+from enum import StrEnum
 from threading import Lock
 from typing import Any, Optional
 
@@ -31,7 +31,7 @@ class CodeExecutionResponse(BaseModel):
     data: Data
 
 
-class CodeLanguage(str, Enum):
+class CodeLanguage(StrEnum):
     PYTHON3 = "python3"
     JINJA2 = "jinja2"
     JAVASCRIPT = "javascript"

+ 14 - 14
api/core/indexing_runner.py

@@ -86,7 +86,7 @@ class IndexingRunner:
             except ProviderTokenNotInitError as e:
                 dataset_document.indexing_status = "error"
                 dataset_document.error = str(e.description)
-                dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.commit()
             except ObjectDeletedError:
                 logging.warning("Document deleted, document id: {}".format(dataset_document.id))
@@ -94,7 +94,7 @@ class IndexingRunner:
                 logging.exception("consume document failed")
                 dataset_document.indexing_status = "error"
                 dataset_document.error = str(e)
-                dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.commit()
 
     def run_in_splitting_status(self, dataset_document: DatasetDocument):
@@ -142,13 +142,13 @@ class IndexingRunner:
         except ProviderTokenNotInitError as e:
             dataset_document.indexing_status = "error"
             dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         except Exception as e:
             logging.exception("consume document failed")
             dataset_document.indexing_status = "error"
             dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
 
     def run_in_indexing_status(self, dataset_document: DatasetDocument):
@@ -200,13 +200,13 @@ class IndexingRunner:
         except ProviderTokenNotInitError as e:
             dataset_document.indexing_status = "error"
             dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         except Exception as e:
             logging.exception("consume document failed")
             dataset_document.indexing_status = "error"
             dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
 
     def indexing_estimate(
@@ -372,7 +372,7 @@ class IndexingRunner:
             after_indexing_status="splitting",
             extra_update_params={
                 DatasetDocument.word_count: sum(len(text_doc.page_content) for text_doc in text_docs),
-                DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
             },
         )
 
@@ -464,7 +464,7 @@ class IndexingRunner:
         doc_store.add_documents(documents)
 
         # update document status to indexing
-        cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         self._update_document_index_status(
             document_id=dataset_document.id,
             after_indexing_status="indexing",
@@ -479,7 +479,7 @@ class IndexingRunner:
             dataset_document_id=dataset_document.id,
             update_params={
                 DocumentSegment.status: "indexing",
-                DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
             },
         )
 
@@ -680,7 +680,7 @@ class IndexingRunner:
             after_indexing_status="completed",
             extra_update_params={
                 DatasetDocument.tokens: tokens,
-                DatasetDocument.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DatasetDocument.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 DatasetDocument.indexing_latency: indexing_end_at - indexing_start_at,
                 DatasetDocument.error: None,
             },
@@ -705,7 +705,7 @@ class IndexingRunner:
                     {
                         DocumentSegment.status: "completed",
                         DocumentSegment.enabled: True,
-                        DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                        DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                     }
                 )
 
@@ -738,7 +738,7 @@ class IndexingRunner:
                 {
                     DocumentSegment.status: "completed",
                     DocumentSegment.enabled: True,
-                    DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 }
             )
 
@@ -849,7 +849,7 @@ class IndexingRunner:
         doc_store.add_documents(documents)
 
         # update document status to indexing
-        cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         self._update_document_index_status(
             document_id=dataset_document.id,
             after_indexing_status="indexing",
@@ -864,7 +864,7 @@ class IndexingRunner:
             dataset_document_id=dataset_document.id,
             update_params={
                 DocumentSegment.status: "indexing",
-                DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
             },
         )
         pass

+ 3 - 3
api/core/model_runtime/entities/message_entities.py

@@ -1,6 +1,6 @@
 from abc import ABC
 from collections.abc import Sequence
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Literal, Optional
 
 from pydantic import BaseModel, Field, field_validator
@@ -49,7 +49,7 @@ class PromptMessageFunction(BaseModel):
     function: PromptMessageTool
 
 
-class PromptMessageContentType(str, Enum):
+class PromptMessageContentType(StrEnum):
     """
     Enum class for prompt message content type.
     """
@@ -95,7 +95,7 @@ class ImagePromptMessageContent(PromptMessageContent):
     Model class for image prompt message content.
     """
 
-    class DETAIL(str, Enum):
+    class DETAIL(StrEnum):
         LOW = "low"
         HIGH = "high"
 

+ 2 - 2
api/core/model_runtime/entities/model_entities.py

@@ -1,5 +1,5 @@
 from decimal import Decimal
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel, ConfigDict
@@ -92,7 +92,7 @@ class ModelFeature(Enum):
     AUDIO = "audio"
 
 
-class DefaultParameterName(str, Enum):
+class DefaultParameterName(StrEnum):
     """
     Enum class for parameter template variable.
     """

+ 2 - 2
api/core/ops/entities/trace_entity.py

@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, ConfigDict, field_validator
@@ -122,7 +122,7 @@ trace_info_info_map = {
 }
 
 
-class TraceTaskName(str, Enum):
+class TraceTaskName(StrEnum):
     CONVERSATION_TRACE = "conversation"
     WORKFLOW_TRACE = "workflow"
     MESSAGE_TRACE = "message"

+ 3 - 3
api/core/ops/langfuse_trace/entities/langfuse_trace_entity.py

@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, ConfigDict, Field, field_validator
@@ -39,7 +39,7 @@ def validate_input_output(v, field_name):
     return v
 
 
-class LevelEnum(str, Enum):
+class LevelEnum(StrEnum):
     DEBUG = "DEBUG"
     WARNING = "WARNING"
     ERROR = "ERROR"
@@ -178,7 +178,7 @@ class LangfuseSpan(BaseModel):
         return validate_input_output(v, field_name)
 
 
-class UnitEnum(str, Enum):
+class UnitEnum(StrEnum):
     CHARACTERS = "CHARACTERS"
     TOKENS = "TOKENS"
     SECONDS = "SECONDS"

+ 2 - 2
api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py

@@ -1,5 +1,5 @@
 from datetime import datetime
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, Field, field_validator
@@ -8,7 +8,7 @@ from pydantic_core.core_schema import ValidationInfo
 from core.ops.utils import replace_text_with_content
 
 
-class LangSmithRunType(str, Enum):
+class LangSmithRunType(StrEnum):
     tool = "tool"
     chain = "chain"
     llm = "llm"

+ 1 - 1
api/core/prompt/simple_prompt_transform.py

@@ -23,7 +23,7 @@ if TYPE_CHECKING:
     from core.file.models import File
 
 
-class ModelMode(str, enum.Enum):
+class ModelMode(enum.StrEnum):
     COMPLETION = "completion"
     CHAT = "chat"
 

+ 2 - 2
api/core/rag/datasource/keyword/keyword_type.py

@@ -1,5 +1,5 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class KeyWordType(str, Enum):
+class KeyWordType(StrEnum):
     JIEBA = "jieba"

+ 2 - 2
api/core/rag/datasource/vdb/vector_type.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class VectorType(str, Enum):
+class VectorType(StrEnum):
     ANALYTICDB = "analyticdb"
     CHROMA = "chroma"
     MILVUS = "milvus"

+ 2 - 2
api/core/rag/extractor/word_extractor.py

@@ -114,10 +114,10 @@ class WordExtractor(BaseExtractor):
                     mime_type=mime_type or "",
                     created_by=self.user_id,
                     created_by_role=CreatedByRole.ACCOUNT,
-                    created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                     used=True,
                     used_by=self.user_id,
-                    used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 )
 
                 db.session.add(upload_file)

+ 2 - 2
api/core/rag/rerank/rerank_type.py

@@ -1,6 +1,6 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class RerankMode(str, Enum):
+class RerankMode(StrEnum):
     RERANKING_MODEL = "reranking_model"
     WEIGHTED_SCORE = "weighted_score"

+ 2 - 2
api/core/tools/entities/tool_entities.py

@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Optional, Union, cast
 
 from pydantic import BaseModel, Field, field_validator
@@ -137,7 +137,7 @@ class ToolParameterOption(BaseModel):
 
 
 class ToolParameter(BaseModel):
-    class ToolParameterType(str, Enum):
+    class ToolParameterType(StrEnum):
         STRING = "string"
         NUMBER = "number"
         BOOLEAN = "boolean"

+ 2 - 2
api/core/tools/provider/builtin/time/tools/current_time.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, Union
 
 from pytz import timezone as pytz_timezone
@@ -20,7 +20,7 @@ class CurrentTimeTool(BuiltinTool):
         tz = tool_parameters.get("timezone", "UTC")
         fm = tool_parameters.get("format") or "%Y-%m-%d %H:%M:%S %Z"
         if tz == "UTC":
-            return self.create_text_message(f"{datetime.now(timezone.utc).strftime(fm)}")
+            return self.create_text_message(f"{datetime.now(UTC).strftime(fm)}")
 
         try:
             tz = pytz_timezone(tz)

+ 2 - 2
api/core/tools/tool/tool.py

@@ -1,7 +1,7 @@
 from abc import ABC, abstractmethod
 from collections.abc import Mapping
 from copy import deepcopy
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import TYPE_CHECKING, Any, Optional, Union
 
 from pydantic import BaseModel, ConfigDict, field_validator
@@ -62,7 +62,7 @@ class Tool(BaseModel, ABC):
     def __init__(self, **data: Any):
         super().__init__(**data)
 
-    class VariableKey(str, Enum):
+    class VariableKey(StrEnum):
         IMAGE = "image"
         DOCUMENT = "document"
         VIDEO = "video"

+ 3 - 3
api/core/tools/tool_engine.py

@@ -1,7 +1,7 @@
 import json
 from collections.abc import Mapping
 from copy import deepcopy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from mimetypes import guess_type
 from typing import Any, Optional, Union
 
@@ -158,7 +158,7 @@ class ToolEngine:
         """
         Invoke the tool with the given arguments.
         """
-        started_at = datetime.now(timezone.utc)
+        started_at = datetime.now(UTC)
         meta = ToolInvokeMeta(
             time_cost=0.0,
             error=None,
@@ -176,7 +176,7 @@ class ToolEngine:
             meta.error = str(e)
             raise ToolEngineInvokeError(meta)
         finally:
-            ended_at = datetime.now(timezone.utc)
+            ended_at = datetime.now(UTC)
             meta.time_cost = (ended_at - started_at).total_seconds()
 
         return meta, response

+ 2 - 2
api/core/variables/types.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class SegmentType(str, Enum):
+class SegmentType(StrEnum):
     NONE = "none"
     NUMBER = "number"
     STRING = "string"

+ 2 - 2
api/core/workflow/entities/node_entities.py

@@ -1,5 +1,5 @@
 from collections.abc import Mapping
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional
 
 from pydantic import BaseModel
@@ -8,7 +8,7 @@ from core.model_runtime.entities.llm_entities import LLMUsage
 from models.workflow import WorkflowNodeExecutionStatus
 
 
-class NodeRunMetadataKey(str, Enum):
+class NodeRunMetadataKey(StrEnum):
     """
     Node Run Metadata Key.
     """

+ 2 - 2
api/core/workflow/enums.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class SystemVariableKey(str, Enum):
+class SystemVariableKey(StrEnum):
     """
     System Variables.
     """

+ 3 - 3
api/core/workflow/graph_engine/entities/runtime_route_state.py

@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from typing import Optional
 
@@ -63,7 +63,7 @@ class RouteNodeState(BaseModel):
             raise Exception(f"Invalid route status {run_result.status}")
 
         self.node_run_result = run_result
-        self.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        self.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
 
 class RuntimeRouteState(BaseModel):
@@ -81,7 +81,7 @@ class RuntimeRouteState(BaseModel):
 
         :param node_id: node id
         """
-        state = RouteNodeState(node_id=node_id, start_at=datetime.now(timezone.utc).replace(tzinfo=None))
+        state = RouteNodeState(node_id=node_id, start_at=datetime.now(UTC).replace(tzinfo=None))
         self.node_state_mapping[state.id] = state
         return state
 

+ 2 - 2
api/core/workflow/nodes/enums.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class NodeType(str, Enum):
+class NodeType(StrEnum):
     START = "start"
     END = "end"
     ANSWER = "answer"

+ 2 - 2
api/core/workflow/nodes/iteration/entities.py

@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 from typing import Any, Optional
 
 from pydantic import Field
@@ -6,7 +6,7 @@ from pydantic import Field
 from core.workflow.nodes.base import BaseIterationNodeData, BaseIterationState, BaseNodeData
 
 
-class ErrorHandleMode(str, Enum):
+class ErrorHandleMode(StrEnum):
     TERMINATED = "terminated"
     CONTINUE_ON_ERROR = "continue-on-error"
     REMOVE_ABNORMAL_OUTPUT = "remove-abnormal-output"

+ 6 - 6
api/core/workflow/nodes/iteration/iteration_node.py

@@ -2,7 +2,7 @@ import logging
 import uuid
 from collections.abc import Generator, Mapping, Sequence
 from concurrent.futures import Future, wait
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from queue import Empty, Queue
 from typing import TYPE_CHECKING, Any, Optional, cast
 
@@ -135,7 +135,7 @@ class IterationNode(BaseNode[IterationNodeData]):
             thread_pool_id=self.thread_pool_id,
         )
 
-        start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        start_at = datetime.now(UTC).replace(tzinfo=None)
 
         yield IterationRunStartedEvent(
             iteration_id=self.id,
@@ -367,7 +367,7 @@ class IterationNode(BaseNode[IterationNodeData]):
         """
         run single iteration
         """
-        iter_start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        iter_start_at = datetime.now(UTC).replace(tzinfo=None)
 
         try:
             rst = graph_engine.run()
@@ -440,7 +440,7 @@ class IterationNode(BaseNode[IterationNodeData]):
                             variable_pool.add([self.node_id, "index"], next_index)
                             if next_index < len(iterator_list_value):
                                 variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
-                            duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
+                            duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
                             iter_run_map[iteration_run_id] = duration
                             yield IterationRunNextEvent(
                                 iteration_id=self.id,
@@ -461,7 +461,7 @@ class IterationNode(BaseNode[IterationNodeData]):
 
                             if next_index < len(iterator_list_value):
                                 variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
-                            duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
+                            duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
                             iter_run_map[iteration_run_id] = duration
                             yield IterationRunNextEvent(
                                 iteration_id=self.id,
@@ -503,7 +503,7 @@ class IterationNode(BaseNode[IterationNodeData]):
 
             if next_index < len(iterator_list_value):
                 variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
-            duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
+            duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
             iter_run_map[iteration_run_id] = duration
             yield IterationRunNextEvent(
                 iteration_id=self.id,

+ 2 - 2
api/core/workflow/nodes/variable_assigner/node_data.py

@@ -1,11 +1,11 @@
 from collections.abc import Sequence
-from enum import Enum
+from enum import StrEnum
 from typing import Optional
 
 from core.workflow.nodes.base import BaseNodeData
 
 
-class WriteMode(str, Enum):
+class WriteMode(StrEnum):
     OVER_WRITE = "over-write"
     APPEND = "append"
     CLEAR = "clear"

+ 1 - 1
api/events/event_handlers/create_document_index.py

@@ -33,7 +33,7 @@ def handle(sender, **kwargs):
             raise NotFound("Document not found")
 
         document.indexing_status = "parsing"
-        document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         documents.append(document)
         db.session.add(document)
     db.session.commit()

+ 2 - 2
api/events/event_handlers/update_provider_last_used_at_when_message_created.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity
 from events.message_event import message_was_created
@@ -17,5 +17,5 @@ def handle(sender, **kwargs):
     db.session.query(Provider).filter(
         Provider.tenant_id == application_generate_entity.app_config.tenant_id,
         Provider.provider_name == application_generate_entity.model_conf.provider,
-    ).update({"last_used": datetime.now(timezone.utc).replace(tzinfo=None)})
+    ).update({"last_used": datetime.now(UTC).replace(tzinfo=None)})
     db.session.commit()

+ 2 - 2
api/extensions/storage/azure_blob_storage.py

@@ -1,5 +1,5 @@
 from collections.abc import Generator
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 
 from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas
 
@@ -67,7 +67,7 @@ class AzureBlobStorage(BaseStorage):
                 account_key=self.account_key,
                 resource_types=ResourceTypes(service=True, container=True, object=True),
                 permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
-                expiry=datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(hours=1),
+                expiry=datetime.now(UTC).replace(tzinfo=None) + timedelta(hours=1),
             )
             redis_client.set(cache_key, sas_token, ex=3000)
         return BlobServiceClient(account_url=self.account_url, credential=sas_token)

+ 2 - 2
api/extensions/storage/storage_type.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class StorageType(str, Enum):
+class StorageType(StrEnum):
     ALIYUN_OSS = "aliyun-oss"
     AZURE_BLOB = "azure-blob"
     BAIDU_OBS = "baidu-obs"

+ 3 - 3
api/libs/oauth_data_source.py

@@ -70,7 +70,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
-            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -106,7 +106,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
-            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -141,7 +141,7 @@ class NotionOAuth(OAuthDataSource):
             }
             data_source_binding.source_info = new_source_info
             data_source_binding.disabled = False
-            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
         else:
             raise ValueError("Data source binding not found")

+ 3 - 3
api/models/account.py

@@ -8,7 +8,7 @@ from extensions.ext_database import db
 from .types import StringUUID
 
 
-class AccountStatus(str, enum.Enum):
+class AccountStatus(enum.StrEnum):
     PENDING = "pending"
     UNINITIALIZED = "uninitialized"
     ACTIVE = "active"
@@ -121,12 +121,12 @@ class Account(UserMixin, db.Model):
         return self._current_tenant.current_role == TenantAccountRole.DATASET_OPERATOR
 
 
-class TenantStatus(str, enum.Enum):
+class TenantStatus(enum.StrEnum):
     NORMAL = "normal"
     ARCHIVE = "archive"
 
 
-class TenantAccountRole(str, enum.Enum):
+class TenantAccountRole(enum.StrEnum):
     OWNER = "owner"
     ADMIN = "admin"
     EDITOR = "editor"

+ 1 - 1
api/models/dataset.py

@@ -23,7 +23,7 @@ from .model import App, Tag, TagBinding, UploadFile
 from .types import StringUUID
 
 
-class DatasetPermissionEnum(str, enum.Enum):
+class DatasetPermissionEnum(enum.StrEnum):
     ONLY_ME = "only_me"
     ALL_TEAM = "all_team_members"
     PARTIAL_TEAM = "partial_members"

+ 4 - 4
api/models/enums.py

@@ -1,16 +1,16 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class CreatedByRole(str, Enum):
+class CreatedByRole(StrEnum):
     ACCOUNT = "account"
     END_USER = "end_user"
 
 
-class UserFrom(str, Enum):
+class UserFrom(StrEnum):
     ACCOUNT = "account"
     END_USER = "end-user"
 
 
-class WorkflowRunTriggeredFrom(str, Enum):
+class WorkflowRunTriggeredFrom(StrEnum):
     DEBUGGING = "debugging"
     APP_RUN = "app-run"

+ 2 - 2
api/models/model.py

@@ -3,7 +3,7 @@ import re
 import uuid
 from collections.abc import Mapping
 from datetime import datetime
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Literal, Optional
 
 import sqlalchemy as sa
@@ -32,7 +32,7 @@ class DifySetup(db.Model):
     setup_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))
 
 
-class AppMode(str, Enum):
+class AppMode(StrEnum):
     COMPLETION = "completion"
     WORKFLOW = "workflow"
     CHAT = "chat"

+ 4 - 4
api/models/task.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from celery import states
 
@@ -16,8 +16,8 @@ class CeleryTask(db.Model):
     result = db.Column(db.PickleType, nullable=True)
     date_done = db.Column(
         db.DateTime,
-        default=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
-        onupdate=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
+        default=lambda: datetime.now(UTC).replace(tzinfo=None),
+        onupdate=lambda: datetime.now(UTC).replace(tzinfo=None),
         nullable=True,
     )
     traceback = db.Column(db.Text, nullable=True)
@@ -37,4 +37,4 @@ class CeleryTaskSet(db.Model):
     id = db.Column(db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True)
     taskset_id = db.Column(db.String(155), unique=True)
     result = db.Column(db.PickleType, nullable=True)
-    date_done = db.Column(db.DateTime, default=lambda: datetime.now(timezone.utc).replace(tzinfo=None), nullable=True)
+    date_done = db.Column(db.DateTime, default=lambda: datetime.now(UTC).replace(tzinfo=None), nullable=True)

+ 2 - 2
api/models/workflow.py

@@ -1,6 +1,6 @@
 import json
 from collections.abc import Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from typing import Any, Optional, Union
 
@@ -108,7 +108,7 @@ class Workflow(db.Model):
     )
     updated_by: Mapped[Optional[str]] = mapped_column(StringUUID)
     updated_at: Mapped[datetime] = mapped_column(
-        sa.DateTime, nullable=False, default=datetime.now(tz=timezone.utc), server_onupdate=func.current_timestamp()
+        sa.DateTime, nullable=False, default=datetime.now(tz=UTC), server_onupdate=func.current_timestamp()
     )
     _environment_variables: Mapped[str] = mapped_column(
         "environment_variables", db.Text, nullable=False, server_default="{}"

+ 46 - 75
api/poetry.lock

@@ -114,7 +114,6 @@ files = [
 [package.dependencies]
 aiohappyeyeballs = ">=2.3.0"
 aiosignal = ">=1.1.2"
-async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
 attrs = ">=17.3.0"
 frozenlist = ">=1.1.1"
 multidict = ">=4.5,<7.0"
@@ -483,10 +482,8 @@ files = [
 ]
 
 [package.dependencies]
-exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
 idna = ">=2.8"
 sniffio = ">=1.1"
-typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
@@ -519,9 +516,6 @@ files = [
     {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
 ]
 
-[package.dependencies]
-typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
-
 [package.extras]
 tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
 
@@ -951,6 +945,10 @@ files = [
     {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
     {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
     {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
+    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
+    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
+    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
+    {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
     {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
     {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
     {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -963,8 +961,14 @@ files = [
     {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
     {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
     {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
+    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
+    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
+    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
+    {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
     {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
     {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
+    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
+    {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
     {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
     {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
     {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -975,8 +979,24 @@ files = [
     {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
     {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
     {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
+    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
+    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
+    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
+    {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
     {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
     {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
+    {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
+    {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
+    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
+    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
+    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
+    {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
+    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
+    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
+    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
+    {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
+    {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
+    {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
     {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
     {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
     {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -986,6 +1006,10 @@ files = [
     {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
     {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
     {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
+    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
+    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
+    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
+    {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
     {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
     {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
     {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -997,6 +1021,10 @@ files = [
     {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
     {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
     {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
+    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
+    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
+    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
+    {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
     {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
     {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
     {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -1009,6 +1037,10 @@ files = [
     {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
     {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
     {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
+    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
+    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
+    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
+    {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
     {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
     {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
     {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -1021,6 +1053,10 @@ files = [
     {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
     {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
     {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
+    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
+    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
+    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
+    {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
     {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
     {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
     {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -1092,10 +1128,8 @@ files = [
 
 [package.dependencies]
 colorama = {version = "*", markers = "os_name == \"nt\""}
-importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""}
 packaging = ">=19.1"
 pyproject_hooks = "*"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
 
 [package.extras]
 docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
@@ -2036,9 +2070,6 @@ files = [
     {file = "dataclass_wizard-0.28.0-py2.py3-none-any.whl", hash = "sha256:996fa46475b9192a48a057c34f04597bc97be5bc2f163b99cb1de6f778ca1f7f"},
 ]
 
-[package.dependencies]
-typing-extensions = {version = ">=4", markers = "python_version == \"3.9\" or python_version == \"3.10\""}
-
 [package.extras]
 dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "pip (>=21.3.1)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.0)"]
 timedelta = ["pytimeparse (>=1.1.7)"]
@@ -2409,20 +2440,6 @@ files = [
 [package.extras]
 tests = ["pytest"]
 
-[[package]]
-name = "exceptiongroup"
-version = "1.2.2"
-description = "Backport of PEP 654 (exception groups)"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
-    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
-]
-
-[package.extras]
-test = ["pytest (>=6)"]
-
 [[package]]
 name = "faker"
 version = "32.1.0"
@@ -3210,14 +3227,8 @@ files = [
 [package.dependencies]
 google-auth = ">=2.14.1,<3.0.dev0"
 googleapis-common-protos = ">=1.56.2,<2.0.dev0"
-grpcio = [
-    {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
-    {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-]
-grpcio-status = [
-    {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
-    {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-]
+grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
+grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
 proto-plus = ">=1.22.3,<2.0.0dev"
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
 requests = ">=2.18.0,<3.0.0.dev0"
@@ -5550,9 +5561,6 @@ files = [
     {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
 ]
 
-[package.dependencies]
-typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
-
 [[package]]
 name = "multiprocess"
 version = "0.70.17"
@@ -6410,7 +6418,6 @@ bottleneck = {version = ">=1.3.6", optional = true, markers = "extra == \"perfor
 numba = {version = ">=0.56.4", optional = true, markers = "extra == \"performance\""}
 numexpr = {version = ">=2.8.4", optional = true, markers = "extra == \"performance\""}
 numpy = [
-    {version = ">=1.22.4", markers = "python_version < \"3.11\""},
     {version = ">=1.23.2", markers = "python_version == \"3.11\""},
     {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
 ]
@@ -6694,7 +6701,6 @@ files = [
 deprecation = ">=2.1.0,<3.0.0"
 httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
 pydantic = ">=1.9,<3.0"
-strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
 
 [[package]]
 name = "posthog"
@@ -7426,9 +7432,6 @@ files = [
     {file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"},
 ]
 
-[package.dependencies]
-typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
-
 [package.extras]
 crypto = ["cryptography"]
 cryptodome = ["PyCryptodome"]
@@ -7517,11 +7520,9 @@ files = [
 
 [package.dependencies]
 colorama = {version = "*", markers = "sys_platform == \"win32\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
 iniconfig = "*"
 packaging = "*"
 pluggy = ">=1.5,<2"
-tomli = {version = ">=1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
@@ -7559,7 +7560,6 @@ files = [
 
 [package.dependencies]
 pytest = ">=8.3.3"
-tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
 testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"]
@@ -8377,7 +8377,6 @@ files = [
 [package.dependencies]
 markdown-it-py = ">=2.2.0"
 pygments = ">=2.13.0,<3.0.0"
-typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}
 
 [package.extras]
 jupyter = ["ipywidgets (>=7.5.1,<9)"]
@@ -9214,22 +9213,6 @@ httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
 python-dateutil = ">=2.8.2,<3.0.0"
 typing-extensions = ">=4.2.0,<5.0.0"
 
-[[package]]
-name = "strenum"
-version = "0.4.15"
-description = "An Enum that inherits from str."
-optional = false
-python-versions = "*"
-files = [
-    {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
-    {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
-]
-
-[package.extras]
-docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
-release = ["twine"]
-test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
-
 [[package]]
 name = "strictyaml"
 version = "1.7.3"
@@ -9636,17 +9619,6 @@ files = [
     {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
 ]
 
-[[package]]
-name = "tomli"
-version = "2.1.0"
-description = "A lil' TOML parser"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
-    {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
-]
-
 [[package]]
 name = "tos"
 version = "2.7.2"
@@ -10067,7 +10039,6 @@ h11 = ">=0.8"
 httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
 python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
 pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
-typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
 uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
 watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
 websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
@@ -11050,5 +11021,5 @@ cffi = ["cffi (>=1.11)"]
 
 [metadata]
 lock-version = "2.0"
-python-versions = ">=3.10,<3.13"
-content-hash = "152b8e11ceffaa482fee6920b7991f52427aa1ffed75614e78ec1065dd5f6898"
+python-versions = ">=3.11,<3.13"
+content-hash = "75175c3427d13c41d84374ff2bb6f5c6cb157e3783107f9d22fad15c9eb8c177"
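
The lockfile churn is almost entirely environment markers for python_version < "3.11" being pruned, plus the removal of backports whose functionality is in the 3.11 standard library: exceptiongroup (built-in ExceptionGroup), tomli (tomllib), strenum (enum.StrEnum), and aiohttp's async-timeout pin (asyncio.timeout). Reading TOML, for instance, no longer needs a third-party wheel; a small illustration (the path is assumed):

    import tomllib  # stdlib since 3.11, replacing the dropped tomli entry

    with open("api/pyproject.toml", "rb") as f:  # tomllib requires binary mode
        project = tomllib.load(f)
    print(project["project"]["requires-python"])  # ">=3.11,<3.13"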

+ 2 - 2
api/pyproject.toml

@@ -1,5 +1,5 @@
 [project]
-requires-python = ">=3.10,<3.13"
+requires-python = ">=3.11,<3.13"
 
 [build-system]
 requires = ["poetry-core"]
@@ -163,7 +163,7 @@ pydantic-settings = "~2.6.0"
 pydantic_extra_types = "~2.9.0"
 pyjwt = "~2.8.0"
 pypdfium2 = "~4.17.0"
-python = ">=3.10,<3.13"
+python = ">=3.11,<3.13"
 python-docx = "~1.1.0"
 python-dotenv = "1.0.0"
 pyyaml = "~6.0.1"
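
requires-python only gates installs performed through pip or Poetry; for deployments that bypass them, a runtime guard mirroring the same floor would look like the following minimal sketch (not part of this change):

    import sys

    if sys.version_info < (3, 11):
        raise SystemExit(f"Python 3.11 or newer is required, found {sys.version.split()[0]}")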

+ 9 - 9
api/services/account_service.py

@@ -4,7 +4,7 @@ import logging
 import random
 import secrets
 import uuid
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from hashlib import sha256
 from typing import Any, Optional
 
@@ -115,15 +115,15 @@ class AccountService:
             available_ta.current = True
             db.session.commit()
 
-        if datetime.now(timezone.utc).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
-            account.last_active_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        if datetime.now(UTC).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
+            account.last_active_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
         return account
 
     @staticmethod
     def get_account_jwt_token(account: Account) -> str:
-        exp_dt = datetime.now(timezone.utc) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
+        exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
         exp = int(exp_dt.timestamp())
         payload = {
             "user_id": account.id,
@@ -160,7 +160,7 @@ class AccountService:
 
         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
 
         db.session.commit()
 
@@ -253,7 +253,7 @@ class AccountService:
                 # If it exists, update the record
                 account_integrate.open_id = open_id
                 account_integrate.encrypted_token = ""  # todo
-                account_integrate.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+                account_integrate.updated_at = datetime.now(UTC).replace(tzinfo=None)
             else:
                 # If it does not exist, create a new record
                 account_integrate = AccountIntegrate(
@@ -288,7 +288,7 @@ class AccountService:
     @staticmethod
     def update_login_info(account: Account, *, ip_address: str) -> None:
         """Update last login time and ip"""
-        account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        account.last_login_at = datetime.now(UTC).replace(tzinfo=None)
         account.last_login_ip = ip_address
         db.session.add(account)
         db.session.commit()
@@ -765,7 +765,7 @@ class RegisterService:
             )
 
             account.last_login_ip = ip_address
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
 
             TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)
 
@@ -805,7 +805,7 @@ class RegisterService:
                 is_setup=is_setup,
             )
             account.status = AccountStatus.ACTIVE.value if not status else status.value
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
 
             if open_id is not None or provider is not None:
                 AccountService.link_account_integrate(provider, open_id, account)

+ 1 - 1
api/services/annotation_service.py

@@ -429,7 +429,7 @@ class AppAnnotationService:
             raise NotFound("App annotation not found")
         annotation_setting.score_threshold = args["score_threshold"]
         annotation_setting.updated_user_id = current_user.id
-        annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(annotation_setting)
         db.session.commit()
 

+ 3 - 3
api/services/app_dsl_service.py

@@ -1,6 +1,6 @@
 import logging
 import uuid
-from enum import Enum
+from enum import StrEnum
 from typing import Optional
 from uuid import uuid4
 
@@ -25,12 +25,12 @@ IMPORT_INFO_REDIS_EXPIRY = 180  # 3 minutes
 CURRENT_DSL_VERSION = "0.1.3"
 
 
-class ImportMode(str, Enum):
+class ImportMode(StrEnum):
     YAML_CONTENT = "yaml-content"
     YAML_URL = "yaml-url"
 
 
-class ImportStatus(str, Enum):
+class ImportStatus(StrEnum):
     COMPLETED = "completed"
     COMPLETED_WITH_WARNINGS = "completed-with-warnings"
     PENDING = "pending"
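
Beyond the f-string change noted under api/models/model.py, str() itself differs between the two spellings, which matters wherever these statuses are interpolated into cache keys or log messages. A short sketch:

    from enum import Enum, StrEnum

    class OldStatus(str, Enum):
        PENDING = "pending"

    class NewStatus(StrEnum):
        PENDING = "pending"

    print(str(OldStatus.PENDING))  # "OldStatus.PENDING"
    print(str(NewStatus.PENDING))  # "pending", safe to embed in keys and messages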

+ 6 - 6
api/services/app_service.py

@@ -1,6 +1,6 @@
 import json
 import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import cast
 
 from flask_login import current_user
@@ -223,7 +223,7 @@ class AppService:
         app.icon_background = args.get("icon_background")
         app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         if app.max_active_requests is not None:
@@ -240,7 +240,7 @@ class AppService:
         """
         app.name = name
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return app
@@ -256,7 +256,7 @@ class AppService:
         app.icon = icon
         app.icon_background = icon_background
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return app
@@ -273,7 +273,7 @@ class AppService:
 
         app.enable_site = enable_site
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return app
@@ -290,7 +290,7 @@ class AppService:
 
         app.enable_api = enable_api
         app.updated_by = current_user.id
-        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        app.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return app

+ 2 - 2
api/services/auth/auth_type.py

@@ -1,6 +1,6 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class AuthType(str, Enum):
+class AuthType(StrEnum):
     FIRECRAWL = "firecrawl"
     JINA = "jinareader"

+ 2 - 2
api/services/conversation_service.py

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional, Union
 
 from sqlalchemy import asc, desc, or_
@@ -104,7 +104,7 @@ class ConversationService:
             return cls.auto_generate_name(app_model, conversation)
         else:
             conversation.name = name
-            conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
         return conversation

+ 13 - 13
api/services/dataset_service.py

@@ -600,7 +600,7 @@ class DocumentService:
         # update document to be paused
         document.is_paused = True
         document.paused_by = current_user.id
-        document.paused_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.paused_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
 
         db.session.add(document)
         db.session.commit()
@@ -1072,7 +1072,7 @@ class DocumentService:
         document.parsing_completed_at = None
         document.cleaning_completed_at = None
         document.splitting_completed_at = None
-        document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         document.created_from = created_from
         document.doc_form = document_data["doc_form"]
         db.session.add(document)
@@ -1409,8 +1409,8 @@ class SegmentService:
                 word_count=len(content),
                 tokens=tokens,
                 status="completed",
-                indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
-                completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+                completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 created_by=current_user.id,
             )
             if document.doc_form == "qa_model":
@@ -1429,7 +1429,7 @@ class SegmentService:
             except Exception as e:
                 logging.exception("create segment index failed")
                 segment_document.enabled = False
-                segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 segment_document.status = "error"
                 segment_document.error = str(e)
                 db.session.commit()
@@ -1481,8 +1481,8 @@ class SegmentService:
                     word_count=len(content),
                     tokens=tokens,
                     status="completed",
-                    indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
-                    completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+                    completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                     created_by=current_user.id,
                 )
                 if document.doc_form == "qa_model":
@@ -1508,7 +1508,7 @@ class SegmentService:
                 logging.exception("create segment index failed")
                 for segment_document in segment_data_list:
                     segment_document.enabled = False
-                    segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                    segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                     segment_document.status = "error"
                     segment_document.error = str(e)
             db.session.commit()
@@ -1526,7 +1526,7 @@ class SegmentService:
             if segment.enabled != action:
                 if not action:
                     segment.enabled = action
-                    segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                    segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                     segment.disabled_by = current_user.id
                     db.session.add(segment)
                     db.session.commit()
@@ -1585,10 +1585,10 @@ class SegmentService:
                 segment.word_count = len(content)
                 segment.tokens = tokens
                 segment.status = "completed"
-                segment.indexing_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
-                segment.completed_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment.indexing_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+                segment.completed_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 segment.updated_by = current_user.id
-                segment.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 segment.enabled = True
                 segment.disabled_at = None
                 segment.disabled_by = None
@@ -1608,7 +1608,7 @@ class SegmentService:
         except Exception as e:
             logging.exception("update segment index failed")
             segment.enabled = False
-            segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             segment.status = "error"
             segment.error = str(e)
             db.session.commit()
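
The expression datetime.datetime.now(datetime.UTC).replace(tzinfo=None) now appears dozens of times across the services and tasks in this commit. A hypothetical helper (not in this commit; the name is illustrative) would shrink future migrations like this one to a single line:

    import datetime

    def naive_utc_now() -> datetime.datetime:
        """Current UTC time with tzinfo stripped, matching the naive-UTC column convention."""
        return datetime.datetime.now(datetime.UTC).replace(tzinfo=None)

    print(naive_utc_now())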

+ 2 - 2
api/services/external_knowledge_service.py

@@ -1,6 +1,6 @@
 import json
 from copy import deepcopy
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, Optional, Union
 
 import httpx
@@ -99,7 +99,7 @@ class ExternalDatasetService:
         external_knowledge_api.description = args.get("description", "")
         external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False)
         external_knowledge_api.updated_by = user_id
-        external_knowledge_api.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        external_knowledge_api.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return external_knowledge_api

+ 2 - 2
api/services/feature_service.py

@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import StrEnum
 
 from pydantic import BaseModel, ConfigDict
 
@@ -22,7 +22,7 @@ class LimitationModel(BaseModel):
     limit: int = 0
 
 
-class LicenseStatus(str, Enum):
+class LicenseStatus(StrEnum):
     NONE = "none"
     INACTIVE = "inactive"
     ACTIVE = "active"

+ 3 - 3
api/services/file_service.py

@@ -77,7 +77,7 @@ class FileService:
             mime_type=mimetype,
             created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
             created_by=user.id,
-            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
             used=False,
             hash=hashlib.sha3_256(content).hexdigest(),
             source_url=source_url,
@@ -123,10 +123,10 @@ class FileService:
             mime_type="text/plain",
             created_by=current_user.id,
             created_by_role=CreatedByRole.ACCOUNT,
-            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
             used=True,
             used_by=current_user.id,
-            used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
         )
 
         db.session.add(upload_file)

+ 1 - 1
api/services/model_load_balancing_service.py

@@ -371,7 +371,7 @@ class ModelLoadBalancingService:
 
                 load_balancing_config.name = name
                 load_balancing_config.enabled = enabled
-                load_balancing_config.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                load_balancing_config.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.commit()
 
                 self._clear_credentials_cache(tenant_id, config_id)

+ 2 - 2
api/services/recommend_app/recommend_app_type.py

@@ -1,7 +1,7 @@
-from enum import Enum
+from enum import StrEnum
 
 
-class RecommendAppType(str, Enum):
+class RecommendAppType(StrEnum):
     REMOTE = "remote"
     BUILDIN = "builtin"
     DATABASE = "db"

+ 5 - 5
api/services/workflow_service.py

@@ -1,7 +1,7 @@
 import json
 import time
 from collections.abc import Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Optional
 
 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
@@ -115,7 +115,7 @@ class WorkflowService:
             workflow.graph = json.dumps(graph)
             workflow.features = json.dumps(features)
             workflow.updated_by = account.id
-            workflow.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
             workflow.environment_variables = environment_variables
             workflow.conversation_variables = conversation_variables
 
@@ -148,7 +148,7 @@ class WorkflowService:
             tenant_id=app_model.tenant_id,
             app_id=app_model.id,
             type=draft_workflow.type,
-            version=str(datetime.now(timezone.utc).replace(tzinfo=None)),
+            version=str(datetime.now(UTC).replace(tzinfo=None)),
             graph=draft_workflow.graph,
             features=draft_workflow.features,
             created_by=account.id,
@@ -257,8 +257,8 @@ class WorkflowService:
         workflow_node_execution.elapsed_time = time.perf_counter() - start_at
         workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value
         workflow_node_execution.created_by = account.id
-        workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
-        workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
 
         if run_succeeded and node_run_result:
             # create workflow node execution

+ 1 - 1
api/tasks/add_document_to_index_task.py

@@ -74,7 +74,7 @@ def add_document_to_index_task(dataset_document_id: str):
     except Exception as e:
         logging.exception("add document to index failed")
         dataset_document.enabled = False
-        dataset_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         dataset_document.status = "error"
         dataset_document.error = str(e)
         db.session.commit()

+ 1 - 1
api/tasks/annotation/enable_annotation_reply_task.py

@@ -52,7 +52,7 @@ def enable_annotation_reply_task(
             annotation_setting.score_threshold = score_threshold
             annotation_setting.collection_binding_id = dataset_collection_binding.id
             annotation_setting.updated_user_id = user_id
-            annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(annotation_setting)
         else:
             new_app_annotation_setting = AppAnnotationSetting(

+ 2 - 2
api/tasks/batch_create_segment_to_index_task.py

@@ -80,9 +80,9 @@ def batch_create_segment_to_index_task(
                 word_count=len(content),
                 tokens=tokens,
                 created_by=user_id,
-                indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 status="completed",
-                completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
             )
             if dataset_document.doc_form == "qa_model":
                 segment_document.answer = segment["answer"]

+ 3 - 3
api/tasks/create_segment_to_index_task.py

@@ -38,7 +38,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
         # update segment status to indexing
         update_params = {
             DocumentSegment.status: "indexing",
-            DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
         }
         DocumentSegment.query.filter_by(id=segment.id).update(update_params)
         db.session.commit()
@@ -75,7 +75,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
         # update segment to completed
         update_params = {
             DocumentSegment.status: "completed",
-            DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
         }
         DocumentSegment.query.filter_by(id=segment.id).update(update_params)
         db.session.commit()
@@ -87,7 +87,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
     except Exception as e:
         logging.exception("create segment to index failed")
         segment.enabled = False
-        segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         segment.status = "error"
         segment.error = str(e)
         db.session.commit()

+ 1 - 1
api/tasks/document_indexing_sync_task.py

@@ -67,7 +67,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
         # check the page is updated
         if last_edited_time != page_edited_time:
             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.commit()
 
             # delete all document segment and index

+ 2 - 2
api/tasks/document_indexing_task.py

@@ -50,7 +50,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
             if document:
                 document.indexing_status = "error"
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(document)
         db.session.commit()
         return
@@ -64,7 +64,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
 
         if document:
             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             documents.append(document)
             db.session.add(document)
     db.session.commit()

+ 1 - 1
api/tasks/document_indexing_update_task.py

@@ -30,7 +30,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
         raise NotFound("Document not found")
 
     document.indexing_status = "parsing"
-    document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+    document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     db.session.commit()
 
     # delete all document segment and index

+ 1 - 1
api/tasks/enable_segment_to_index_task.py

@@ -71,7 +71,7 @@ def enable_segment_to_index_task(segment_id: str):
     except Exception as e:
         logging.exception("enable segment to index failed")
         segment.enabled = False
-        segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         segment.status = "error"
         segment.error = str(e)
         db.session.commit()

+ 3 - 3
api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py

@@ -1,6 +1,6 @@
 import uuid
 from collections.abc import Generator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
 from core.workflow.entities.variable_pool import VariablePool
 from core.workflow.enums import SystemVariableKey
@@ -29,7 +29,7 @@ def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngine
 
 
 def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]:
-    route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(timezone.utc).replace(tzinfo=None))
+    route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(UTC).replace(tzinfo=None))
 
     parallel_id = graph.node_parallel_mapping.get(next_node_id)
     parallel_start_node_id = None
@@ -68,7 +68,7 @@ def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEve
             )
 
     route_node_state.status = RouteNodeState.Status.SUCCESS
-    route_node_state.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+    route_node_state.finished_at = datetime.now(UTC).replace(tzinfo=None)
     yield NodeRunSucceededEvent(
         id=node_execution_id,
         node_id=next_node_id,