data_source.py

import datetime
import json

from flask import request
from flask_login import current_user  # type: ignore
from flask_restful import Resource, marshal_with, reqparse  # type: ignore
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound

from controllers.console import api
from controllers.console.wraps import account_initialization_required, setup_required
from core.indexing_runner import IndexingRunner
from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from libs.login import login_required
from models import DataSourceOauthBinding, Document
from services.dataset_service import DatasetService, DocumentService
from tasks.document_indexing_sync_task import document_indexing_sync_task


class DataSourceApi(Resource):
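    """Workspace data source integrations: list OAuth bindings per provider and enable/disable a binding."""
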
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(integrate_list_fields)
    def get(self):
        # get workspace data source integrates
        data_source_integrates = (
            db.session.query(DataSourceOauthBinding)
            .filter(
                DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
                DataSourceOauthBinding.disabled == False,
            )
            .all()
        )

        base_url = request.url_root.rstrip("/")
        data_source_oauth_base_path = "/console/api/oauth/data-source"
        providers = ["notion"]

        integrate_data = []
        for provider in providers:
            # existing_integrate = next((ai for ai in data_source_integrates if ai.provider == provider), None)
            # materialize the filter into a list so the emptiness check below works
            # (a lazy filter object is always truthy, even when it yields nothing)
            existing_integrates = [item for item in data_source_integrates if item.provider == provider]
            if existing_integrates:
                for existing_integrate in existing_integrates:
                    integrate_data.append(
                        {
                            "id": existing_integrate.id,
                            "provider": provider,
                            "created_at": existing_integrate.created_at,
                            "is_bound": True,
                            "disabled": existing_integrate.disabled,
                            "source_info": existing_integrate.source_info,
                            "link": f"{base_url}{data_source_oauth_base_path}/{provider}",
                        }
                    )
            else:
                integrate_data.append(
                    {
                        "id": None,
                        "provider": provider,
                        "created_at": None,
                        "source_info": None,
                        "is_bound": False,
                        "disabled": None,
                        "link": f"{base_url}{data_source_oauth_base_path}/{provider}",
                    }
                )
        return {"data": integrate_data}, 200

    @setup_required
    @login_required
    @account_initialization_required
    def patch(self, binding_id, action):
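        """Enable or disable a data source OAuth binding (action is "enable" or "disable")."""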
        binding_id = str(binding_id)
        action = str(action)
        with Session(db.engine) as session:
            data_source_binding = session.execute(
                select(DataSourceOauthBinding).filter_by(id=binding_id)
            ).scalar_one_or_none()
        if data_source_binding is None:
            raise NotFound("Data source binding not found.")
        # enable binding
        if action == "enable":
            if data_source_binding.disabled:
                data_source_binding.disabled = False
                data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                db.session.add(data_source_binding)
                db.session.commit()
            else:
                raise ValueError("Data source is not disabled.")
        # disable binding
        if action == "disable":
            if not data_source_binding.disabled:
                data_source_binding.disabled = True
                data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                db.session.add(data_source_binding)
                db.session.commit()
            else:
                raise ValueError("Data source is disabled.")
        return {"result": "success"}, 200


class DataSourceNotionListApi(Resource):
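    """List the Notion pages the workspace is authorized to import, marking pages already bound to a dataset."""
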
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(integrate_notion_info_list_fields)
    def get(self):
        dataset_id = request.args.get("dataset_id", default=None, type=str)
        exist_page_ids = []
        with Session(db.engine) as session:
            # importing into an existing dataset: collect the Notion page ids already bound to its documents
            if dataset_id:
                dataset = DatasetService.get_dataset(dataset_id)
                if not dataset:
                    raise NotFound("Dataset not found.")
                if dataset.data_source_type != "notion_import":
                    raise ValueError("Dataset is not notion type.")

                documents = session.scalars(
                    select(Document).filter_by(
                        dataset_id=dataset_id,
                        tenant_id=current_user.current_tenant_id,
                        data_source_type="notion_import",
                        enabled=True,
                    )
                ).all()
                if documents:
                    for document in documents:
                        data_source_info = json.loads(document.data_source_info)
                        exist_page_ids.append(data_source_info["notion_page_id"])

            # get all authorized pages
            data_source_bindings = session.scalars(
                select(DataSourceOauthBinding).filter_by(
                    tenant_id=current_user.current_tenant_id, provider="notion", disabled=False
                )
            ).all()
            if not data_source_bindings:
                return {"notion_info": []}, 200

            pre_import_info_list = []
            for data_source_binding in data_source_bindings:
                source_info = data_source_binding.source_info
                pages = source_info["pages"]
                # mark pages that are already bound to a document in the dataset
                for page in pages:
                    if page["page_id"] in exist_page_ids:
                        page["is_bound"] = True
                    else:
                        page["is_bound"] = False
                pre_import_info = {
                    "workspace_name": source_info["workspace_name"],
                    "workspace_icon": source_info["workspace_icon"],
                    "workspace_id": source_info["workspace_id"],
                    "pages": pages,
                }
                pre_import_info_list.append(pre_import_info)
            return {"notion_info": pre_import_info_list}, 200


class DataSourceNotionApi(Resource):
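    """Preview the content of a Notion page or database and estimate indexing for a Notion import."""
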
    @setup_required
    @login_required
    @account_initialization_required
    def get(self, workspace_id, page_id, page_type):
        workspace_id = str(workspace_id)
        page_id = str(page_id)
        with Session(db.engine) as session:
            data_source_binding = session.execute(
                select(DataSourceOauthBinding).filter(
                    db.and_(
                        DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
                        DataSourceOauthBinding.provider == "notion",
                        DataSourceOauthBinding.disabled == False,
                        DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
                    )
                )
            ).scalar_one_or_none()
        if not data_source_binding:
            raise NotFound("Data source binding not found.")

        extractor = NotionExtractor(
            notion_workspace_id=workspace_id,
            notion_obj_id=page_id,
            notion_page_type=page_type,
            notion_access_token=data_source_binding.access_token,
            tenant_id=current_user.current_tenant_id,
        )

        text_docs = extractor.extract()
        return {"content": "\n".join([doc.page_content for doc in text_docs])}, 200

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
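        """Estimate indexing cost for the selected Notion pages before importing them into a dataset."""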
        parser = reqparse.RequestParser()
        parser.add_argument("notion_info_list", type=list, required=True, nullable=True, location="json")
        parser.add_argument("process_rule", type=dict, required=True, nullable=True, location="json")
        parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json")
        parser.add_argument(
            "doc_language", type=str, default="English", required=False, nullable=False, location="json"
        )
        args = parser.parse_args()
        # validate args
        DocumentService.estimate_args_validate(args)

        notion_info_list = args["notion_info_list"]
        extract_settings = []
        for notion_info in notion_info_list:
            workspace_id = notion_info["workspace_id"]
            for page in notion_info["pages"]:
                extract_setting = ExtractSetting(
                    datasource_type="notion_import",
                    notion_info={
                        "notion_workspace_id": workspace_id,
                        "notion_obj_id": page["page_id"],
                        "notion_page_type": page["type"],
                        "tenant_id": current_user.current_tenant_id,
                    },
                    document_model=args["doc_form"],
                )
                extract_settings.append(extract_setting)

        indexing_runner = IndexingRunner()
        response = indexing_runner.indexing_estimate(
            current_user.current_tenant_id,
            extract_settings,
            args["process_rule"],
            args["doc_form"],
            args["doc_language"],
        )
        return response.model_dump(), 200


class DataSourceNotionDatasetSyncApi(Resource):
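    """Trigger a re-sync from Notion for every document in a dataset."""
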
    @setup_required
    @login_required
    @account_initialization_required
    def get(self, dataset_id):
        dataset_id_str = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id_str)
        if dataset is None:
            raise NotFound("Dataset not found.")

        documents = DocumentService.get_document_by_dataset_id(dataset_id_str)
        for document in documents:
            document_indexing_sync_task.delay(dataset_id_str, document.id)
        return {"result": "success"}, 200


class DataSourceNotionDocumentSyncApi(Resource):
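    """Trigger a re-sync from Notion for a single document in a dataset."""
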
    @setup_required
    @login_required
    @account_initialization_required
    def get(self, dataset_id, document_id):
        dataset_id_str = str(dataset_id)
        document_id_str = str(document_id)
        dataset = DatasetService.get_dataset(dataset_id_str)
        if dataset is None:
            raise NotFound("Dataset not found.")

        document = DocumentService.get_document(dataset_id_str, document_id_str)
        if document is None:
            raise NotFound("Document not found.")

        document_indexing_sync_task.delay(dataset_id_str, document_id_str)
        return {"result": "success"}, 200


api.add_resource(DataSourceApi, "/data-source/integrates", "/data-source/integrates/<uuid:binding_id>/<string:action>")
api.add_resource(DataSourceNotionListApi, "/notion/pre-import/pages")
api.add_resource(
    DataSourceNotionApi,
    "/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview",
    "/datasets/notion-indexing-estimate",
)
api.add_resource(DataSourceNotionDatasetSyncApi, "/datasets/<uuid:dataset_id>/notion/sync")
api.add_resource(
    DataSourceNotionDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync"
)