# datasets.py — console API controllers for dataset management
  1. # -*- coding:utf-8 -*-
  2. from flask import request
  3. from flask_login import login_required, current_user
  4. from flask_restful import Resource, reqparse, fields, marshal, marshal_with
  5. from werkzeug.exceptions import NotFound, Forbidden
  6. import services
  7. from controllers.console import api
  8. from controllers.console.app.error import ProviderNotInitializeError
  9. from controllers.console.datasets.error import DatasetNameDuplicateError
  10. from controllers.console.setup import setup_required
  11. from controllers.console.wraps import account_initialization_required
  12. from core.indexing_runner import IndexingRunner
  13. from core.model_providers.error import LLMBadRequestError
  14. from core.model_providers.model_factory import ModelFactory
  15. from libs.helper import TimestampField
  16. from extensions.ext_database import db
  17. from models.dataset import DocumentSegment, Document
  18. from models.model import UploadFile
  19. from services.dataset_service import DatasetService, DocumentService
  20. dataset_detail_fields = {
  21. 'id': fields.String,
  22. 'name': fields.String,
  23. 'description': fields.String,
  24. 'provider': fields.String,
  25. 'permission': fields.String,
  26. 'data_source_type': fields.String,
  27. 'indexing_technique': fields.String,
  28. 'app_count': fields.Integer,
  29. 'document_count': fields.Integer,
  30. 'word_count': fields.Integer,
  31. 'created_by': fields.String,
  32. 'created_at': TimestampField,
  33. 'updated_by': fields.String,
  34. 'updated_at': TimestampField,
  35. }
  36. dataset_query_detail_fields = {
  37. "id": fields.String,
  38. "content": fields.String,
  39. "source": fields.String,
  40. "source_app_id": fields.String,
  41. "created_by_role": fields.String,
  42. "created_by": fields.String,
  43. "created_at": TimestampField
  44. }
  45. def _validate_name(name):
  46. if not name or len(name) < 1 or len(name) > 40:
  47. raise ValueError('Name must be between 1 to 40 characters.')
  48. return name
  49. def _validate_description_length(description):
  50. if len(description) > 400:
  51. raise ValueError('Description cannot exceed 400 characters.')
  52. return description
  53. class DatasetListApi(Resource):
  54. @setup_required
  55. @login_required
  56. @account_initialization_required
  57. def get(self):
  58. page = request.args.get('page', default=1, type=int)
  59. limit = request.args.get('limit', default=20, type=int)
  60. ids = request.args.getlist('ids')
  61. provider = request.args.get('provider', default="vendor")
  62. if ids:
  63. datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
  64. else:
  65. datasets, total = DatasetService.get_datasets(page, limit, provider,
  66. current_user.current_tenant_id, current_user)
  67. response = {
  68. 'data': marshal(datasets, dataset_detail_fields),
  69. 'has_more': len(datasets) == limit,
  70. 'limit': limit,
  71. 'total': total,
  72. 'page': page
  73. }
  74. return response, 200
  75. @setup_required
  76. @login_required
  77. @account_initialization_required
  78. def post(self):
  79. parser = reqparse.RequestParser()
  80. parser.add_argument('name', nullable=False, required=True,
  81. help='type is required. Name must be between 1 to 40 characters.',
  82. type=_validate_name)
  83. parser.add_argument('indexing_technique', type=str, location='json',
  84. choices=('high_quality', 'economy'),
  85. help='Invalid indexing technique.')
  86. args = parser.parse_args()
  87. # The role of the current user in the ta table must be admin or owner
  88. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  89. raise Forbidden()
  90. try:
  91. ModelFactory.get_embedding_model(
  92. tenant_id=current_user.current_tenant_id
  93. )
  94. except LLMBadRequestError:
  95. raise ProviderNotInitializeError(
  96. f"No Embedding Model available. Please configure a valid provider "
  97. f"in the Settings -> Model Provider.")
  98. try:
  99. dataset = DatasetService.create_empty_dataset(
  100. tenant_id=current_user.current_tenant_id,
  101. name=args['name'],
  102. indexing_technique=args['indexing_technique'],
  103. account=current_user
  104. )
  105. except services.errors.dataset.DatasetNameDuplicateError:
  106. raise DatasetNameDuplicateError()
  107. return marshal(dataset, dataset_detail_fields), 201
  108. class DatasetApi(Resource):
  109. @setup_required
  110. @login_required
  111. @account_initialization_required
  112. def get(self, dataset_id):
  113. dataset_id_str = str(dataset_id)
  114. dataset = DatasetService.get_dataset(dataset_id_str)
  115. if dataset is None:
  116. raise NotFound("Dataset not found.")
  117. try:
  118. DatasetService.check_dataset_permission(
  119. dataset, current_user)
  120. except services.errors.account.NoPermissionError as e:
  121. raise Forbidden(str(e))
  122. return marshal(dataset, dataset_detail_fields), 200
  123. @setup_required
  124. @login_required
  125. @account_initialization_required
  126. def patch(self, dataset_id):
  127. dataset_id_str = str(dataset_id)
  128. parser = reqparse.RequestParser()
  129. parser.add_argument('name', nullable=False,
  130. help='type is required. Name must be between 1 to 40 characters.',
  131. type=_validate_name)
  132. parser.add_argument('description',
  133. location='json', store_missing=False,
  134. type=_validate_description_length)
  135. parser.add_argument('indexing_technique', type=str, location='json',
  136. choices=('high_quality', 'economy'),
  137. help='Invalid indexing technique.')
  138. parser.add_argument('permission', type=str, location='json', choices=(
  139. 'only_me', 'all_team_members'), help='Invalid permission.')
  140. args = parser.parse_args()
  141. # The role of the current user in the ta table must be admin or owner
  142. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  143. raise Forbidden()
  144. dataset = DatasetService.update_dataset(
  145. dataset_id_str, args, current_user)
  146. if dataset is None:
  147. raise NotFound("Dataset not found.")
  148. return marshal(dataset, dataset_detail_fields), 200
  149. @setup_required
  150. @login_required
  151. @account_initialization_required
  152. def delete(self, dataset_id):
  153. dataset_id_str = str(dataset_id)
  154. # The role of the current user in the ta table must be admin or owner
  155. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  156. raise Forbidden()
  157. if DatasetService.delete_dataset(dataset_id_str, current_user):
  158. return {'result': 'success'}, 204
  159. else:
  160. raise NotFound("Dataset not found.")
  161. class DatasetQueryApi(Resource):
  162. @setup_required
  163. @login_required
  164. @account_initialization_required
  165. def get(self, dataset_id):
  166. dataset_id_str = str(dataset_id)
  167. dataset = DatasetService.get_dataset(dataset_id_str)
  168. if dataset is None:
  169. raise NotFound("Dataset not found.")
  170. try:
  171. DatasetService.check_dataset_permission(dataset, current_user)
  172. except services.errors.account.NoPermissionError as e:
  173. raise Forbidden(str(e))
  174. page = request.args.get('page', default=1, type=int)
  175. limit = request.args.get('limit', default=20, type=int)
  176. dataset_queries, total = DatasetService.get_dataset_queries(
  177. dataset_id=dataset.id,
  178. page=page,
  179. per_page=limit
  180. )
  181. response = {
  182. 'data': marshal(dataset_queries, dataset_query_detail_fields),
  183. 'has_more': len(dataset_queries) == limit,
  184. 'limit': limit,
  185. 'total': total,
  186. 'page': page
  187. }
  188. return response, 200
  189. class DatasetIndexingEstimateApi(Resource):
  190. @setup_required
  191. @login_required
  192. @account_initialization_required
  193. def post(self):
  194. parser = reqparse.RequestParser()
  195. parser.add_argument('info_list', type=dict, required=True, nullable=True, location='json')
  196. parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
  197. parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
  198. args = parser.parse_args()
  199. # validate args
  200. DocumentService.estimate_args_validate(args)
  201. if args['info_list']['data_source_type'] == 'upload_file':
  202. file_ids = args['info_list']['file_info_list']['file_ids']
  203. file_details = db.session.query(UploadFile).filter(
  204. UploadFile.tenant_id == current_user.current_tenant_id,
  205. UploadFile.id.in_(file_ids)
  206. ).all()
  207. if file_details is None:
  208. raise NotFound("File not found.")
  209. indexing_runner = IndexingRunner()
  210. try:
  211. response = indexing_runner.file_indexing_estimate(current_user.current_tenant_id, file_details,
  212. args['process_rule'], args['doc_form'])
  213. except LLMBadRequestError:
  214. raise ProviderNotInitializeError(
  215. f"No Embedding Model available. Please configure a valid provider "
  216. f"in the Settings -> Model Provider.")
  217. elif args['info_list']['data_source_type'] == 'notion_import':
  218. indexing_runner = IndexingRunner()
  219. try:
  220. response = indexing_runner.notion_indexing_estimate(current_user.current_tenant_id,
  221. args['info_list']['notion_info_list'],
  222. args['process_rule'], args['doc_form'])
  223. except LLMBadRequestError:
  224. raise ProviderNotInitializeError(
  225. f"No Embedding Model available. Please configure a valid provider "
  226. f"in the Settings -> Model Provider.")
  227. else:
  228. raise ValueError('Data source type not support')
  229. return response, 200
  230. class DatasetRelatedAppListApi(Resource):
  231. app_detail_kernel_fields = {
  232. 'id': fields.String,
  233. 'name': fields.String,
  234. 'mode': fields.String,
  235. 'icon': fields.String,
  236. 'icon_background': fields.String,
  237. }
  238. related_app_list = {
  239. 'data': fields.List(fields.Nested(app_detail_kernel_fields)),
  240. 'total': fields.Integer,
  241. }
  242. @setup_required
  243. @login_required
  244. @account_initialization_required
  245. @marshal_with(related_app_list)
  246. def get(self, dataset_id):
  247. dataset_id_str = str(dataset_id)
  248. dataset = DatasetService.get_dataset(dataset_id_str)
  249. if dataset is None:
  250. raise NotFound("Dataset not found.")
  251. try:
  252. DatasetService.check_dataset_permission(dataset, current_user)
  253. except services.errors.account.NoPermissionError as e:
  254. raise Forbidden(str(e))
  255. app_dataset_joins = DatasetService.get_related_apps(dataset.id)
  256. related_apps = []
  257. for app_dataset_join in app_dataset_joins:
  258. app_model = app_dataset_join.app
  259. if app_model:
  260. related_apps.append(app_model)
  261. return {
  262. 'data': related_apps,
  263. 'total': len(related_apps)
  264. }, 200
  265. class DatasetIndexingStatusApi(Resource):
  266. document_status_fields = {
  267. 'id': fields.String,
  268. 'indexing_status': fields.String,
  269. 'processing_started_at': TimestampField,
  270. 'parsing_completed_at': TimestampField,
  271. 'cleaning_completed_at': TimestampField,
  272. 'splitting_completed_at': TimestampField,
  273. 'completed_at': TimestampField,
  274. 'paused_at': TimestampField,
  275. 'error': fields.String,
  276. 'stopped_at': TimestampField,
  277. 'completed_segments': fields.Integer,
  278. 'total_segments': fields.Integer,
  279. }
  280. document_status_fields_list = {
  281. 'data': fields.List(fields.Nested(document_status_fields))
  282. }
  283. @setup_required
  284. @login_required
  285. @account_initialization_required
  286. def get(self, dataset_id):
  287. dataset_id = str(dataset_id)
  288. documents = db.session.query(Document).filter(
  289. Document.dataset_id == dataset_id,
  290. Document.tenant_id == current_user.current_tenant_id
  291. ).all()
  292. documents_status = []
  293. for document in documents:
  294. completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
  295. DocumentSegment.document_id == str(document.id),
  296. DocumentSegment.status != 're_segment').count()
  297. total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
  298. DocumentSegment.status != 're_segment').count()
  299. document.completed_segments = completed_segments
  300. document.total_segments = total_segments
  301. documents_status.append(marshal(document, self.document_status_fields))
  302. data = {
  303. 'data': documents_status
  304. }
  305. return data
# Route registrations: bind each resource class to its console API path.
api.add_resource(DatasetListApi, '/datasets')
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
api.add_resource(DatasetRelatedAppListApi, '/datasets/<uuid:dataset_id>/related-apps')
api.add_resource(DatasetIndexingStatusApi, '/datasets/<uuid:dataset_id>/indexing-status')