# datasets.py — console dataset API controllers
  1. # -*- coding:utf-8 -*-
  2. from flask import request
  3. from flask_login import current_user
  4. from core.login.login import login_required
  5. from flask_restful import Resource, reqparse, fields, marshal, marshal_with
  6. from werkzeug.exceptions import NotFound, Forbidden
  7. import services
  8. from controllers.console import api
  9. from controllers.console.app.error import ProviderNotInitializeError
  10. from controllers.console.datasets.error import DatasetNameDuplicateError
  11. from controllers.console.setup import setup_required
  12. from controllers.console.wraps import account_initialization_required
  13. from core.indexing_runner import IndexingRunner
  14. from core.model_providers.error import LLMBadRequestError, ProviderTokenNotInitError
  15. from core.model_providers.model_factory import ModelFactory
  16. from core.model_providers.models.entity.model_params import ModelType
  17. from libs.helper import TimestampField
  18. from extensions.ext_database import db
  19. from models.dataset import DocumentSegment, Document
  20. from models.model import UploadFile
  21. from services.dataset_service import DatasetService, DocumentService
  22. from services.provider_service import ProviderService
# Marshalling schema for a dataset as returned by the console API.
# `embedding_available` is not stored on the model; DatasetListApi.get
# overwrites it after marshalling based on the tenant's valid models.
dataset_detail_fields = {
    'id': fields.String,
    'name': fields.String,
    'description': fields.String,
    'provider': fields.String,
    'permission': fields.String,              # e.g. only_me / all_team_members
    'data_source_type': fields.String,
    'indexing_technique': fields.String,      # high_quality / economy
    'app_count': fields.Integer,
    'document_count': fields.Integer,
    'word_count': fields.Integer,
    'created_by': fields.String,
    'created_at': TimestampField,
    'updated_by': fields.String,
    'updated_at': TimestampField,
    'embedding_model': fields.String,
    'embedding_model_provider': fields.String,
    'embedding_available': fields.Boolean
}
# Marshalling schema for one recorded dataset query (see DatasetQueryApi).
dataset_query_detail_fields = {
    "id": fields.String,
    "content": fields.String,
    "source": fields.String,          # where the query came from (e.g. an app)
    "source_app_id": fields.String,
    "created_by_role": fields.String,
    "created_by": fields.String,
    "created_at": TimestampField
}
  51. def _validate_name(name):
  52. if not name or len(name) < 1 or len(name) > 40:
  53. raise ValueError('Name must be between 1 to 40 characters.')
  54. return name
  55. def _validate_description_length(description):
  56. if len(description) > 400:
  57. raise ValueError('Description cannot exceed 400 characters.')
  58. return description
  59. class DatasetListApi(Resource):
  60. @setup_required
  61. @login_required
  62. @account_initialization_required
  63. def get(self):
  64. page = request.args.get('page', default=1, type=int)
  65. limit = request.args.get('limit', default=20, type=int)
  66. ids = request.args.getlist('ids')
  67. provider = request.args.get('provider', default="vendor")
  68. if ids:
  69. datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
  70. else:
  71. datasets, total = DatasetService.get_datasets(page, limit, provider,
  72. current_user.current_tenant_id, current_user)
  73. # check embedding setting
  74. provider_service = ProviderService()
  75. valid_model_list = provider_service.get_valid_model_list(current_user.current_tenant_id, ModelType.EMBEDDINGS.value)
  76. # if len(valid_model_list) == 0:
  77. # raise ProviderNotInitializeError(
  78. # f"No Embedding Model available. Please configure a valid provider "
  79. # f"in the Settings -> Model Provider.")
  80. model_names = [item['model_name'] for item in valid_model_list]
  81. data = marshal(datasets, dataset_detail_fields)
  82. for item in data:
  83. if item['embedding_model'] in model_names:
  84. item['embedding_available'] = True
  85. else:
  86. item['embedding_available'] = False
  87. response = {
  88. 'data': data,
  89. 'has_more': len(datasets) == limit,
  90. 'limit': limit,
  91. 'total': total,
  92. 'page': page
  93. }
  94. return response, 200
  95. @setup_required
  96. @login_required
  97. @account_initialization_required
  98. def post(self):
  99. parser = reqparse.RequestParser()
  100. parser.add_argument('name', nullable=False, required=True,
  101. help='type is required. Name must be between 1 to 40 characters.',
  102. type=_validate_name)
  103. parser.add_argument('indexing_technique', type=str, location='json',
  104. choices=('high_quality', 'economy'),
  105. help='Invalid indexing technique.')
  106. args = parser.parse_args()
  107. # The role of the current user in the ta table must be admin or owner
  108. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  109. raise Forbidden()
  110. try:
  111. ModelFactory.get_embedding_model(
  112. tenant_id=current_user.current_tenant_id
  113. )
  114. except LLMBadRequestError:
  115. raise ProviderNotInitializeError(
  116. f"No Embedding Model available. Please configure a valid provider "
  117. f"in the Settings -> Model Provider.")
  118. try:
  119. dataset = DatasetService.create_empty_dataset(
  120. tenant_id=current_user.current_tenant_id,
  121. name=args['name'],
  122. indexing_technique=args['indexing_technique'],
  123. account=current_user
  124. )
  125. except services.errors.dataset.DatasetNameDuplicateError:
  126. raise DatasetNameDuplicateError()
  127. return marshal(dataset, dataset_detail_fields), 201
  128. class DatasetApi(Resource):
  129. @setup_required
  130. @login_required
  131. @account_initialization_required
  132. def get(self, dataset_id):
  133. dataset_id_str = str(dataset_id)
  134. dataset = DatasetService.get_dataset(dataset_id_str)
  135. if dataset is None:
  136. raise NotFound("Dataset not found.")
  137. try:
  138. DatasetService.check_dataset_permission(
  139. dataset, current_user)
  140. except services.errors.account.NoPermissionError as e:
  141. raise Forbidden(str(e))
  142. return marshal(dataset, dataset_detail_fields), 200
  143. @setup_required
  144. @login_required
  145. @account_initialization_required
  146. def patch(self, dataset_id):
  147. dataset_id_str = str(dataset_id)
  148. parser = reqparse.RequestParser()
  149. parser.add_argument('name', nullable=False,
  150. help='type is required. Name must be between 1 to 40 characters.',
  151. type=_validate_name)
  152. parser.add_argument('description',
  153. location='json', store_missing=False,
  154. type=_validate_description_length)
  155. parser.add_argument('indexing_technique', type=str, location='json',
  156. choices=('high_quality', 'economy'),
  157. help='Invalid indexing technique.')
  158. parser.add_argument('permission', type=str, location='json', choices=(
  159. 'only_me', 'all_team_members'), help='Invalid permission.')
  160. args = parser.parse_args()
  161. # The role of the current user in the ta table must be admin or owner
  162. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  163. raise Forbidden()
  164. dataset = DatasetService.update_dataset(
  165. dataset_id_str, args, current_user)
  166. if dataset is None:
  167. raise NotFound("Dataset not found.")
  168. return marshal(dataset, dataset_detail_fields), 200
  169. @setup_required
  170. @login_required
  171. @account_initialization_required
  172. def delete(self, dataset_id):
  173. dataset_id_str = str(dataset_id)
  174. # The role of the current user in the ta table must be admin or owner
  175. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  176. raise Forbidden()
  177. if DatasetService.delete_dataset(dataset_id_str, current_user):
  178. return {'result': 'success'}, 204
  179. else:
  180. raise NotFound("Dataset not found.")
  181. class DatasetQueryApi(Resource):
  182. @setup_required
  183. @login_required
  184. @account_initialization_required
  185. def get(self, dataset_id):
  186. dataset_id_str = str(dataset_id)
  187. dataset = DatasetService.get_dataset(dataset_id_str)
  188. if dataset is None:
  189. raise NotFound("Dataset not found.")
  190. try:
  191. DatasetService.check_dataset_permission(dataset, current_user)
  192. except services.errors.account.NoPermissionError as e:
  193. raise Forbidden(str(e))
  194. page = request.args.get('page', default=1, type=int)
  195. limit = request.args.get('limit', default=20, type=int)
  196. dataset_queries, total = DatasetService.get_dataset_queries(
  197. dataset_id=dataset.id,
  198. page=page,
  199. per_page=limit
  200. )
  201. response = {
  202. 'data': marshal(dataset_queries, dataset_query_detail_fields),
  203. 'has_more': len(dataset_queries) == limit,
  204. 'limit': limit,
  205. 'total': total,
  206. 'page': page
  207. }
  208. return response, 200
  209. class DatasetIndexingEstimateApi(Resource):
  210. @setup_required
  211. @login_required
  212. @account_initialization_required
  213. def post(self):
  214. parser = reqparse.RequestParser()
  215. parser.add_argument('info_list', type=dict, required=True, nullable=True, location='json')
  216. parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
  217. parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
  218. parser.add_argument('dataset_id', type=str, required=False, nullable=False, location='json')
  219. parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False, location='json')
  220. args = parser.parse_args()
  221. # validate args
  222. DocumentService.estimate_args_validate(args)
  223. if args['info_list']['data_source_type'] == 'upload_file':
  224. file_ids = args['info_list']['file_info_list']['file_ids']
  225. file_details = db.session.query(UploadFile).filter(
  226. UploadFile.tenant_id == current_user.current_tenant_id,
  227. UploadFile.id.in_(file_ids)
  228. ).all()
  229. if file_details is None:
  230. raise NotFound("File not found.")
  231. indexing_runner = IndexingRunner()
  232. try:
  233. response = indexing_runner.file_indexing_estimate(current_user.current_tenant_id, file_details,
  234. args['process_rule'], args['doc_form'],
  235. args['doc_language'], args['dataset_id'])
  236. except LLMBadRequestError:
  237. raise ProviderNotInitializeError(
  238. f"No Embedding Model available. Please configure a valid provider "
  239. f"in the Settings -> Model Provider.")
  240. except ProviderTokenNotInitError as ex:
  241. raise ProviderNotInitializeError(ex.description)
  242. elif args['info_list']['data_source_type'] == 'notion_import':
  243. indexing_runner = IndexingRunner()
  244. try:
  245. response = indexing_runner.notion_indexing_estimate(current_user.current_tenant_id,
  246. args['info_list']['notion_info_list'],
  247. args['process_rule'], args['doc_form'],
  248. args['doc_language'], args['dataset_id'])
  249. except LLMBadRequestError:
  250. raise ProviderNotInitializeError(
  251. f"No Embedding Model available. Please configure a valid provider "
  252. f"in the Settings -> Model Provider.")
  253. except ProviderTokenNotInitError as ex:
  254. raise ProviderNotInitializeError(ex.description)
  255. else:
  256. raise ValueError('Data source type not support')
  257. return response, 200
  258. class DatasetRelatedAppListApi(Resource):
  259. app_detail_kernel_fields = {
  260. 'id': fields.String,
  261. 'name': fields.String,
  262. 'mode': fields.String,
  263. 'icon': fields.String,
  264. 'icon_background': fields.String,
  265. }
  266. related_app_list = {
  267. 'data': fields.List(fields.Nested(app_detail_kernel_fields)),
  268. 'total': fields.Integer,
  269. }
  270. @setup_required
  271. @login_required
  272. @account_initialization_required
  273. @marshal_with(related_app_list)
  274. def get(self, dataset_id):
  275. dataset_id_str = str(dataset_id)
  276. dataset = DatasetService.get_dataset(dataset_id_str)
  277. if dataset is None:
  278. raise NotFound("Dataset not found.")
  279. try:
  280. DatasetService.check_dataset_permission(dataset, current_user)
  281. except services.errors.account.NoPermissionError as e:
  282. raise Forbidden(str(e))
  283. app_dataset_joins = DatasetService.get_related_apps(dataset.id)
  284. related_apps = []
  285. for app_dataset_join in app_dataset_joins:
  286. app_model = app_dataset_join.app
  287. if app_model:
  288. related_apps.append(app_model)
  289. return {
  290. 'data': related_apps,
  291. 'total': len(related_apps)
  292. }, 200
  293. class DatasetIndexingStatusApi(Resource):
  294. document_status_fields = {
  295. 'id': fields.String,
  296. 'indexing_status': fields.String,
  297. 'processing_started_at': TimestampField,
  298. 'parsing_completed_at': TimestampField,
  299. 'cleaning_completed_at': TimestampField,
  300. 'splitting_completed_at': TimestampField,
  301. 'completed_at': TimestampField,
  302. 'paused_at': TimestampField,
  303. 'error': fields.String,
  304. 'stopped_at': TimestampField,
  305. 'completed_segments': fields.Integer,
  306. 'total_segments': fields.Integer,
  307. }
  308. document_status_fields_list = {
  309. 'data': fields.List(fields.Nested(document_status_fields))
  310. }
  311. @setup_required
  312. @login_required
  313. @account_initialization_required
  314. def get(self, dataset_id):
  315. dataset_id = str(dataset_id)
  316. documents = db.session.query(Document).filter(
  317. Document.dataset_id == dataset_id,
  318. Document.tenant_id == current_user.current_tenant_id
  319. ).all()
  320. documents_status = []
  321. for document in documents:
  322. completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
  323. DocumentSegment.document_id == str(document.id),
  324. DocumentSegment.status != 're_segment').count()
  325. total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
  326. DocumentSegment.status != 're_segment').count()
  327. document.completed_segments = completed_segments
  328. document.total_segments = total_segments
  329. documents_status.append(marshal(document, self.document_status_fields))
  330. data = {
  331. 'data': documents_status
  332. }
  333. return data
  334. api.add_resource(DatasetListApi, '/datasets')
  335. api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
  336. api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
  337. api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
  338. api.add_resource(DatasetRelatedAppListApi, '/datasets/<uuid:dataset_id>/related-apps')
  339. api.add_resource(DatasetIndexingStatusApi, '/datasets/<uuid:dataset_id>/indexing-status')