datasets_segments.py 18 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426
  1. # -*- coding:utf-8 -*-
  2. import uuid
  3. from datetime import datetime
  4. from flask import request
  5. from flask_login import current_user
  6. from flask_restful import Resource, reqparse, marshal
  7. from werkzeug.exceptions import NotFound, Forbidden
  8. import services
  9. from controllers.console import api
  10. from controllers.console.app.error import ProviderNotInitializeError
  11. from controllers.console.datasets.error import InvalidActionError, NoFileUploadedError, TooManyFilesError
  12. from controllers.console.setup import setup_required
  13. from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
  14. from core.model_providers.error import LLMBadRequestError, ProviderTokenNotInitError
  15. from core.model_providers.model_factory import ModelFactory
  16. from libs.login import login_required
  17. from extensions.ext_database import db
  18. from extensions.ext_redis import redis_client
  19. from fields.segment_fields import segment_fields
  20. from models.dataset import DocumentSegment
  21. from services.dataset_service import DatasetService, DocumentService, SegmentService
  22. from tasks.enable_segment_to_index_task import enable_segment_to_index_task
  23. from tasks.disable_segment_from_index_task import disable_segment_from_index_task
  24. from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task
  25. import pandas as pd
  26. class DatasetDocumentSegmentListApi(Resource):
  27. @setup_required
  28. @login_required
  29. @account_initialization_required
  30. def get(self, dataset_id, document_id):
  31. dataset_id = str(dataset_id)
  32. document_id = str(document_id)
  33. dataset = DatasetService.get_dataset(dataset_id)
  34. if not dataset:
  35. raise NotFound('Dataset not found.')
  36. try:
  37. DatasetService.check_dataset_permission(dataset, current_user)
  38. except services.errors.account.NoPermissionError as e:
  39. raise Forbidden(str(e))
  40. document = DocumentService.get_document(dataset_id, document_id)
  41. if not document:
  42. raise NotFound('Document not found.')
  43. parser = reqparse.RequestParser()
  44. parser.add_argument('last_id', type=str, default=None, location='args')
  45. parser.add_argument('limit', type=int, default=20, location='args')
  46. parser.add_argument('status', type=str,
  47. action='append', default=[], location='args')
  48. parser.add_argument('hit_count_gte', type=int,
  49. default=None, location='args')
  50. parser.add_argument('enabled', type=str, default='all', location='args')
  51. parser.add_argument('keyword', type=str, default=None, location='args')
  52. args = parser.parse_args()
  53. last_id = args['last_id']
  54. limit = min(args['limit'], 100)
  55. status_list = args['status']
  56. hit_count_gte = args['hit_count_gte']
  57. keyword = args['keyword']
  58. query = DocumentSegment.query.filter(
  59. DocumentSegment.document_id == str(document_id),
  60. DocumentSegment.tenant_id == current_user.current_tenant_id
  61. )
  62. if last_id is not None:
  63. last_segment = DocumentSegment.query.get(str(last_id))
  64. if last_segment:
  65. query = query.filter(
  66. DocumentSegment.position > last_segment.position)
  67. else:
  68. return {'data': [], 'has_more': False, 'limit': limit}, 200
  69. if status_list:
  70. query = query.filter(DocumentSegment.status.in_(status_list))
  71. if hit_count_gte is not None:
  72. query = query.filter(DocumentSegment.hit_count >= hit_count_gte)
  73. if keyword:
  74. query = query.where(DocumentSegment.content.ilike(f'%{keyword}%'))
  75. if args['enabled'].lower() != 'all':
  76. if args['enabled'].lower() == 'true':
  77. query = query.filter(DocumentSegment.enabled == True)
  78. elif args['enabled'].lower() == 'false':
  79. query = query.filter(DocumentSegment.enabled == False)
  80. total = query.count()
  81. segments = query.order_by(DocumentSegment.position).limit(limit + 1).all()
  82. has_more = False
  83. if len(segments) > limit:
  84. has_more = True
  85. segments = segments[:-1]
  86. return {
  87. 'data': marshal(segments, segment_fields),
  88. 'doc_form': document.doc_form,
  89. 'has_more': has_more,
  90. 'limit': limit,
  91. 'total': total
  92. }, 200
  93. class DatasetDocumentSegmentApi(Resource):
  94. @setup_required
  95. @login_required
  96. @account_initialization_required
  97. @cloud_edition_billing_resource_check('vector_space')
  98. def patch(self, dataset_id, segment_id, action):
  99. dataset_id = str(dataset_id)
  100. dataset = DatasetService.get_dataset(dataset_id)
  101. if not dataset:
  102. raise NotFound('Dataset not found.')
  103. # check user's model setting
  104. DatasetService.check_dataset_model_setting(dataset)
  105. # The role of the current user in the ta table must be admin or owner
  106. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  107. raise Forbidden()
  108. try:
  109. DatasetService.check_dataset_permission(dataset, current_user)
  110. except services.errors.account.NoPermissionError as e:
  111. raise Forbidden(str(e))
  112. if dataset.indexing_technique == 'high_quality':
  113. # check embedding model setting
  114. try:
  115. ModelFactory.get_embedding_model(
  116. tenant_id=current_user.current_tenant_id,
  117. model_provider_name=dataset.embedding_model_provider,
  118. model_name=dataset.embedding_model
  119. )
  120. except LLMBadRequestError:
  121. raise ProviderNotInitializeError(
  122. f"No Embedding Model available. Please configure a valid provider "
  123. f"in the Settings -> Model Provider.")
  124. except ProviderTokenNotInitError as ex:
  125. raise ProviderNotInitializeError(ex.description)
  126. segment = DocumentSegment.query.filter(
  127. DocumentSegment.id == str(segment_id),
  128. DocumentSegment.tenant_id == current_user.current_tenant_id
  129. ).first()
  130. if not segment:
  131. raise NotFound('Segment not found.')
  132. document_indexing_cache_key = 'document_{}_indexing'.format(segment.document_id)
  133. cache_result = redis_client.get(document_indexing_cache_key)
  134. if cache_result is not None:
  135. raise InvalidActionError("Document is being indexed, please try again later")
  136. indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
  137. cache_result = redis_client.get(indexing_cache_key)
  138. if cache_result is not None:
  139. raise InvalidActionError("Segment is being indexed, please try again later")
  140. if action == "enable":
  141. if segment.enabled:
  142. raise InvalidActionError("Segment is already enabled.")
  143. segment.enabled = True
  144. segment.disabled_at = None
  145. segment.disabled_by = None
  146. db.session.commit()
  147. # Set cache to prevent indexing the same segment multiple times
  148. redis_client.setex(indexing_cache_key, 600, 1)
  149. enable_segment_to_index_task.delay(segment.id)
  150. return {'result': 'success'}, 200
  151. elif action == "disable":
  152. if not segment.enabled:
  153. raise InvalidActionError("Segment is already disabled.")
  154. segment.enabled = False
  155. segment.disabled_at = datetime.utcnow()
  156. segment.disabled_by = current_user.id
  157. db.session.commit()
  158. # Set cache to prevent indexing the same segment multiple times
  159. redis_client.setex(indexing_cache_key, 600, 1)
  160. disable_segment_from_index_task.delay(segment.id)
  161. return {'result': 'success'}, 200
  162. else:
  163. raise InvalidActionError()
  164. class DatasetDocumentSegmentAddApi(Resource):
  165. @setup_required
  166. @login_required
  167. @account_initialization_required
  168. @cloud_edition_billing_resource_check('vector_space')
  169. def post(self, dataset_id, document_id):
  170. # check dataset
  171. dataset_id = str(dataset_id)
  172. dataset = DatasetService.get_dataset(dataset_id)
  173. if not dataset:
  174. raise NotFound('Dataset not found.')
  175. # check document
  176. document_id = str(document_id)
  177. document = DocumentService.get_document(dataset_id, document_id)
  178. if not document:
  179. raise NotFound('Document not found.')
  180. # The role of the current user in the ta table must be admin or owner
  181. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  182. raise Forbidden()
  183. # check embedding model setting
  184. if dataset.indexing_technique == 'high_quality':
  185. try:
  186. ModelFactory.get_embedding_model(
  187. tenant_id=current_user.current_tenant_id,
  188. model_provider_name=dataset.embedding_model_provider,
  189. model_name=dataset.embedding_model
  190. )
  191. except LLMBadRequestError:
  192. raise ProviderNotInitializeError(
  193. f"No Embedding Model available. Please configure a valid provider "
  194. f"in the Settings -> Model Provider.")
  195. except ProviderTokenNotInitError as ex:
  196. raise ProviderNotInitializeError(ex.description)
  197. try:
  198. DatasetService.check_dataset_permission(dataset, current_user)
  199. except services.errors.account.NoPermissionError as e:
  200. raise Forbidden(str(e))
  201. # validate args
  202. parser = reqparse.RequestParser()
  203. parser.add_argument('content', type=str, required=True, nullable=False, location='json')
  204. parser.add_argument('answer', type=str, required=False, nullable=True, location='json')
  205. parser.add_argument('keywords', type=list, required=False, nullable=True, location='json')
  206. args = parser.parse_args()
  207. SegmentService.segment_create_args_validate(args, document)
  208. segment = SegmentService.create_segment(args, document, dataset)
  209. return {
  210. 'data': marshal(segment, segment_fields),
  211. 'doc_form': document.doc_form
  212. }, 200
  213. class DatasetDocumentSegmentUpdateApi(Resource):
  214. @setup_required
  215. @login_required
  216. @account_initialization_required
  217. @cloud_edition_billing_resource_check('vector_space')
  218. def patch(self, dataset_id, document_id, segment_id):
  219. # check dataset
  220. dataset_id = str(dataset_id)
  221. dataset = DatasetService.get_dataset(dataset_id)
  222. if not dataset:
  223. raise NotFound('Dataset not found.')
  224. # check user's model setting
  225. DatasetService.check_dataset_model_setting(dataset)
  226. # check document
  227. document_id = str(document_id)
  228. document = DocumentService.get_document(dataset_id, document_id)
  229. if not document:
  230. raise NotFound('Document not found.')
  231. if dataset.indexing_technique == 'high_quality':
  232. # check embedding model setting
  233. try:
  234. ModelFactory.get_embedding_model(
  235. tenant_id=current_user.current_tenant_id,
  236. model_provider_name=dataset.embedding_model_provider,
  237. model_name=dataset.embedding_model
  238. )
  239. except LLMBadRequestError:
  240. raise ProviderNotInitializeError(
  241. f"No Embedding Model available. Please configure a valid provider "
  242. f"in the Settings -> Model Provider.")
  243. except ProviderTokenNotInitError as ex:
  244. raise ProviderNotInitializeError(ex.description)
  245. # check segment
  246. segment_id = str(segment_id)
  247. segment = DocumentSegment.query.filter(
  248. DocumentSegment.id == str(segment_id),
  249. DocumentSegment.tenant_id == current_user.current_tenant_id
  250. ).first()
  251. if not segment:
  252. raise NotFound('Segment not found.')
  253. # The role of the current user in the ta table must be admin or owner
  254. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  255. raise Forbidden()
  256. try:
  257. DatasetService.check_dataset_permission(dataset, current_user)
  258. except services.errors.account.NoPermissionError as e:
  259. raise Forbidden(str(e))
  260. # validate args
  261. parser = reqparse.RequestParser()
  262. parser.add_argument('content', type=str, required=True, nullable=False, location='json')
  263. parser.add_argument('answer', type=str, required=False, nullable=True, location='json')
  264. parser.add_argument('keywords', type=list, required=False, nullable=True, location='json')
  265. args = parser.parse_args()
  266. SegmentService.segment_create_args_validate(args, document)
  267. segment = SegmentService.update_segment(args, segment, document, dataset)
  268. return {
  269. 'data': marshal(segment, segment_fields),
  270. 'doc_form': document.doc_form
  271. }, 200
  272. @setup_required
  273. @login_required
  274. @account_initialization_required
  275. def delete(self, dataset_id, document_id, segment_id):
  276. # check dataset
  277. dataset_id = str(dataset_id)
  278. dataset = DatasetService.get_dataset(dataset_id)
  279. if not dataset:
  280. raise NotFound('Dataset not found.')
  281. # check user's model setting
  282. DatasetService.check_dataset_model_setting(dataset)
  283. # check document
  284. document_id = str(document_id)
  285. document = DocumentService.get_document(dataset_id, document_id)
  286. if not document:
  287. raise NotFound('Document not found.')
  288. # check segment
  289. segment_id = str(segment_id)
  290. segment = DocumentSegment.query.filter(
  291. DocumentSegment.id == str(segment_id),
  292. DocumentSegment.tenant_id == current_user.current_tenant_id
  293. ).first()
  294. if not segment:
  295. raise NotFound('Segment not found.')
  296. # The role of the current user in the ta table must be admin or owner
  297. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  298. raise Forbidden()
  299. try:
  300. DatasetService.check_dataset_permission(dataset, current_user)
  301. except services.errors.account.NoPermissionError as e:
  302. raise Forbidden(str(e))
  303. SegmentService.delete_segment(segment, document, dataset)
  304. return {'result': 'success'}, 200
  305. class DatasetDocumentSegmentBatchImportApi(Resource):
  306. @setup_required
  307. @login_required
  308. @account_initialization_required
  309. @cloud_edition_billing_resource_check('vector_space')
  310. def post(self, dataset_id, document_id):
  311. # check dataset
  312. dataset_id = str(dataset_id)
  313. dataset = DatasetService.get_dataset(dataset_id)
  314. if not dataset:
  315. raise NotFound('Dataset not found.')
  316. # check document
  317. document_id = str(document_id)
  318. document = DocumentService.get_document(dataset_id, document_id)
  319. if not document:
  320. raise NotFound('Document not found.')
  321. # get file from request
  322. file = request.files['file']
  323. # check file
  324. if 'file' not in request.files:
  325. raise NoFileUploadedError()
  326. if len(request.files) > 1:
  327. raise TooManyFilesError()
  328. # check file type
  329. if not file.filename.endswith('.csv'):
  330. raise ValueError("Invalid file type. Only CSV files are allowed")
  331. try:
  332. # Skip the first row
  333. df = pd.read_csv(file)
  334. result = []
  335. for index, row in df.iterrows():
  336. if document.doc_form == 'qa_model':
  337. data = {'content': row[0], 'answer': row[1]}
  338. else:
  339. data = {'content': row[0]}
  340. result.append(data)
  341. if len(result) == 0:
  342. raise ValueError("The CSV file is empty.")
  343. # async job
  344. job_id = str(uuid.uuid4())
  345. indexing_cache_key = 'segment_batch_import_{}'.format(str(job_id))
  346. # send batch add segments task
  347. redis_client.setnx(indexing_cache_key, 'waiting')
  348. batch_create_segment_to_index_task.delay(str(job_id), result, dataset_id, document_id,
  349. current_user.current_tenant_id, current_user.id)
  350. except Exception as e:
  351. return {'error': str(e)}, 500
  352. return {
  353. 'job_id': job_id,
  354. 'job_status': 'waiting'
  355. }, 200
  356. @setup_required
  357. @login_required
  358. @account_initialization_required
  359. def get(self, job_id):
  360. job_id = str(job_id)
  361. indexing_cache_key = 'segment_batch_import_{}'.format(job_id)
  362. cache_result = redis_client.get(indexing_cache_key)
  363. if cache_result is None:
  364. raise ValueError("The job is not exist.")
  365. return {
  366. 'job_id': job_id,
  367. 'job_status': cache_result.decode()
  368. }, 200
# Route registrations for the dataset-segment APIs.
# List segments of a document.
api.add_resource(DatasetDocumentSegmentListApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments')
# Enable/disable one segment (action is 'enable' or 'disable').
api.add_resource(DatasetDocumentSegmentApi,
                 '/datasets/<uuid:dataset_id>/segments/<uuid:segment_id>/<string:action>')
# Create a new segment under a document.
api.add_resource(DatasetDocumentSegmentAddApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment')
# Update (PATCH) or delete (DELETE) one segment.
api.add_resource(DatasetDocumentSegmentUpdateApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>')
# CSV batch import (POST) and job-status polling (GET) share one resource.
api.add_resource(DatasetDocumentSegmentBatchImportApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import',
                 '/datasets/batch_import_status/<uuid:job_id>')