# enable_segment_to_index_task.py
  1. import datetime
  2. import logging
  3. import time
  4. import click
  5. from celery import shared_task # type: ignore
  6. from werkzeug.exceptions import NotFound
  7. from core.rag.index_processor.constant.index_type import IndexType
  8. from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
  9. from core.rag.models.document import ChildDocument, Document
  10. from extensions.ext_database import db
  11. from extensions.ext_redis import redis_client
  12. from models.dataset import DocumentSegment
  13. @shared_task(queue="dataset")
  14. def enable_segment_to_index_task(segment_id: str):
  15. """
  16. Async enable segment to index
  17. :param segment_id:
  18. Usage: enable_segment_to_index_task.delay(segment_id)
  19. """
  20. logging.info(click.style("Start enable segment to index: {}".format(segment_id), fg="green"))
  21. start_at = time.perf_counter()
  22. segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
  23. if not segment:
  24. raise NotFound("Segment not found")
  25. if segment.status != "completed":
  26. raise NotFound("Segment is not completed, enable action is not allowed.")
  27. indexing_cache_key = "segment_{}_indexing".format(segment.id)
  28. try:
  29. document = Document(
  30. page_content=segment.content,
  31. metadata={
  32. "doc_id": segment.index_node_id,
  33. "doc_hash": segment.index_node_hash,
  34. "document_id": segment.document_id,
  35. "dataset_id": segment.dataset_id,
  36. },
  37. )
  38. dataset = segment.dataset
  39. if not dataset:
  40. logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan"))
  41. return
  42. dataset_document = segment.document
  43. if not dataset_document:
  44. logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan"))
  45. return
  46. if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
  47. logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan"))
  48. return
  49. index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor()
  50. if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
  51. child_chunks = segment.child_chunks
  52. if child_chunks:
  53. child_documents = []
  54. for child_chunk in child_chunks:
  55. child_document = ChildDocument(
  56. page_content=child_chunk.content,
  57. metadata={
  58. "doc_id": child_chunk.index_node_id,
  59. "doc_hash": child_chunk.index_node_hash,
  60. "document_id": segment.document_id,
  61. "dataset_id": segment.dataset_id,
  62. },
  63. )
  64. child_documents.append(child_document)
  65. document.children = child_documents
  66. # save vector index
  67. index_processor.load(dataset, [document])
  68. end_at = time.perf_counter()
  69. logging.info(
  70. click.style("Segment enabled to index: {} latency: {}".format(segment.id, end_at - start_at), fg="green")
  71. )
  72. except Exception as e:
  73. logging.exception("enable segment to index failed")
  74. segment.enabled = False
  75. segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  76. segment.status = "error"
  77. segment.error = str(e)
  78. db.session.commit()
  79. finally:
  80. redis_client.delete(indexing_cache_key)