# create_document_index.py
import datetime
import logging
import time

import click
from celery import shared_task
from werkzeug.exceptions import NotFound

from core.indexing_runner import IndexingRunner, DocumentIsPausedException
from events.dataset_event import dataset_was_deleted
from events.event_handlers.document_index_event import document_index_created
from extensions.ext_database import db
from models.dataset import Document
from tasks.clean_dataset_task import clean_dataset_task
  13. @document_index_created.connect
  14. def handle(sender, **kwargs):
  15. dataset_id = sender
  16. document_ids = kwargs.get('document_ids', None)
  17. documents = []
  18. start_at = time.perf_counter()
  19. for document_id in document_ids:
  20. logging.info(click.style('Start process document: {}'.format(document_id), fg='green'))
  21. document = db.session.query(Document).filter(
  22. Document.id == document_id,
  23. Document.dataset_id == dataset_id
  24. ).first()
  25. if not document:
  26. raise NotFound('Document not found')
  27. document.indexing_status = 'parsing'
  28. document.processing_started_at = datetime.datetime.utcnow()
  29. documents.append(document)
  30. db.session.add(document)
  31. db.session.commit()
  32. try:
  33. indexing_runner = IndexingRunner()
  34. indexing_runner.run(documents)
  35. end_at = time.perf_counter()
  36. logging.info(click.style('Processed dataset: {} latency: {}'.format(dataset_id, end_at - start_at), fg='green'))
  37. except DocumentIsPausedException as ex:
  38. logging.info(click.style(str(ex), fg='yellow'))
  39. except Exception:
  40. pass