# statemachines.py
  1. __package__ = 'archivebox.crawls'
  2. from statemachine import State, StateMachine
  3. from crawls.models import Crawl
  4. # State Machine Definitions
  5. #################################################
  6. class CrawlMachine(StateMachine, strict_states=True):
  7. """State machine for managing Crawl lifecycle."""
  8. model: Crawl
  9. # States
  10. queued = State(value=Crawl.StatusChoices.QUEUED, initial=True)
  11. started = State(value=Crawl.StatusChoices.STARTED)
  12. sealed = State(value=Crawl.StatusChoices.SEALED, final=True)
  13. # Tick Event
  14. tick = (
  15. queued.to.itself(unless='can_start', internal=True) |
  16. queued.to(started, cond='can_start') |
  17. started.to.itself(unless='is_finished', internal=True) |
  18. started.to(sealed, cond='is_finished')
  19. )
  20. def __init__(self, crawl, *args, **kwargs):
  21. self.crawl = crawl
  22. super().__init__(crawl, *args, **kwargs)
  23. def can_start(self) -> bool:
  24. return self.crawl.seed and self.crawl.seed.uri
  25. def is_finished(self) -> bool:
  26. return not self.crawl.has_pending_archiveresults()
  27. def on_started(self):
  28. self.crawl.create_root_snapshot()
  29. self.crawl.bump_retry_at(seconds=10)
  30. self.crawl.save()
  31. def on_sealed(self):
  32. self.crawl.retry_at = None
  33. self.crawl.save()