# main.py

import os
import re
import shutil

from typing import List, Optional, Iterable

from .schema import Link
from .util import enforce_types, TimedProgress
from .index import (
    links_after_timestamp,
    load_main_index,
    write_main_index,
)
from .archive_methods import archive_link
from .config import (
    stderr,
    ANSI,
    ONLY_NEW,
    OUTPUT_DIR,
    SOURCES_DIR,
    ARCHIVE_DIR,
    DATABASE_DIR,
    DATABASE_FILE,
    check_dependencies,
    check_data_folder,
    setup_django,
)
from .logs import (
    log_archiving_started,
    log_archiving_paused,
    log_archiving_finished,
    log_removal_started,
    log_removal_finished,
    log_list_started,
    log_list_finished,
)

@enforce_types
def init():
    """Initialize a new ArchiveBox collection in the current OUTPUT_DIR."""
    os.makedirs(OUTPUT_DIR, exist_ok=True)

    harmless_files = {'.DS_Store', '.venv', 'venv', 'virtualenv', '.virtualenv', 'sources', 'archive', 'database', 'logs', 'static'}
    is_empty = not (set(os.listdir(OUTPUT_DIR)) - harmless_files)
    existing_index = os.path.exists(os.path.join(OUTPUT_DIR, 'index.json'))

    if is_empty:
        stderr('{green}[+] Initializing new archive directory: {}{reset}'.format(OUTPUT_DIR, **ANSI))
        write_main_index([], out_dir=OUTPUT_DIR, finished=True)
    elif existing_index:
        stderr('{green}[√] You already have an ArchiveBox collection in the current folder.{reset}'.format(**ANSI))
        stderr(f'    {OUTPUT_DIR}')
        stderr('        > index.html')
        stderr('        > index.json')
    else:
        stderr(
            ("{red}[X] This folder already has files in it. You must run init inside a completely empty directory.{reset}"
             "\n\n"
             "    {lightred}Hint:{reset} To import a data folder created by an older version of ArchiveBox,\n"
             "    just cd into the folder and run the archivebox command to pick up where you left off.\n\n"
             "    (Always make sure your data folder is backed up before updating ArchiveBox.)"
             ).format(**ANSI)
        )
        raise SystemExit(1)

    os.makedirs(SOURCES_DIR, exist_ok=True)
    stderr('    > sources/')
    os.makedirs(ARCHIVE_DIR, exist_ok=True)
    stderr('    > archive/')
    os.makedirs(DATABASE_DIR, exist_ok=True)

    setup_django()
    from django.core.management import call_command
    from django.contrib.auth.models import User

    stderr('    > database/')
    stderr('\n{green}[+] Running Django migrations...{reset}'.format(**ANSI))
    call_command("makemigrations", interactive=False)
    call_command("migrate", interactive=False)

    if not User.objects.filter(is_superuser=True).exists():
        stderr('{green}[+] Creating admin user account...{reset}'.format(**ANSI))
        call_command("createsuperuser", interactive=True)

    stderr('\n{green}------------------------------------------------------------{reset}'.format(**ANSI))
    stderr('{green}[√] Done. ArchiveBox collection is set up in the current folder.{reset}'.format(**ANSI))
    stderr('    To add new links, you can run:')
    stderr("        archivebox add 'https://example.com'")
    stderr()
    stderr('    For more usage and examples, run:')
    stderr('        archivebox help')
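
# After a successful init, the collection folder ends up looking roughly like
# this (a sketch based on the steps above, not an exhaustive listing):
#
#   OUTPUT_DIR/
#       index.json      # written by write_main_index()
#       index.html      # written by write_main_index()
#       sources/        # raw imported link lists
#       archive/        # one snapshot folder per link
#       database/       # Django database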

@enforce_types
def update_archive_data(import_path: Optional[str]=None, resume: Optional[float]=None, only_new: bool=False) -> List[Link]:
    """The main ArchiveBox entrypoint. Everything starts here."""

    check_dependencies()
    check_data_folder()

    # Step 1: Load the list of links from the existing index,
    #         then merge in and dedupe new links from import_path
    all_links, new_links = load_main_index(out_dir=OUTPUT_DIR, import_path=import_path)

    # Step 2: Write the updated index with deduped old and new links back to disk
    write_main_index(links=list(all_links), out_dir=OUTPUT_DIR)

    # Step 3: Run the archive methods for each link
    links = new_links if (only_new or ONLY_NEW) else all_links
    log_archiving_started(len(links), resume)
    idx: int = 0
    link: Link = None  # type: ignore
    try:
        for idx, link in enumerate(links_after_timestamp(links, resume)):
            archive_link(link, out_dir=link.link_dir)
    except KeyboardInterrupt:
        log_archiving_paused(len(links), idx, link.timestamp if link else '0')
        raise SystemExit(0)
    except BaseException:
        print()
        raise

    log_archiving_finished(len(links))

    # Step 4: Re-write the links index with updated titles, icons, and resources
    all_links, _ = load_main_index(out_dir=OUTPUT_DIR)
    write_main_index(links=list(all_links), out_dir=OUTPUT_DIR, finished=True)
    return all_links
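
# Example (a sketch; the path and timestamp are hypothetical): import new
# links from a bookmarks export and only archive the ones not already indexed:
#
#   update_archive_data(import_path='bookmarks.html', only_new=True)
#
# Passing resume=1546300800.0 (a unix timestamp) makes links_after_timestamp()
# skip links earlier than that point, which is how an interrupted run resumes.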

LINK_FILTERS = {
    'exact': lambda link, pattern: (link.url == pattern) or (link.base_url == pattern),
    'substring': lambda link, pattern: pattern in link.url,
    'regex': lambda link, pattern: bool(re.match(pattern, link.url)),
    'domain': lambda link, pattern: link.domain == pattern,
}

@enforce_types
def link_matches_filter(link: Link, filter_patterns: List[str], filter_type: str='exact') -> bool:
    for pattern in filter_patterns:
        if LINK_FILTERS[filter_type](link, pattern):
            return True
    return False
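
# How the filter types behave, assuming a hypothetical Link with
# url='https://example.com/page', base_url='example.com/page', domain='example.com':
#
#   link_matches_filter(link, ['https://example.com/page'], 'exact')    -> True
#   link_matches_filter(link, ['example.com'], 'substring')             -> True
#   link_matches_filter(link, [r'^https://example\.com/'], 'regex')     -> True
#   link_matches_filter(link, ['example.com'], 'domain')                -> True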

@enforce_types
def list_archive_data(filter_patterns: Optional[List[str]]=None, filter_type: str='exact',
                      after: Optional[float]=None, before: Optional[float]=None) -> Iterable[Link]:

    all_links, _ = load_main_index(out_dir=OUTPUT_DIR)

    for link in all_links:
        if after is not None and float(link.timestamp) < after:
            continue
        if before is not None and float(link.timestamp) > before:
            continue

        if filter_patterns:
            if link_matches_filter(link, filter_patterns, filter_type):
                yield link
        else:
            yield link
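
# Example (hypothetical values): iterate over archived links for one domain
# within a time window. Index timestamps are unix-epoch strings, compared as
# floats against after/before:
#
#   for link in list_archive_data(
#           filter_patterns=['example.com'],
#           filter_type='domain',
#           after=1546300800.0,    # 2019-01-01
#           before=1577836800.0,   # 2020-01-01
#       ):
#       print(link.timestamp, link.url)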

@enforce_types
def remove_archive_links(filter_patterns: List[str], filter_type: str='exact',
                         after: Optional[float]=None, before: Optional[float]=None,
                         yes: bool=False, delete: bool=False) -> List[Link]:

    check_dependencies()
    check_data_folder()

    log_list_started(filter_patterns, filter_type)
    timer = TimedProgress(360, prefix='      ')
    try:
        links = list(list_archive_data(
            filter_patterns=filter_patterns,
            filter_type=filter_type,
            after=after,
            before=before,
        ))
    finally:
        timer.end()

    if not links:
        log_removal_finished(0, 0)
        raise SystemExit(1)

    log_list_finished(links)
    log_removal_started(links, yes=yes, delete=delete)

    timer = TimedProgress(360, prefix='      ')
    try:
        to_keep = []
        all_links, _ = load_main_index(out_dir=OUTPUT_DIR)
        for link in all_links:
            # Mirror the selection made by list_archive_data above, so the
            # preview shown to the user matches what actually gets removed.
            should_remove = (
                (after is None or float(link.timestamp) >= after)
                and (before is None or float(link.timestamp) <= before)
                and link_matches_filter(link, filter_patterns, filter_type)
            )
            if not should_remove:
                to_keep.append(link)
            elif delete:
                shutil.rmtree(link.link_dir)
    finally:
        timer.end()

    write_main_index(links=to_keep, out_dir=OUTPUT_DIR, finished=True)
    log_removal_finished(len(all_links), len(to_keep))
    return to_keep
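
# Example (hypothetical): remove every snapshot of one domain. With
# delete=False the matching links are only dropped from the main index; with
# delete=True their archive folders are deleted from disk as well:
#
#   remove_archive_links(['example.com'], filter_type='domain', yes=True, delete=True)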