__package__ = 'archivebox'

import re
import os
import sys
import shutil

from typing import Dict, List, Optional, Set, Tuple, Iterable, IO

from crontab import CronTab, CronSlices

from .cli import (
    list_subcommands,
    run_subcommand,
    display_first,
    meta_cmds,
    main_cmds,
    archive_cmds,
)
from .index.schema import Link
from .util import (
    enforce_types,
    TimedProgress,
    get_dir_size,
    human_readable_size,
    save_stdin_to_sources,
    save_file_to_sources,
    links_to_csv,
    to_json,
    folders_to_str,
)
from .index import (
    links_after_timestamp,
    load_main_index,
    import_new_links,
    write_main_index,
    link_matches_filter,
    get_indexed_folders,
    get_archived_folders,
    get_unarchived_folders,
    get_present_folders,
    get_valid_folders,
    get_invalid_folders,
    get_duplicate_folders,
    get_orphaned_folders,
    get_corrupted_folders,
    get_unrecognized_folders,
    fix_invalid_folder_locations,
)
from .index.json import (
    parse_json_main_index,
    parse_json_links_details,
)
from .index.sql import parse_sql_main_index, get_admins, apply_migrations
from .index.html import parse_html_main_index
from .extractors import archive_link
from .config import (
    stderr,
    ConfigDict,
    ANSI,
    IS_TTY,
    USER,
    ARCHIVEBOX_BINARY,
    ONLY_NEW,
    OUTPUT_DIR,
    SOURCES_DIR,
    ARCHIVE_DIR,
    LOGS_DIR,
    CONFIG_FILE,
    ARCHIVE_DIR_NAME,
    SOURCES_DIR_NAME,
    LOGS_DIR_NAME,
    STATIC_DIR_NAME,
    JSON_INDEX_FILENAME,
    HTML_INDEX_FILENAME,
    SQL_INDEX_FILENAME,
    ROBOTS_TXT_FILENAME,
    FAVICON_FILENAME,
    check_dependencies,
    check_data_folder,
    write_config_file,
    setup_django,
    VERSION,
    CODE_LOCATIONS,
    EXTERNAL_LOCATIONS,
    DATA_LOCATIONS,
    DEPENDENCIES,
    load_all_config,
    CONFIG,
    USER_CONFIG,
    get_real_name,
)
from .cli.logging import (
    log_archiving_started,
    log_archiving_paused,
    log_archiving_finished,
    log_removal_started,
    log_removal_finished,
    log_list_started,
    log_list_finished,
)
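
# Files and folders that are safe to ignore when deciding whether an existing
# directory is empty enough for `archivebox init` to run inside it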
ALLOWED_IN_OUTPUT_DIR = {
    '.DS_Store',
    '.venv',
    'venv',
    'virtualenv',
    '.virtualenv',
    ARCHIVE_DIR_NAME,
    SOURCES_DIR_NAME,
    LOGS_DIR_NAME,
    STATIC_DIR_NAME,
    SQL_INDEX_FILENAME,
    JSON_INDEX_FILENAME,
    HTML_INDEX_FILENAME,
    ROBOTS_TXT_FILENAME,
    FAVICON_FILENAME,
}

@enforce_types
def help(out_dir: str=OUTPUT_DIR) -> None:
    """Print the ArchiveBox help message and usage"""

    all_subcommands = list_subcommands()
    COMMANDS_HELP_TEXT = '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in meta_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in main_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in archive_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd not in display_first
    )

    if os.path.exists(os.path.join(out_dir, JSON_INDEX_FILENAME)):
        print('''{green}ArchiveBox v{}: The self-hosted internet archive.{reset}

{lightred}Active data directory:{reset}
    {}

{lightred}Usage:{reset}
    archivebox [command] [--help] [--version] [...args]

{lightred}Commands:{reset}
    {}

{lightred}Example Use:{reset}
    mkdir my-archive; cd my-archive/
    archivebox init
    archivebox info

    archivebox add https://example.com/some/page
    archivebox add --depth=1 ~/Downloads/bookmarks_export.html

    archivebox list --sort=timestamp --csv=timestamp,url,is_archived
    archivebox schedule --every=week https://example.com/some/feed.rss
    archivebox update --resume=15109948213.123

{lightred}Documentation:{reset}
    https://github.com/pirate/ArchiveBox/wiki
'''.format(VERSION, out_dir, COMMANDS_HELP_TEXT, **ANSI))
    else:
        print('{green}Welcome to ArchiveBox v{}!{reset}'.format(VERSION, **ANSI))
        print()
        print('To import an existing archive (from a previous version of ArchiveBox):')
        print('    1. cd into your data dir OUTPUT_DIR (usually ArchiveBox/output) and run:')
        print('    2. archivebox init')
        print()
        print('To start a new archive:')
        print('    1. Create an empty directory, then cd into it and run:')
        print('    2. archivebox init')
        print()
        print('For more information, see the documentation here:')
        print('    https://github.com/pirate/ArchiveBox/wiki')

@enforce_types
def version(quiet: bool=False,
            out_dir: str=OUTPUT_DIR) -> None:
    """Print the ArchiveBox version and dependency information"""

    if quiet:
        print(VERSION)
    else:
        print('ArchiveBox v{}'.format(VERSION))
        print()

        print('{white}[i] Dependency versions:{reset}'.format(**ANSI))
        for name, dependency in DEPENDENCIES.items():
            print(printable_dependency_version(name, dependency))
        print()

        print('{white}[i] Code locations:{reset}'.format(**ANSI))
        for name, folder in CODE_LOCATIONS.items():
            print(printable_folder_status(name, folder))
        print()

        print('{white}[i] External locations:{reset}'.format(**ANSI))
        for name, folder in EXTERNAL_LOCATIONS.items():
            print(printable_folder_status(name, folder))
        print()

        print('{white}[i] Data locations:{reset}'.format(**ANSI))
        for name, folder in DATA_LOCATIONS.items():
            print(printable_folder_status(name, folder))
        print()

        check_dependencies()

@enforce_types
def run(subcommand: str,
        subcommand_args: Optional[List[str]],
        stdin: Optional[IO]=None,
        out_dir: str=OUTPUT_DIR) -> None:
    """Run a given ArchiveBox subcommand with the given list of args"""

    run_subcommand(
        subcommand=subcommand,
        subcommand_args=subcommand_args,
        stdin=stdin,
        pwd=out_dir,
    )
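
# init is safe to re-run: it creates any missing folders/indexes, then merges
# links found in the JSON index, SQL index, and archive/<timestamp>/ data dirs
# back into a single deduped main index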

@enforce_types
def init(out_dir: str=OUTPUT_DIR) -> None:
    """Initialize a new ArchiveBox collection in the current directory"""

    os.makedirs(out_dir, exist_ok=True)

    is_empty = not len(set(os.listdir(out_dir)) - ALLOWED_IN_OUTPUT_DIR)
    existing_index = os.path.exists(os.path.join(out_dir, JSON_INDEX_FILENAME))

    if is_empty and not existing_index:
        print('{green}[+] Initializing a new ArchiveBox collection in this folder...{reset}'.format(**ANSI))
        print(f'    {out_dir}')
        print('{green}------------------------------------------------------------------{reset}'.format(**ANSI))
    elif existing_index:
        print('{green}[*] Updating existing ArchiveBox collection in this folder...{reset}'.format(**ANSI))
        print(f'    {out_dir}')
        print('{green}------------------------------------------------------------------{reset}'.format(**ANSI))
    else:
        stderr(
            ("{red}[X] This folder appears to already have files in it, but no index.json is present.{reset}\n\n"
             "    You must run init in a completely empty directory, or an existing data folder.\n\n"
             "    {lightred}Hint:{reset} To import an existing data folder, make sure to cd into the folder first,\n"
             "    then run 'archivebox init' to pick up where you left off.\n\n"
             "    (Always make sure your data folder is backed up first before updating ArchiveBox)"
            ).format(**ANSI)
        )
        raise SystemExit(1)

    if existing_index:
        print('\n{green}[*] Verifying archive folder structure...{reset}'.format(**ANSI))
    else:
        print('\n{green}[+] Building archive folder structure...{reset}'.format(**ANSI))

    os.makedirs(SOURCES_DIR, exist_ok=True)
    print(f'    √ {SOURCES_DIR}')

    os.makedirs(ARCHIVE_DIR, exist_ok=True)
    print(f'    √ {ARCHIVE_DIR}')

    os.makedirs(LOGS_DIR, exist_ok=True)
    print(f'    √ {LOGS_DIR}')

    write_config_file({}, out_dir=out_dir)
    print(f'    √ {CONFIG_FILE}')

    if os.path.exists(os.path.join(out_dir, SQL_INDEX_FILENAME)):
        print('\n{green}[*] Verifying main SQL index and running migrations...{reset}'.format(**ANSI))
    else:
        print('\n{green}[+] Building main SQL index and running migrations...{reset}'.format(**ANSI))

    setup_django(out_dir, check_db=False)
    from django.conf import settings
    assert settings.DATABASE_FILE == os.path.join(out_dir, SQL_INDEX_FILENAME)
    print(f'    √ {settings.DATABASE_FILE}')
    print()
    for migration_line in apply_migrations(out_dir):
        print(f'    {migration_line}')

    assert os.path.exists(settings.DATABASE_FILE)

    # from django.contrib.auth.models import User
    # if IS_TTY and not User.objects.filter(is_superuser=True).exists():
    #     print('{green}[+] Creating admin user account...{reset}'.format(**ANSI))
    #     call_command("createsuperuser", interactive=True)

    print()
    print('{green}[*] Collecting links from any existing indexes and archive folders...{reset}'.format(**ANSI))

    all_links: Dict[str, Link] = {}
    if existing_index:
        all_links = {
            link.url: link
            for link in load_main_index(out_dir=out_dir, warn=False)
        }
        print('    √ Loaded {} links from existing main index.'.format(len(all_links)))

    # Links in data folders that don't match their timestamp
    fixed, cant_fix = fix_invalid_folder_locations(out_dir=out_dir)
    if fixed:
        print('    {lightyellow}√ Fixed {} data directory locations that didn\'t match their link timestamps.{reset}'.format(len(fixed), **ANSI))
    if cant_fix:
        print('    {lightyellow}! Could not fix {} data directory locations due to conflicts with existing folders.{reset}'.format(len(cant_fix), **ANSI))

    # Links in JSON index but not in main index
    orphaned_json_links = {
        link.url: link
        for link in parse_json_main_index(out_dir)
        if link.url not in all_links
    }
    if orphaned_json_links:
        all_links.update(orphaned_json_links)
        print('    {lightyellow}√ Added {} orphaned links from existing JSON index...{reset}'.format(len(orphaned_json_links), **ANSI))

    # Links in SQL index but not in main index
    orphaned_sql_links = {
        link.url: link
        for link in parse_sql_main_index(out_dir)
        if link.url not in all_links
    }
    if orphaned_sql_links:
        all_links.update(orphaned_sql_links)
        print('    {lightyellow}√ Added {} orphaned links from existing SQL index...{reset}'.format(len(orphaned_sql_links), **ANSI))

    # Links in data dir indexes but not in main index
    orphaned_data_dir_links = {
        link.url: link
        for link in parse_json_links_details(out_dir)
        if link.url not in all_links
    }
    if orphaned_data_dir_links:
        all_links.update(orphaned_data_dir_links)
        print('    {lightyellow}√ Added {} orphaned links from existing archive directories.{reset}'.format(len(orphaned_data_dir_links), **ANSI))

    # Links in invalid/duplicate data dirs
    invalid_folders = {
        folder: link
        for folder, link in get_invalid_folders(all_links.values(), out_dir=out_dir).items()
    }
    if invalid_folders:
        print('    {lightyellow}! Skipped adding {} invalid link data directories.{reset}'.format(len(invalid_folders), **ANSI))
        print('        X ' + '\n        X '.join(f'{folder} {link}' for folder, link in invalid_folders.items()))
        print()
        print('        {lightred}Hint:{reset} For more information about the link data directories that were skipped, run:'.format(**ANSI))
        print('            archivebox info')
        print('            archivebox list --status=invalid')

    write_main_index(list(all_links.values()), out_dir=out_dir)
    print('\n{green}------------------------------------------------------------------{reset}'.format(**ANSI))
    if existing_index:
        print('{green}[√] Done. Verified and updated the existing ArchiveBox collection.{reset}'.format(**ANSI))
    else:
        print('{green}[√] Done. A new ArchiveBox collection was initialized ({} links).{reset}'.format(len(all_links), **ANSI))
    print()
    print('    To view your archive index, open:')
    print('        {}'.format(os.path.join(out_dir, HTML_INDEX_FILENAME)))
    print()
    print('    To add new links, you can run:')
    print("        archivebox add 'https://example.com'")
    print()
    print('    For more usage and examples, run:')
    print('        archivebox help')
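
# info only reads from the collection: it sizes the indexes, counts the links
# in each index format, and buckets data directories by status, without
# modifying anything on disk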

@enforce_types
def info(out_dir: str=OUTPUT_DIR) -> None:
    """Print out some info and statistics about the archive collection"""

    check_data_folder(out_dir=out_dir)

    print('{green}[*] Scanning archive collection main index...{reset}'.format(**ANSI))
    print(f'    {out_dir}/*')
    num_bytes, num_dirs, num_files = get_dir_size(out_dir, recursive=False, pattern='index.')
    size = human_readable_size(num_bytes)
    print(f'    Size: {size} across {num_files} files')
    print()

    links = list(load_main_index(out_dir=out_dir))
    num_json_links = len(links)
    num_sql_links = sum(1 for link in parse_sql_main_index(out_dir=out_dir))
    num_html_links = sum(1 for url in parse_html_main_index(out_dir=out_dir))
    num_link_details = sum(1 for link in parse_json_links_details(out_dir=out_dir))
    users = get_admins().values_list('username', flat=True)
    print(f'    > JSON Main Index: {num_json_links} links'.ljust(36), f'(found in {JSON_INDEX_FILENAME})')
    print(f'    > SQL Main Index: {num_sql_links} links'.ljust(36), f'(found in {SQL_INDEX_FILENAME})')
    print(f'    > HTML Main Index: {num_html_links} links'.ljust(36), f'(found in {HTML_INDEX_FILENAME})')
    print(f'    > JSON Link Details: {num_link_details} links'.ljust(36), f'(found in {ARCHIVE_DIR_NAME}/*/index.json)')
    print(f'    > Admin: {len(users)} users {", ".join(users)}'.ljust(36), f'(found in {SQL_INDEX_FILENAME})')

    if num_html_links != len(links) or num_sql_links != len(links):
        print()
        print('    {lightred}Hint:{reset} You can fix index count differences automatically by running:'.format(**ANSI))
        print('        archivebox init')

    if not users:
        print()
        print('    {lightred}Hint:{reset} You can create an admin user by running:'.format(**ANSI))
        print('        archivebox manage createsuperuser')

    print()
    print('{green}[*] Scanning archive collection link data directories...{reset}'.format(**ANSI))
    print(f'    {ARCHIVE_DIR}/*')

    num_bytes, num_dirs, num_files = get_dir_size(ARCHIVE_DIR)
    size = human_readable_size(num_bytes)
    print(f'    Size: {size} across {num_files} files in {num_dirs} directories')
    print()

    num_indexed = len(get_indexed_folders(links, out_dir=out_dir))
    num_archived = len(get_archived_folders(links, out_dir=out_dir))
    num_unarchived = len(get_unarchived_folders(links, out_dir=out_dir))
    print(f'    > indexed: {num_indexed}'.ljust(36), f'({get_indexed_folders.__doc__})')
    print(f'    > archived: {num_archived}'.ljust(36), f'({get_archived_folders.__doc__})')
    print(f'    > unarchived: {num_unarchived}'.ljust(36), f'({get_unarchived_folders.__doc__})')

    num_present = len(get_present_folders(links, out_dir=out_dir))
    num_valid = len(get_valid_folders(links, out_dir=out_dir))
    print()
    print(f'    > present: {num_present}'.ljust(36), f'({get_present_folders.__doc__})')
    print(f'    > valid: {num_valid}'.ljust(36), f'({get_valid_folders.__doc__})')

    duplicate = get_duplicate_folders(links, out_dir=out_dir)
    orphaned = get_orphaned_folders(links, out_dir=out_dir)
    corrupted = get_corrupted_folders(links, out_dir=out_dir)
    unrecognized = get_unrecognized_folders(links, out_dir=out_dir)
    num_invalid = len({**duplicate, **orphaned, **corrupted, **unrecognized})
    print(f'    > invalid: {num_invalid}'.ljust(36), f'({get_invalid_folders.__doc__})')
    print(f'      > duplicate: {len(duplicate)}'.ljust(36), f'({get_duplicate_folders.__doc__})')
    print(f'      > orphaned: {len(orphaned)}'.ljust(36), f'({get_orphaned_folders.__doc__})')
    print(f'      > corrupted: {len(corrupted)}'.ljust(36), f'({get_corrupted_folders.__doc__})')
    print(f'      > unrecognized: {len(unrecognized)}'.ljust(36), f'({get_unrecognized_folders.__doc__})')

    if num_indexed:
        print()
        print('    {lightred}Hint:{reset} You can list link data directories by status like so:'.format(**ANSI))
        print('        archivebox list --status=<status>  (e.g. indexed, corrupted, archived, etc.)')

    if orphaned:
        print()
        print('    {lightred}Hint:{reset} To automatically import orphaned data directories into the main index, run:'.format(**ANSI))
        print('        archivebox init')

    if num_invalid:
        print()
        print('    {lightred}Hint:{reset} You may need to manually remove or fix some invalid data directories; afterwards, make sure to run:'.format(**ANSI))
        print('        archivebox init')

    print()
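
# add and update share the same pipeline: load and dedupe the index, write it
# back to disk, run the extractors for each selected link, then re-write the
# index with the updated titles, icons, and resources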

@enforce_types
def add(import_str: Optional[str]=None,
        import_path: Optional[str]=None,
        update_all: bool=not ONLY_NEW,
        index_only: bool=False,
        out_dir: str=OUTPUT_DIR) -> List[Link]:
    """Add a new URL or list of URLs to your archive"""

    check_data_folder(out_dir=out_dir)

    if import_str and import_path:
        stderr(
            '[X] You should pass either an import path as an argument, '
            'or pass a list of links via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif import_str:
        import_path = save_stdin_to_sources(import_str, out_dir=out_dir)
    else:
        import_path = save_file_to_sources(import_path, out_dir=out_dir)

    check_dependencies()

    # Step 1: Load list of links from the existing index
    #         merge in and dedupe new links from import_path
    all_links: List[Link] = []
    new_links: List[Link] = []
    all_links = load_main_index(out_dir=out_dir)
    if import_path:
        all_links, new_links = import_new_links(all_links, import_path, out_dir=out_dir)

    # Step 2: Write updated index with deduped old and new links back to disk
    write_main_index(links=all_links, out_dir=out_dir)

    if index_only:
        return all_links

    # Step 3: Run the archive methods for each link
    links = all_links if update_all else new_links
    log_archiving_started(len(links))
    idx: int = 0
    link: Link = None  # type: ignore
    try:
        for idx, link in enumerate(links):
            archive_link(link, out_dir=link.link_dir)
    except KeyboardInterrupt:
        log_archiving_paused(len(links), idx, link.timestamp if link else '0')
        raise SystemExit(0)
    except:
        print()
        raise

    log_archiving_finished(len(links))

    # Step 4: Re-write links index with updated titles, icons, and resources
    all_links = load_main_index(out_dir=out_dir)
    write_main_index(links=list(all_links), out_dir=out_dir, finished=True)
    return all_links
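
# remove runs two passes: list_links() first selects and logs the matching
# links, then the full main index is re-scanned to keep everything that
# doesn't match the same filters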

@enforce_types
def remove(filter_str: Optional[str]=None,
           filter_patterns: Optional[List[str]]=None,
           filter_type: str='exact',
           after: Optional[float]=None,
           before: Optional[float]=None,
           yes: bool=False,
           delete: bool=False,
           out_dir: str=OUTPUT_DIR) -> List[Link]:
    """Remove the specified URLs from the archive"""

    check_data_folder(out_dir=out_dir)

    if filter_str and filter_patterns:
        stderr(
            '[X] You should pass either a pattern as an argument, '
            'or pass a list of patterns via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif not (filter_str or filter_patterns):
        stderr(
            '[X] You should pass either a pattern as an argument, '
            'or pass a list of patterns via stdin.',
            color='red',
        )
        stderr()
        stderr('    {lightred}Hint:{reset} To remove all urls you can run:'.format(**ANSI))
        stderr("        archivebox remove --filter-type=regex '.*'")
        stderr()
        raise SystemExit(2)
    elif filter_str:
        filter_patterns = [ptn.strip() for ptn in filter_str.split('\n')]

    log_list_started(filter_patterns, filter_type)
    timer = TimedProgress(360, prefix='      ')
    try:
        links = list(list_links(
            filter_patterns=filter_patterns,
            filter_type=filter_type,
            after=after,
            before=before,
        ))
    finally:
        timer.end()

    if not links:
        log_removal_finished(0, 0)
        raise SystemExit(1)

    log_list_finished(links)
    log_removal_started(links, yes=yes, delete=delete)

    timer = TimedProgress(360, prefix='      ')
    try:
        to_keep = []
        all_links = load_main_index(out_dir=out_dir)
        for link in all_links:
            # only remove links inside the given timestamp range that also
            # match the filter (mirrors the list_links() selection above)
            should_remove = (
                (after is None or float(link.timestamp) >= after)
                and (before is None or float(link.timestamp) <= before)
                and link_matches_filter(link, filter_patterns, filter_type)
            )
            if not should_remove:
                to_keep.append(link)
            elif delete:
                shutil.rmtree(link.link_dir, ignore_errors=True)
    finally:
        timer.end()

    write_main_index(links=to_keep, out_dir=out_dir, finished=True)
    log_removal_finished(len(all_links), len(to_keep))

    return to_keep

@enforce_types
def update(resume: Optional[float]=None,
           only_new: bool=ONLY_NEW,
           index_only: bool=False,
           overwrite: bool=False,
           filter_patterns_str: Optional[str]=None,
           filter_patterns: Optional[List[str]]=None,
           filter_type: Optional[str]=None,
           status: Optional[str]=None,
           after: Optional[str]=None,
           before: Optional[str]=None,
           out_dir: str=OUTPUT_DIR) -> List[Link]:
    """Import any new links from subscriptions and retry any previously failed/skipped links"""

    check_dependencies()
    check_data_folder(out_dir=out_dir)

    # Step 1: Load the list of links from the existing main index
    all_links: List[Link] = []
    new_links: List[Link] = []
    all_links = load_main_index(out_dir=out_dir)

    # Step 2: Write the deduped index back to disk
    write_main_index(links=list(all_links), out_dir=out_dir)

    # Step 3: Filter for the selected links
    matching_links = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )
    matching_folders = list_folders(
        links=list(matching_links),
        status=status,
        out_dir=out_dir,
    )
    all_links = [link for link in matching_folders.values() if link]

    if index_only:
        return all_links

    # Step 4: Run the archive methods for each link
    # (note: new_links is never populated here, so only_new=True archives nothing)
    links = new_links if only_new else all_links
    log_archiving_started(len(links), resume)
    idx: int = 0
    link: Link = None  # type: ignore
    try:
        for idx, link in enumerate(links_after_timestamp(links, resume)):
            archive_link(link, overwrite=overwrite, out_dir=link.link_dir)
    except KeyboardInterrupt:
        log_archiving_paused(len(links), idx, link.timestamp if link else '0')
        raise SystemExit(0)
    except:
        print()
        raise

    log_archiving_finished(len(links))

    # Step 5: Re-write links index with updated titles, icons, and resources
    all_links = load_main_index(out_dir=out_dir)
    write_main_index(links=list(all_links), out_dir=out_dir, finished=True)
    return all_links
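
# list_all powers `archivebox list`: filter by pattern and timestamp range,
# optionally sort, bucket by folder status, then print as CSV, JSON, or text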

@enforce_types
def list_all(filter_patterns_str: Optional[str]=None,
             filter_patterns: Optional[List[str]]=None,
             filter_type: str='exact',
             status: Optional[str]=None,
             after: Optional[float]=None,
             before: Optional[float]=None,
             sort: Optional[str]=None,
             csv: Optional[str]=None,
             json: bool=False,
             out_dir: str=OUTPUT_DIR) -> Iterable[Link]:
    """List, filter, and export information about archive entries"""

    check_data_folder(out_dir=out_dir)

    if filter_patterns and filter_patterns_str:
        stderr(
            '[X] You should either pass filter patterns as arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif filter_patterns_str:
        filter_patterns = filter_patterns_str.split('\n')

    links = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )

    if sort:
        links = sorted(links, key=lambda link: getattr(link, sort))

    folders = list_folders(
        links=list(links),
        status=status,
        out_dir=out_dir,
    )

    if csv:
        print(links_to_csv(folders.values(), csv_cols=csv.split(','), header=True))
    elif json:
        print(to_json(folders.values(), indent=4, sort_keys=True))
    else:
        print(folders_to_str(folders))
    raise SystemExit(not folders)

@enforce_types
def list_links(filter_patterns: Optional[List[str]]=None,
               filter_type: str='exact',
               after: Optional[float]=None,
               before: Optional[float]=None,
               out_dir: str=OUTPUT_DIR) -> Iterable[Link]:

    check_data_folder(out_dir=out_dir)

    all_links = load_main_index(out_dir=out_dir)

    for link in all_links:
        if after is not None and float(link.timestamp) < after:
            continue
        if before is not None and float(link.timestamp) > before:
            continue

        if filter_patterns:
            if link_matches_filter(link, filter_patterns, filter_type):
                yield link
        else:
            yield link
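
# list_folders dispatches a --status value to the matching folder-bucketing
# helper from .index (indexed/archived/valid/orphaned/etc.)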

@enforce_types
def list_folders(links: List[Link],
                 status: str,
                 out_dir: str=OUTPUT_DIR) -> Dict[str, Optional[Link]]:

    check_data_folder(out_dir=out_dir)

    # default to the 'indexed' bucket when no status is given
    status = status or 'indexed'

    if status == 'indexed':
        return get_indexed_folders(links, out_dir=out_dir)
    elif status == 'archived':
        return get_archived_folders(links, out_dir=out_dir)
    elif status == 'unarchived':
        return get_unarchived_folders(links, out_dir=out_dir)
    elif status == 'present':
        return get_present_folders(links, out_dir=out_dir)
    elif status == 'valid':
        return get_valid_folders(links, out_dir=out_dir)
    elif status == 'invalid':
        return get_invalid_folders(links, out_dir=out_dir)
    elif status == 'duplicate':
        return get_duplicate_folders(links, out_dir=out_dir)
    elif status == 'orphaned':
        return get_orphaned_folders(links, out_dir=out_dir)
    elif status == 'corrupted':
        return get_corrupted_folders(links, out_dir=out_dir)
    elif status == 'unrecognized':
        return get_unrecognized_folders(links, out_dir=out_dir)

    raise ValueError('Status not recognized.')
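
# config supports three modes: --get (or no flags) prints matching values,
# --set KEY=VALUE validates and writes keys to the config file and reports any
# derived options that change as a side effect, and --reset is unimplemented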

@enforce_types
def config(config_options_str: Optional[str]=None,
           config_options: Optional[List[str]]=None,
           get: bool=False,
           set: bool=False,
           reset: bool=False,
           out_dir: str=OUTPUT_DIR) -> None:
    """Get and set your ArchiveBox project configuration values"""

    check_data_folder(out_dir=out_dir)

    if config_options and config_options_str:
        stderr(
            '[X] You should either pass config values as arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif config_options_str:
        config_options = config_options_str.split('\n')

    config_options = config_options or []

    no_args = not (get or set or reset or config_options)

    matching_config: ConfigDict = {}
    if get or no_args:
        if config_options:
            config_options = [get_real_name(key) for key in config_options]
            matching_config = {key: CONFIG[key] for key in config_options if key in CONFIG}
            failed_config = [key for key in config_options if key not in CONFIG]
            if failed_config:
                stderr()
                stderr('[X] These options failed to get:', color='red')
                stderr('    {}'.format('\n    '.join(failed_config)))
                raise SystemExit(1)
        else:
            matching_config = CONFIG

        print(printable_config(matching_config))
        raise SystemExit(not matching_config)
    elif set:
        new_config = {}
        failed_options = []
        for line in config_options:
            if line.startswith('#') or not line.strip():
                continue
            if '=' not in line:
                stderr('[X] Config KEY=VALUE must have an = sign in it', color='red')
                stderr(f'    {line}')
                raise SystemExit(2)

            # split on the first = only, so values may contain = signs
            raw_key, val = line.split('=', 1)
            raw_key = raw_key.upper().strip()
            key = get_real_name(raw_key)
            if key != raw_key:
                stderr(f'[i] Note: The config option {raw_key} has been renamed to {key}, please use the new name going forwards.', color='lightyellow')

            if key in CONFIG:
                new_config[key] = val.strip()
            else:
                failed_options.append(line)

        if new_config:
            before = CONFIG
            matching_config = write_config_file(new_config, out_dir=OUTPUT_DIR)
            after = load_all_config()
            print(printable_config(matching_config))

            side_effect_changes: ConfigDict = {}
            for key, val in after.items():
                if key in USER_CONFIG and (before[key] != after[key]) and (key not in matching_config):
                    side_effect_changes[key] = after[key]

            if side_effect_changes:
                stderr()
                stderr('[i] Note: This change also affected these other options that depended on it:', color='lightyellow')
                print('    {}'.format(printable_config(side_effect_changes, prefix='    ')))
        if failed_options:
            stderr()
            stderr('[X] These options failed to set:', color='red')
            stderr('    {}'.format('\n    '.join(failed_options)))
        raise SystemExit(bool(failed_options))
    elif reset:
        stderr('[X] This command is not implemented yet.', color='red')
        stderr('    Please manually remove the relevant lines from your config file:')
        stderr(f'    {CONFIG_FILE}')
        raise SystemExit(2)
    else:
        stderr('[X] You must pass either --get or --set, or no arguments to get the whole config.', color='red')
        stderr('    archivebox config')
        stderr('    archivebox config --get SOME_KEY')
        stderr('    archivebox config --set SOME_KEY=SOME_VALUE')
        raise SystemExit(2)
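
# marker comment used to tag ArchiveBox-managed cron jobs so schedule() can
# find, show, and clear them later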
CRON_COMMENT = 'archivebox_schedule'

@enforce_types
def schedule(add: bool=False,
             show: bool=False,
             clear: bool=False,
             foreground: bool=False,
             run_all: bool=False,
             quiet: bool=False,
             every: Optional[str]=None,
             import_path: Optional[str]=None,
             out_dir: str=OUTPUT_DIR):
    """Set ArchiveBox to regularly import URLs at specific times using cron"""

    check_data_folder(out_dir=out_dir)

    os.makedirs(os.path.join(out_dir, LOGS_DIR_NAME), exist_ok=True)

    cron = CronTab(user=True)
    cron = dedupe_jobs(cron)
    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if foreground or run_all:
        if import_path or (not existing_jobs):
            stderr('{red}[X] You must schedule some jobs first before running in foreground mode.{reset}'.format(**ANSI))
            stderr('    archivebox schedule --every=hour https://example.com/some/rss/feed.xml')
            raise SystemExit(1)
        print('{green}[*] Running {} ArchiveBox jobs in foreground task scheduler...{reset}'.format(len(existing_jobs), **ANSI))
        if run_all:
            try:
                for job in existing_jobs:
                    sys.stdout.write(f'  > {job.command}')
                    sys.stdout.flush()
                    job.run()
                    sys.stdout.write(f'\r  √ {job.command}\n')
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**ANSI))
                raise SystemExit(1)
        if foreground:
            try:
                for result in cron.run_scheduler():
                    print(result)
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**ANSI))
                raise SystemExit(1)
    elif show:
        if existing_jobs:
            print('\n'.join(str(cmd) for cmd in existing_jobs))
        else:
            stderr('{red}[X] There are no ArchiveBox cron jobs scheduled for your user ({}).{reset}'.format(USER, **ANSI))
            stderr('    To schedule a new job, run:')
            stderr('        archivebox schedule --every=[timeperiod] https://example.com/some/rss/feed.xml')
        raise SystemExit(0)
    elif clear:
        print(cron.remove_all(comment=CRON_COMMENT))
        cron.write()
        raise SystemExit(0)
    elif every:
        quoted = lambda s: f'"{s}"' if s and ' ' in s else s
        cmd = [
            'cd',
            quoted(out_dir),
            '&&',
            quoted(ARCHIVEBOX_BINARY),
            *(['add', f'"{import_path}"'] if import_path else ['update']),
            '>',
            quoted(os.path.join(LOGS_DIR, 'archivebox.log')),
            '2>&1',  # redirect stderr into the log too (must come after the > redirect)
        ]
        new_job = cron.new(command=' '.join(cmd), comment=CRON_COMMENT)

        if every in ('minute', 'hour', 'day', 'week', 'month', 'year'):
            set_every = getattr(new_job.every(), every)
            set_every()
        elif CronSlices.is_valid(every):
            new_job.setall(every)
        else:
            stderr('{red}[X] Got invalid timeperiod for cron task.{reset}'.format(**ANSI))
            stderr('    It must be one of minute/hour/day/week/month/year')
            stderr('    or a quoted cron-format schedule like:')
            stderr('        archivebox schedule --every=day https://example.com/some/rss/feed.xml')
            stderr('        archivebox schedule --every="0/5 * * * *" https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        cron = dedupe_jobs(cron)
        cron.write()

        total_runs = sum(j.frequency_per_year() for j in cron)
        existing_jobs = list(cron.find_comment(CRON_COMMENT))

        print()
        print('{green}[√] Scheduled new ArchiveBox cron job for user: {} ({} jobs are active).{reset}'.format(USER, len(existing_jobs), **ANSI))
        print('\n'.join(f'  > {cmd}' if str(cmd) == str(new_job) else f'    {cmd}' for cmd in existing_jobs))
        if total_runs > 60 and not quiet:
            stderr()
            stderr('{lightyellow}[!] With the current cron config, ArchiveBox is estimated to run >{} times per year.{reset}'.format(total_runs, **ANSI))
            stderr('    Congrats on being an enthusiastic internet archiver! 👌')
            stderr()
            stderr('    Make sure you have enough storage space available to hold all the data.')
            stderr('    Using a compressed/deduped filesystem like ZFS is recommended if you plan on archiving a lot.')

        raise SystemExit(0)
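
# note: this wraps Django's runserver management command (a development
# server); extra runserver_args (e.g. a custom addr:port) are passed through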

@enforce_types
def server(runserver_args: Optional[List[str]]=None,
           reload: bool=False,
           debug: bool=False,
           out_dir: str=OUTPUT_DIR) -> None:
    """Run the ArchiveBox HTTP server"""

    runserver_args = runserver_args or []

    check_data_folder(out_dir=out_dir)

    setup_django(out_dir)
    from django.core.management import call_command
    from django.contrib.auth.models import User

    if IS_TTY and not User.objects.filter(is_superuser=True).exists():
        print('{lightyellow}[!] No admin users exist yet, you will not be able to edit links in the UI.{reset}'.format(**ANSI))
        print()
        print('    To create an admin user, run:')
        print('        archivebox manage createsuperuser')
        print()

    print('{green}[+] Starting ArchiveBox webserver...{reset}'.format(**ANSI))
    if not reload:
        runserver_args.append('--noreload')
    call_command("runserver", *runserver_args)

@enforce_types
def manage(args: Optional[List[str]]=None, out_dir: str=OUTPUT_DIR) -> None:
    """Run an ArchiveBox Django management command"""

    check_data_folder(out_dir=out_dir)
    setup_django(out_dir)
    from django.core.management import execute_from_command_line

    execute_from_command_line([f'{ARCHIVEBOX_BINARY} manage', *(args or ['help'])])

@enforce_types
def shell(out_dir: str=OUTPUT_DIR) -> None:
    """Enter an interactive ArchiveBox Django shell"""

    check_data_folder(out_dir=out_dir)
    setup_django(out_dir)
    from django.core.management import call_command
    call_command("shell_plus")

# Helpers

def printable_config(config: ConfigDict, prefix: str='') -> str:
    return f'\n{prefix}'.join(
        f'{key}={val}'
        for key, val in config.items()
        if not (isinstance(val, dict) or callable(val))
    )
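
# dedupe_jobs rebuilds the crontab: every job is removed, then one job per
# unique (schedule, command) pair is re-created with the ArchiveBox comment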
def dedupe_jobs(cron: CronTab) -> CronTab:
    deduped: Set[Tuple[str, str]] = set()

    for job in list(cron):
        unique_tuple = (str(job.slices), job.command)
        if unique_tuple not in deduped:
            deduped.add(unique_tuple)
        # remove every job; the unique ones get re-created below
        cron.remove(job)

    for schedule, command in deduped:
        job = cron.new(command=command, comment=CRON_COMMENT)
        job.setall(schedule)
        job.enable()

    return cron

def printable_folder_status(name, folder) -> str:
    num_files = '-'  # fallback, overwritten below if a path is set
    if folder['enabled']:
        if folder['is_valid']:
            color, symbol, note = 'green', '√', 'valid'
        else:
            color, symbol, note, num_files = 'red', 'X', 'invalid', '?'
    else:
        color, symbol, note, num_files = 'lightyellow', '-', 'disabled', '-'

    if folder['path']:
        if os.path.exists(folder['path']):
            num_files = (
                f'{len(os.listdir(folder["path"]))} files'
                if os.path.isdir(folder['path']) else
                human_readable_size(os.path.getsize(folder['path']))
            )
        else:
            num_files = 'missing'

        if ' ' in folder['path']:
            folder['path'] = f'"{folder["path"]}"'

    return ' '.join((
        ANSI[color],
        symbol,
        ANSI['reset'],
        name.ljust(22),
        (folder['path'] or '').ljust(76),
        num_files.ljust(14),
        ANSI[color],
        note,
        ANSI['reset'],
    ))

def printable_dependency_version(name, dependency) -> str:
    if dependency['enabled']:
        if dependency['is_valid']:
            color, symbol, note = 'green', '√', 'valid'
            version = 'v' + re.search(r'[\d\.]+', dependency['version'])[0]
        else:
            color, symbol, note, version = 'red', 'X', 'invalid', '?'
    else:
        color, symbol, note, version = 'lightyellow', '-', 'disabled', '-'

    if ' ' in dependency['path']:
        dependency['path'] = f'"{dependency["path"]}"'

    return ' '.join((
        ANSI[color],
        symbol,
        ANSI['reset'],
        name.ljust(22),
        (dependency['path'] or '').ljust(76),
        version.ljust(14),
        ANSI[color],
        note,
        ANSI['reset'],
    ))