__package__ = 'archivebox'

import os
import sys
import shutil
import platform

from pathlib import Path
from datetime import date
from typing import Dict, List, Optional, Iterable, IO, Union

from crontab import CronTab, CronSlices
from django.db.models import QuerySet

from .cli import (
    list_subcommands,
    run_subcommand,
    display_first,
    meta_cmds,
    main_cmds,
    archive_cmds,
)
from .parsers import (
    save_text_as_source,
    save_file_as_source,
    parse_links_memory,
)
from .index.schema import Link
from .util import enforce_types  # type: ignore
from .system import get_dir_size, dedupe_cron_jobs, CRON_COMMENT
from .index import (
    load_main_index,
    get_empty_snapshot_queryset,
    parse_links_from_source,
    dedupe_links,
    write_main_index,
    snapshot_filter,
    get_indexed_folders,
    get_archived_folders,
    get_unarchived_folders,
    get_present_folders,
    get_valid_folders,
    get_invalid_folders,
    get_duplicate_folders,
    get_orphaned_folders,
    get_corrupted_folders,
    get_unrecognized_folders,
    fix_invalid_folder_locations,
)
from .index.json import (
    parse_json_main_index,
    parse_json_links_details,
)
from .index.sql import (
    get_admins,
    apply_migrations,
    remove_from_sql_main_index,
)
from .extractors import archive_links, archive_link, ignore_methods
from .config import (
    stderr,
    hint,
    ConfigDict,
    ANSI,
    IS_TTY,
    IN_DOCKER,
    USER,
    ARCHIVEBOX_BINARY,
    ONLY_NEW,
    OUTPUT_DIR,
    SOURCES_DIR,
    ARCHIVE_DIR,
    LOGS_DIR,
    CONFIG_FILE,
    ARCHIVE_DIR_NAME,
    SOURCES_DIR_NAME,
    LOGS_DIR_NAME,
    STATIC_DIR_NAME,
    JSON_INDEX_FILENAME,
    HTML_INDEX_FILENAME,
    SQL_INDEX_FILENAME,
    ROBOTS_TXT_FILENAME,
    FAVICON_FILENAME,
    check_dependencies,
    check_data_folder,
    write_config_file,
    setup_django,
    VERSION,
    CODE_LOCATIONS,
    EXTERNAL_LOCATIONS,
    DATA_LOCATIONS,
    DEPENDENCIES,
    load_all_config,
    CONFIG,
    USER_CONFIG,
    get_real_name,
)
from .logging_util import (
    TERM_WIDTH,
    TimedProgress,
    log_importing_started,
    log_crawl_started,
    log_removal_started,
    log_removal_finished,
    log_list_started,
    log_list_finished,
    printable_config,
    printable_folders,
    printable_filesize,
    printable_folder_status,
    printable_dependency_version,
)
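

# Entries that are allowed to exist inside OUTPUT_DIR without `init` treating
# the folder as already populated (see the is_empty check in init() below).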
ALLOWED_IN_OUTPUT_DIR = {
    'lost+found',
    '.DS_Store',
    '.venv',
    'venv',
    'virtualenv',
    '.virtualenv',
    'node_modules',
    'package-lock.json',
    ARCHIVE_DIR_NAME,
    SOURCES_DIR_NAME,
    LOGS_DIR_NAME,
    STATIC_DIR_NAME,
    SQL_INDEX_FILENAME,
    JSON_INDEX_FILENAME,
    HTML_INDEX_FILENAME,
    ROBOTS_TXT_FILENAME,
    FAVICON_FILENAME,
}


@enforce_types
def help(out_dir: Path=OUTPUT_DIR) -> None:
    """Print the ArchiveBox help message and usage"""

    all_subcommands = list_subcommands()
    COMMANDS_HELP_TEXT = '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in meta_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in main_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in archive_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd not in display_first
    )

    if (Path(out_dir) / SQL_INDEX_FILENAME).exists():
        print('''{green}ArchiveBox v{}: The self-hosted internet archive.{reset}

{lightred}Active data directory:{reset}
    {}

{lightred}Usage:{reset}
    archivebox [command] [--help] [--version] [...args]

{lightred}Commands:{reset}
    {}

{lightred}Example Use:{reset}
    mkdir my-archive; cd my-archive/
    archivebox init
    archivebox status

    archivebox add https://example.com/some/page
    archivebox add --depth=1 ~/Downloads/bookmarks_export.html

    archivebox list --sort=timestamp --csv=timestamp,url,is_archived
    archivebox schedule --every=day https://example.com/some/feed.rss
    archivebox update --resume=15109948213.123

{lightred}Documentation:{reset}
    https://github.com/ArchiveBox/ArchiveBox/wiki
'''.format(VERSION, out_dir, COMMANDS_HELP_TEXT, **ANSI))
    else:
        print('{green}Welcome to ArchiveBox v{}!{reset}'.format(VERSION, **ANSI))
        print()
        if IN_DOCKER:
            print('When using Docker, you need to mount a volume to use as your data dir:')
            print('    docker run -v /some/path:/data archivebox ...')
            print()
        print('To import an existing archive (from a previous version of ArchiveBox):')
        print('    1. cd into your data dir OUTPUT_DIR (usually ArchiveBox/output) and run:')
        print('    2. archivebox init')
        print()
        print('To start a new archive:')
        print('    1. Create an empty directory, then cd into it and run:')
        print('    2. archivebox init')
        print()
        print('For more information, see the documentation here:')
        print('    https://github.com/ArchiveBox/ArchiveBox/wiki')


@enforce_types
def version(quiet: bool=False,
            out_dir: Path=OUTPUT_DIR) -> None:
    """Print the ArchiveBox version and dependency information"""

    if quiet:
        print(VERSION)
    else:
        print('ArchiveBox v{}'.format(VERSION))
        p = platform.uname()
        print(p.system, p.release, p.machine)
        print()

        print('{white}[i] Dependency versions:{reset}'.format(**ANSI))
        for name, dependency in DEPENDENCIES.items():
            print(printable_dependency_version(name, dependency))

        print()
        print('{white}[i] Source-code locations:{reset}'.format(**ANSI))
        for name, folder in CODE_LOCATIONS.items():
            print(printable_folder_status(name, folder))

        print()
        print('{white}[i] Secrets locations:{reset}'.format(**ANSI))
        for name, folder in EXTERNAL_LOCATIONS.items():
            print(printable_folder_status(name, folder))

        print()
        if DATA_LOCATIONS['OUTPUT_DIR']['is_valid']:
            print('{white}[i] Data locations:{reset}'.format(**ANSI))
            for name, folder in DATA_LOCATIONS.items():
                print(printable_folder_status(name, folder))
        else:
            print()
            print('{white}[i] Data locations:{reset}'.format(**ANSI))

        print()
        check_dependencies()


@enforce_types
def run(subcommand: str,
        subcommand_args: Optional[List[str]],
        stdin: Optional[IO]=None,
        out_dir: Path=OUTPUT_DIR) -> None:
    """Run a given ArchiveBox subcommand with the given list of args"""
    run_subcommand(
        subcommand=subcommand,
        subcommand_args=subcommand_args,
        stdin=stdin,
        pwd=out_dir,
    )
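

# Example (illustrative): the CLI entrypoint dispatches through run(), so
#   run('add', ['--depth=1', 'https://example.com/feed.rss'])
# is equivalent to running `archivebox add --depth=1 https://example.com/feed.rss`.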


@enforce_types
def init(force: bool=False, out_dir: Path=OUTPUT_DIR) -> None:
    """Initialize a new ArchiveBox collection in the current directory"""

    Path(out_dir).mkdir(exist_ok=True)
    is_empty = not len(set(os.listdir(out_dir)) - ALLOWED_IN_OUTPUT_DIR)

    if (Path(out_dir) / JSON_INDEX_FILENAME).exists():
        stderr("[!] This folder contains a JSON index. It is deprecated, and will no longer be kept up to date automatically.", color="lightyellow")
        stderr("    You can run `archivebox list --json --with-headers > index.json` to manually generate it.", color="lightyellow")

    existing_index = (Path(out_dir) / SQL_INDEX_FILENAME).exists()

    if is_empty and not existing_index:
        print('{green}[+] Initializing a new ArchiveBox collection in this folder...{reset}'.format(**ANSI))
        print(f'    {out_dir}')
        print('{green}------------------------------------------------------------------{reset}'.format(**ANSI))
    elif existing_index:
        print('{green}[*] Updating existing ArchiveBox collection in this folder...{reset}'.format(**ANSI))
        print(f'    {out_dir}')
        print('{green}------------------------------------------------------------------{reset}'.format(**ANSI))
    else:
        if force:
            stderr('[!] This folder appears to already have files in it, but no index.sqlite3 is present.', color='lightyellow')
            stderr('    Because --force was passed, ArchiveBox will initialize anyway (which may overwrite existing files).')
        else:
            stderr(
                ("{red}[X] This folder appears to already have files in it, but no index.sqlite3 is present.{reset}\n\n"
                 "    You must run init in a completely empty directory, or an existing data folder.\n\n"
                 "    {lightred}Hint:{reset} To import an existing data folder make sure to cd into the folder first,\n"
                 "    then run 'archivebox init' to pick up where you left off.\n\n"
                 "    (Always make sure your data folder is backed up first before updating ArchiveBox)"
                ).format(out_dir, **ANSI)
            )
            raise SystemExit(2)

    if existing_index:
        print('\n{green}[*] Verifying archive folder structure...{reset}'.format(**ANSI))
    else:
        print('\n{green}[+] Building archive folder structure...{reset}'.format(**ANSI))

    Path(SOURCES_DIR).mkdir(exist_ok=True)
    print(f'    √ {SOURCES_DIR}')

    Path(ARCHIVE_DIR).mkdir(exist_ok=True)
    print(f'    √ {ARCHIVE_DIR}')

    Path(LOGS_DIR).mkdir(exist_ok=True)
    print(f'    √ {LOGS_DIR}')

    write_config_file({}, out_dir=out_dir)
    print(f'    √ {CONFIG_FILE}')

    if (Path(out_dir) / SQL_INDEX_FILENAME).exists():
        print('\n{green}[*] Verifying main SQL index and running migrations...{reset}'.format(**ANSI))
    else:
        print('\n{green}[+] Building main SQL index and running migrations...{reset}'.format(**ANSI))

    setup_django(out_dir, check_db=False)
    DATABASE_FILE = Path(out_dir) / SQL_INDEX_FILENAME
    print(f'    √ {DATABASE_FILE}')
    print()
    for migration_line in apply_migrations(out_dir):
        print(f'    {migration_line}')

    assert DATABASE_FILE.exists()

    # from django.contrib.auth.models import User
    # if IS_TTY and not User.objects.filter(is_superuser=True).exists():
    #     print('{green}[+] Creating admin user account...{reset}'.format(**ANSI))
    #     call_command("createsuperuser", interactive=True)

    print()
    print('{green}[*] Collecting links from any existing indexes and archive folders...{reset}'.format(**ANSI))

    all_links = get_empty_snapshot_queryset()
    pending_links: Dict[str, Link] = {}

    if existing_index:
        all_links = load_main_index(out_dir=out_dir, warn=False)
        print('    √ Loaded {} links from existing main index.'.format(all_links.count()))

        # Links in data folders that don't match their timestamp
        fixed, cant_fix = fix_invalid_folder_locations(out_dir=out_dir)
        if fixed:
            print('    {lightyellow}√ Fixed {} data directory locations that didn\'t match their link timestamps.{reset}'.format(len(fixed), **ANSI))
        if cant_fix:
            print('    {lightyellow}! Could not fix {} data directory locations due to conflicts with existing folders.{reset}'.format(len(cant_fix), **ANSI))

        # Links in the JSON index but not in the main index
        orphaned_json_links = {
            link.url: link
            for link in parse_json_main_index(out_dir)
            if not all_links.filter(url=link.url).exists()
        }
        if orphaned_json_links:
            pending_links.update(orphaned_json_links)
            print('    {lightyellow}√ Added {} orphaned links from existing JSON index...{reset}'.format(len(orphaned_json_links), **ANSI))

        # Links in data dir indexes but not in the main index
        orphaned_data_dir_links = {
            link.url: link
            for link in parse_json_links_details(out_dir)
            if not all_links.filter(url=link.url).exists()
        }
        if orphaned_data_dir_links:
            pending_links.update(orphaned_data_dir_links)
            print('    {lightyellow}√ Added {} orphaned links from existing archive directories.{reset}'.format(len(orphaned_data_dir_links), **ANSI))

        # Links in invalid/duplicate data dirs
        invalid_folders = {
            folder: link
            for folder, link in get_invalid_folders(all_links, out_dir=out_dir).items()
        }
        if invalid_folders:
            print('    {lightyellow}! Skipped adding {} invalid link data directories.{reset}'.format(len(invalid_folders), **ANSI))
            print('        X ' + '\n        X '.join(f'{folder} {link}' for folder, link in invalid_folders.items()))
            print()
            print('    {lightred}Hint:{reset} For more information about the link data directories that were skipped, run:'.format(**ANSI))
            print('        archivebox status')
            print('        archivebox list --status=invalid')

    write_main_index(list(pending_links.values()), out_dir=out_dir, finished=True)

    print('\n{green}------------------------------------------------------------------{reset}'.format(**ANSI))
    if existing_index:
        print('{green}[√] Done. Verified and updated the existing ArchiveBox collection.{reset}'.format(**ANSI))
    else:
        print('{green}[√] Done. A new ArchiveBox collection was initialized ({} links).{reset}'.format(len(all_links), **ANSI))
    print()
    print('    {lightred}Hint:{reset} To view your archive index, run:'.format(**ANSI))
    print('        archivebox server  # then visit http://127.0.0.1:8000')
    print()
    print('    To add new links, you can run:')
    print("        archivebox add ~/some/path/or/url/to/list_of_links.txt")
    print()
    print('    For more usage and examples, run:')
    print('        archivebox help')

    json_index = Path(out_dir) / JSON_INDEX_FILENAME
    html_index = Path(out_dir) / HTML_INDEX_FILENAME
    index_name = f"{date.today()}_index_old"
    if json_index.exists():
        json_index.rename(f"{index_name}.json")
    if html_index.exists():
        html_index.rename(f"{index_name}.html")


@enforce_types
def status(out_dir: Path=OUTPUT_DIR) -> None:
    """Print out some info and statistics about the archive collection"""

    check_data_folder(out_dir=out_dir)

    from core.models import Snapshot
    from django.contrib.auth import get_user_model
    User = get_user_model()

    print('{green}[*] Scanning archive main index...{reset}'.format(**ANSI))
    print(ANSI['lightyellow'], f'   {out_dir}/*', ANSI['reset'])
    num_bytes, num_dirs, num_files = get_dir_size(out_dir, recursive=False, pattern='index.')
    size = printable_filesize(num_bytes)
    print(f'    Index size: {size} across {num_files} files')
    print()

    links = load_main_index(out_dir=out_dir)
    num_sql_links = links.count()
    num_link_details = sum(1 for link in parse_json_links_details(out_dir=out_dir))
    print(f'    > SQL Main Index: {num_sql_links} links'.ljust(36), f'(found in {SQL_INDEX_FILENAME})')
    print(f'    > JSON Link Details: {num_link_details} links'.ljust(36), f'(found in {ARCHIVE_DIR_NAME}/*/index.json)')
    print()

    print('{green}[*] Scanning archive data directories...{reset}'.format(**ANSI))
    print(ANSI['lightyellow'], f'   {ARCHIVE_DIR}/*', ANSI['reset'])
    num_bytes, num_dirs, num_files = get_dir_size(ARCHIVE_DIR)
    size = printable_filesize(num_bytes)
    print(f'    Size: {size} across {num_files} files in {num_dirs} directories')
    print(ANSI['black'])

    num_indexed = len(get_indexed_folders(links, out_dir=out_dir))
    num_archived = len(get_archived_folders(links, out_dir=out_dir))
    num_unarchived = len(get_unarchived_folders(links, out_dir=out_dir))
    print(f'    > indexed: {num_indexed}'.ljust(36), f'({get_indexed_folders.__doc__})')
    print(f'      > archived: {num_archived}'.ljust(36), f'({get_archived_folders.__doc__})')
    print(f'      > unarchived: {num_unarchived}'.ljust(36), f'({get_unarchived_folders.__doc__})')

    num_present = len(get_present_folders(links, out_dir=out_dir))
    num_valid = len(get_valid_folders(links, out_dir=out_dir))
    print()
    print(f'    > present: {num_present}'.ljust(36), f'({get_present_folders.__doc__})')
    print(f'      > valid: {num_valid}'.ljust(36), f'({get_valid_folders.__doc__})')

    duplicate = get_duplicate_folders(links, out_dir=out_dir)
    orphaned = get_orphaned_folders(links, out_dir=out_dir)
    corrupted = get_corrupted_folders(links, out_dir=out_dir)
    unrecognized = get_unrecognized_folders(links, out_dir=out_dir)
    num_invalid = len({**duplicate, **orphaned, **corrupted, **unrecognized})
    print(f'      > invalid: {num_invalid}'.ljust(36), f'({get_invalid_folders.__doc__})')
    print(f'        > duplicate: {len(duplicate)}'.ljust(36), f'({get_duplicate_folders.__doc__})')
    print(f'        > orphaned: {len(orphaned)}'.ljust(36), f'({get_orphaned_folders.__doc__})')
    print(f'        > corrupted: {len(corrupted)}'.ljust(36), f'({get_corrupted_folders.__doc__})')
    print(f'        > unrecognized: {len(unrecognized)}'.ljust(36), f'({get_unrecognized_folders.__doc__})')
    print(ANSI['reset'])

    if num_indexed:
        print('    {lightred}Hint:{reset} You can list link data directories by status like so:'.format(**ANSI))
        print('        archivebox list --status=<status>  (e.g. indexed, corrupted, archived, etc.)')

    if orphaned:
        print('    {lightred}Hint:{reset} To automatically import orphaned data directories into the main index, run:'.format(**ANSI))
        print('        archivebox init')

    if num_invalid:
        print('    {lightred}Hint:{reset} You may need to manually remove or fix some invalid data directories; afterwards, make sure to run:'.format(**ANSI))
        print('        archivebox init')

    print()
    print('{green}[*] Scanning recent archive changes and user logins:{reset}'.format(**ANSI))
    print(ANSI['lightyellow'], f'   {LOGS_DIR}/*', ANSI['reset'])
    users = get_admins().values_list('username', flat=True)
    print(f'    UI users {len(users)}: {", ".join(users)}')
    last_login = User.objects.order_by('last_login').last()
    if last_login:
        print(f'    Last UI login: {last_login.username} @ {str(last_login.last_login)[:16]}')
    last_updated = Snapshot.objects.order_by('updated').last()
    if last_updated:
        print(f'    Last changes: {str(last_updated.updated)[:16]}')

    if not users:
        print()
        print('    {lightred}Hint:{reset} You can create an admin user by running:'.format(**ANSI))
        print('        archivebox manage createsuperuser')

    print()
    for snapshot in links.order_by('-updated')[:10]:
        if not snapshot.updated:
            continue
        print(
            ANSI['black'],
            (
                f'   > {str(snapshot.updated)[:16]} '
                f'[{snapshot.num_outputs} {("X", "√")[snapshot.is_archived]} {printable_filesize(snapshot.archive_size)}] '
                f'"{snapshot.title}": {snapshot.url}'
            )[:TERM_WIDTH()],
            ANSI['reset'],
        )
    print(ANSI['black'], '   ...', ANSI['reset'])


@enforce_types
def oneshot(url: str, out_dir: Path=OUTPUT_DIR):
    """
    Create a single URL archive folder with an index.json and index.html, and all the archive method outputs.
    You can run this to archive single pages without needing to create a whole collection with archivebox init.
    """
    oneshot_link, _ = parse_links_memory([url])
    if len(oneshot_link) > 1:
        stderr(
            '[X] You should pass a single url to the oneshot command',
            color='red'
        )
        raise SystemExit(2)

    methods = ignore_methods(['title'])
    archive_link(oneshot_link[0], out_dir=out_dir, methods=methods, skip_index=True)
    return oneshot_link
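

# Example (CLI, illustrative): archive one page into the current directory
# without creating a full collection first:
#   $ archivebox oneshot https://example.com/some/page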


@enforce_types
def add(urls: Union[str, List[str]],
        depth: int=0,
        update_all: bool=not ONLY_NEW,
        index_only: bool=False,
        overwrite: bool=False,
        init: bool=False,
        out_dir: Path=OUTPUT_DIR,
        extractors: str="") -> List[Link]:
    """Add a new URL or list of URLs to your archive"""

    assert depth in (0, 1), 'Depth must be 0 or 1 (depth >1 is not supported yet)'

    extractors = extractors.split(",") if extractors else []

    if init:
        run_subcommand('init', stdin=None, pwd=out_dir)

    # Load list of links from the existing index
    check_data_folder(out_dir=out_dir)
    check_dependencies()
    new_links: List[Link] = []
    all_links = load_main_index(out_dir=out_dir)

    log_importing_started(urls=urls, depth=depth, index_only=index_only)
    if isinstance(urls, str):
        # save verbatim stdin to sources
        write_ahead_log = save_text_as_source(urls, filename='{ts}-import.txt', out_dir=out_dir)
    elif isinstance(urls, list):
        # save verbatim args to sources
        write_ahead_log = save_text_as_source('\n'.join(urls), filename='{ts}-import.txt', out_dir=out_dir)

    new_links += parse_links_from_source(write_ahead_log, root_url=None)

    # If we're going one level deeper, download each link and look for more links
    new_links_depth = []
    if new_links and depth == 1:
        log_crawl_started(new_links)
        for new_link in new_links:
            downloaded_file = save_file_as_source(new_link.url, filename=f'{new_link.timestamp}-crawl-{new_link.domain}.txt', out_dir=out_dir)
            new_links_depth += parse_links_from_source(downloaded_file, root_url=new_link.url)

    imported_links = list({link.url: link for link in (new_links + new_links_depth)}.values())
    new_links = dedupe_links(all_links, imported_links)
    write_main_index(links=new_links, out_dir=out_dir, finished=not new_links)
    all_links = load_main_index(out_dir=out_dir)

    if index_only:
        return all_links

    # Run the archive methods for each link
    archive_kwargs = {
        "out_dir": out_dir,
    }
    if extractors:
        archive_kwargs["methods"] = extractors

    if update_all:
        archive_links(all_links, overwrite=overwrite, **archive_kwargs)
    elif overwrite:
        archive_links(imported_links, overwrite=True, **archive_kwargs)
    elif new_links:
        archive_links(new_links, overwrite=False, **archive_kwargs)

    return all_links
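

# Example (Python, illustrative): crawl a feed one level deep, but only write
# the index entries without running the archive methods yet:
#   add('https://example.com/some/feed.rss', depth=1, index_only=True)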


@enforce_types
def remove(filter_str: Optional[str]=None,
           filter_patterns: Optional[List[str]]=None,
           filter_type: str='exact',
           snapshots: Optional[QuerySet]=None,
           after: Optional[float]=None,
           before: Optional[float]=None,
           yes: bool=False,
           delete: bool=False,
           out_dir: Path=OUTPUT_DIR) -> List[Link]:
    """Remove the specified URLs from the archive"""

    check_data_folder(out_dir=out_dir)

    if snapshots is None:
        if filter_str and filter_patterns:
            stderr(
                '[X] You should pass either a pattern as an argument, '
                'or pass a list of patterns via stdin, but not both.\n',
                color='red',
            )
            raise SystemExit(2)
        elif not (filter_str or filter_patterns):
            stderr(
                '[X] You should pass either a pattern as an argument, '
                'or pass a list of patterns via stdin.',
                color='red',
            )
            stderr()
            hint(('To remove all urls you can run:',
                  'archivebox remove --filter-type=regex ".*"'))
            stderr()
            raise SystemExit(2)
        elif filter_str:
            filter_patterns = [ptn.strip() for ptn in filter_str.split('\n')]

    list_kwargs = {
        "filter_patterns": filter_patterns,
        "filter_type": filter_type,
        "after": after,
        "before": before,
    }
    if snapshots:
        list_kwargs["snapshots"] = snapshots

    log_list_started(filter_patterns, filter_type)
    timer = TimedProgress(360, prefix='      ')
    try:
        snapshots = list_links(**list_kwargs)
    finally:
        timer.end()

    if not snapshots.exists():
        log_removal_finished(0, 0)
        raise SystemExit(1)

    log_links = [link.as_link() for link in snapshots]
    log_list_finished(log_links)
    log_removal_started(log_links, yes=yes, delete=delete)

    timer = TimedProgress(360, prefix='      ')
    try:
        for snapshot in snapshots:
            if delete:
                shutil.rmtree(snapshot.as_link().link_dir, ignore_errors=True)
    finally:
        timer.end()

    to_remove = snapshots.count()
    remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
    all_snapshots = load_main_index(out_dir=out_dir)
    log_removal_finished(all_snapshots.count(), to_remove)

    return all_snapshots


@enforce_types
def update(resume: Optional[float]=None,
           only_new: bool=ONLY_NEW,
           index_only: bool=False,
           overwrite: bool=False,
           filter_patterns_str: Optional[str]=None,
           filter_patterns: Optional[List[str]]=None,
           filter_type: Optional[str]=None,
           status: Optional[str]=None,
           after: Optional[str]=None,
           before: Optional[str]=None,
           out_dir: Path=OUTPUT_DIR) -> List[Link]:
    """Import any new links from subscriptions and retry any previously failed/skipped links"""

    check_data_folder(out_dir=out_dir)
    check_dependencies()
    new_links: List[Link] = []  # TODO: Remove input argument: only_new

    # Step 1: Filter for selected_links
    matching_snapshots = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )
    matching_folders = list_folders(
        links=matching_snapshots,
        status=status,
        out_dir=out_dir,
    )
    all_links = [link for link in matching_folders.values() if link]

    if index_only:
        return all_links

    # Step 2: Run the archive methods for each link
    to_archive = new_links if only_new else all_links
    if resume:
        to_archive = [
            link for link in to_archive
            if link.timestamp >= str(resume)
        ]
        if not to_archive:
            stderr('')
            stderr(f'[√] Nothing found to resume after {resume}', color='green')
            return all_links

    archive_links(to_archive, overwrite=overwrite, out_dir=out_dir)

    # Step 3: Re-write links index with updated titles, icons, and resources
    all_links = load_main_index(out_dir=out_dir)
    return all_links


@enforce_types
def list_all(filter_patterns_str: Optional[str]=None,
             filter_patterns: Optional[List[str]]=None,
             filter_type: str='exact',
             status: Optional[str]=None,
             after: Optional[float]=None,
             before: Optional[float]=None,
             sort: Optional[str]=None,
             csv: Optional[str]=None,
             json: bool=False,
             html: bool=False,
             with_headers: bool=False,
             out_dir: Path=OUTPUT_DIR) -> Iterable[Link]:
    """List, filter, and export information about archive entries"""

    check_data_folder(out_dir=out_dir)

    if filter_patterns and filter_patterns_str:
        stderr(
            '[X] You should either pass filter patterns as arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif filter_patterns_str:
        filter_patterns = filter_patterns_str.split('\n')

    snapshots = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )

    if sort:
        snapshots = snapshots.order_by(sort)

    folders = list_folders(
        links=snapshots,
        status=status,
        out_dir=out_dir,
    )

    print(printable_folders(folders, json=json, csv=csv, html=html, with_headers=with_headers))
    return folders


@enforce_types
def list_links(snapshots: Optional[QuerySet]=None,
               filter_patterns: Optional[List[str]]=None,
               filter_type: str='exact',
               after: Optional[float]=None,
               before: Optional[float]=None,
               out_dir: Path=OUTPUT_DIR) -> Iterable[Link]:

    check_data_folder(out_dir=out_dir)

    if snapshots:
        all_snapshots = snapshots
    else:
        all_snapshots = load_main_index(out_dir=out_dir)

    if after is not None:
        all_snapshots = all_snapshots.filter(timestamp__gte=after)
    if before is not None:
        all_snapshots = all_snapshots.filter(timestamp__lt=before)
    if filter_patterns:
        all_snapshots = snapshot_filter(all_snapshots, filter_patterns, filter_type)
    return all_snapshots
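

# Example (illustrative): select snapshots in a timestamp window, then narrow
# them down with a substring filter:
#   list_links(filter_patterns=['example.com'], filter_type='substring',
#              after=1544155000, before=1544156000)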


@enforce_types
def list_folders(links: List[Link],
                 status: str,
                 out_dir: Path=OUTPUT_DIR) -> Dict[str, Optional[Link]]:

    check_data_folder(out_dir=out_dir)

    STATUS_FUNCTIONS = {
        "indexed": get_indexed_folders,
        "archived": get_archived_folders,
        "unarchived": get_unarchived_folders,
        "present": get_present_folders,
        "valid": get_valid_folders,
        "invalid": get_invalid_folders,
        "duplicate": get_duplicate_folders,
        "orphaned": get_orphaned_folders,
        "corrupted": get_corrupted_folders,
        "unrecognized": get_unrecognized_folders,
    }

    try:
        return STATUS_FUNCTIONS[status](links, out_dir=out_dir)
    except KeyError:
        raise ValueError('Status not recognized.')
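

# Example (illustrative): get the data folders for links that were indexed
# but never fully archived:
#   list_folders(links=list_links(), status='unarchived')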


@enforce_types
def config(config_options_str: Optional[str]=None,
           config_options: Optional[List[str]]=None,
           get: bool=False,
           set: bool=False,
           reset: bool=False,
           out_dir: Path=OUTPUT_DIR) -> None:
    """Get and set your ArchiveBox project configuration values"""

    check_data_folder(out_dir=out_dir)

    if config_options and config_options_str:
        stderr(
            '[X] You should either pass config values as arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif config_options_str:
        config_options = config_options_str.split('\n')

    config_options = config_options or []
    no_args = not (get or set or reset or config_options)

    matching_config: ConfigDict = {}
    if get or no_args:
        if config_options:
            config_options = [get_real_name(key) for key in config_options]
            matching_config = {key: CONFIG[key] for key in config_options if key in CONFIG}
            failed_config = [key for key in config_options if key not in CONFIG]
            if failed_config:
                stderr()
                stderr('[X] These options failed to get:', color='red')
                stderr('    {}'.format('\n    '.join(failed_config)))
                raise SystemExit(1)
        else:
            matching_config = CONFIG

        print(printable_config(matching_config))
        raise SystemExit(not matching_config)
    elif set:
        new_config = {}
        failed_options = []
        for line in config_options:
            if line.startswith('#') or not line.strip():
                continue
            if '=' not in line:
                stderr('[X] Config KEY=VALUE must have an = sign in it', color='red')
                stderr(f'    {line}')
                raise SystemExit(2)

            raw_key, val = line.split('=', 1)
            raw_key = raw_key.upper().strip()
            key = get_real_name(raw_key)
            if key != raw_key:
                stderr(f'[i] Note: The config option {raw_key} has been renamed to {key}, please use the new name going forwards.', color='lightyellow')

            if key in CONFIG:
                new_config[key] = val.strip()
            else:
                failed_options.append(line)

        if new_config:
            before = CONFIG
            matching_config = write_config_file(new_config, out_dir=OUTPUT_DIR)
            after = load_all_config()
            print(printable_config(matching_config))

            side_effect_changes: ConfigDict = {}
            for key, val in after.items():
                if key in USER_CONFIG and (before[key] != after[key]) and (key not in matching_config):
                    side_effect_changes[key] = after[key]

            if side_effect_changes:
                stderr()
                stderr('[i] Note: This change also affected these other options that depended on it:', color='lightyellow')
                print('    {}'.format(printable_config(side_effect_changes, prefix='    ')))
        if failed_options:
            stderr()
            stderr('[X] These options failed to set (check for typos):', color='red')
            stderr('    {}'.format('\n    '.join(failed_options)))
        raise SystemExit(bool(failed_options))
    elif reset:
        stderr('[X] This command is not implemented yet.', color='red')
        stderr('    Please manually remove the relevant lines from your config file:')
        stderr(f'    {CONFIG_FILE}')
        raise SystemExit(2)
    else:
        stderr('[X] You must pass either --get or --set, or no arguments to get the whole config.', color='red')
        stderr('    archivebox config')
        stderr('    archivebox config --get SOME_KEY')
        stderr('    archivebox config --set SOME_KEY=SOME_VALUE')
        raise SystemExit(2)
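

# Example (CLI, illustrative): read one config value, then override it:
#   $ archivebox config --get CHROME_BINARY
#   $ archivebox config --set CHROME_BINARY=/usr/bin/chromium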


@enforce_types
def schedule(add: bool=False,
             show: bool=False,
             clear: bool=False,
             foreground: bool=False,
             run_all: bool=False,
             quiet: bool=False,
             every: Optional[str]=None,
             depth: int=0,
             import_path: Optional[str]=None,
             out_dir: Path=OUTPUT_DIR):
    """Set ArchiveBox to regularly import URLs at specific times using cron"""

    check_data_folder(out_dir=out_dir)

    (Path(out_dir) / LOGS_DIR_NAME).mkdir(exist_ok=True)

    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)

    if clear:
        print(cron.remove_all(comment=CRON_COMMENT))
        cron.write()
        raise SystemExit(0)

    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if every or add:
        every = every or 'day'
        quoted = lambda s: f'"{s}"' if s and ' ' in str(s) else str(s)
        cmd = [
            'cd',
            quoted(out_dir),
            '&&',
            quoted(ARCHIVEBOX_BINARY),
            *(['add', f'--depth={depth}', f'"{import_path}"'] if import_path else ['update']),
            '>',
            quoted(Path(LOGS_DIR) / 'archivebox.log'),
            '2>&1',
        ]
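        # The pieces above join into the shell command stored in the crontab
        # entry, e.g. (illustrative paths):
        #   cd /data && /usr/local/bin/archivebox update > /data/logs/archivebox.log 2>&1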
        new_job = cron.new(command=' '.join(cmd), comment=CRON_COMMENT)

        if every in ('minute', 'hour', 'day', 'month', 'year'):
            set_every = getattr(new_job.every(), every)
            set_every()
        elif CronSlices.is_valid(every):
            new_job.setall(every)
        else:
            stderr('{red}[X] Got invalid timeperiod for cron task.{reset}'.format(**ANSI))
            stderr('    It must be one of minute/hour/day/month/year')
            stderr('    or a quoted cron-format schedule like:')
            stderr('        archivebox schedule --every=day https://example.com/some/rss/feed.xml')
            stderr('        archivebox schedule --every="0/5 * * * *" https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        cron = dedupe_cron_jobs(cron)
        cron.write()

        total_runs = sum(j.frequency_per_year() for j in cron)
        existing_jobs = list(cron.find_comment(CRON_COMMENT))

        print()
        print('{green}[√] Scheduled new ArchiveBox cron job for user: {} ({} jobs are active).{reset}'.format(USER, len(existing_jobs), **ANSI))
        print('\n'.join(f'  > {cmd}' if str(cmd) == str(new_job) else f'    {cmd}' for cmd in existing_jobs))
        if total_runs > 60 and not quiet:
            stderr()
            stderr('{lightyellow}[!] With the current cron config, ArchiveBox is estimated to run >{} times per year.{reset}'.format(total_runs, **ANSI))
            stderr('    Congrats on being an enthusiastic internet archiver! 👌')
            stderr()
            stderr('    Make sure you have enough storage space available to hold all the data.')
            stderr('    Using a compressed/deduped filesystem like ZFS is recommended if you plan on archiving a lot.')
            stderr('')
    elif show:
        if existing_jobs:
            print('\n'.join(str(cmd) for cmd in existing_jobs))
        else:
            stderr('{red}[X] There are no ArchiveBox cron jobs scheduled for your user ({}).{reset}'.format(USER, **ANSI))
            stderr('    To schedule a new job, run:')
            stderr('        archivebox schedule --every=[timeperiod] https://example.com/some/rss/feed.xml')
        raise SystemExit(0)

    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)
    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if foreground or run_all:
        if not existing_jobs:
            stderr('{red}[X] You must schedule some jobs first before running in foreground mode.{reset}'.format(**ANSI))
            stderr('    archivebox schedule --every=hour https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        print('{green}[*] Running {} ArchiveBox jobs in foreground task scheduler...{reset}'.format(len(existing_jobs), **ANSI))
        if run_all:
            try:
                for job in existing_jobs:
                    sys.stdout.write(f'  > {job.command.split("/archivebox ")[0].split(" && ")[0]}\n')
                    sys.stdout.write(f'    > {job.command.split("/archivebox ")[-1].split(" > ")[0]}')
                    sys.stdout.flush()
                    job.run()
                    sys.stdout.write(f'\r    √ {job.command.split("/archivebox ")[-1]}\n')
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**ANSI))
                raise SystemExit(1)

        if foreground:
            try:
                for job in existing_jobs:
                    print(f'  > {job.command.split("/archivebox ")[-1].split(" > ")[0]}')
                for result in cron.run_scheduler():
                    print(result)
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**ANSI))
                raise SystemExit(1)


@enforce_types
def server(runserver_args: Optional[List[str]]=None,
           reload: bool=False,
           debug: bool=False,
           init: bool=False,
           out_dir: Path=OUTPUT_DIR) -> None:
    """Run the ArchiveBox HTTP server"""

    runserver_args = runserver_args or []

    if init:
        run_subcommand('init', stdin=None, pwd=out_dir)

    # setup config for django runserver
    from . import config
    config.SHOW_PROGRESS = False
    config.DEBUG = config.DEBUG or debug

    check_data_folder(out_dir=out_dir)
    setup_django(out_dir)

    from django.core.management import call_command
    from django.contrib.auth.models import User

    admin_user = User.objects.filter(is_superuser=True).order_by('date_joined').only('username').last()

    print('{green}[+] Starting ArchiveBox webserver...{reset}'.format(**ANSI))
    if admin_user:
        hint('The admin username is{lightblue} {}{reset}\n'.format(admin_user.username, **ANSI))
    else:
        print('{lightyellow}[!] No admin users exist yet, you will not be able to edit links in the UI.{reset}'.format(**ANSI))
        print()
        print('    To create an admin user, run:')
        print('        archivebox manage createsuperuser')
        print()

    # fallback to serving staticfiles insecurely with django when DEBUG=False
    if not config.DEBUG:
        runserver_args.append('--insecure')  # TODO: serve statics w/ nginx instead

    # toggle autoreloading when archivebox code changes (it's on by default)
    if not reload:
        runserver_args.append('--noreload')

    call_command("runserver", *runserver_args)
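

# Example (CLI, illustrative): extra args are passed straight through to
# Django's runserver, e.g. to listen on all interfaces on port 8000:
#   $ archivebox server 0.0.0.0:8000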


@enforce_types
def manage(args: Optional[List[str]]=None, out_dir: Path=OUTPUT_DIR) -> None:
    """Run an ArchiveBox Django management command"""

    check_data_folder(out_dir=out_dir)
    setup_django(out_dir)
    from django.core.management import execute_from_command_line

    if (args and "createsuperuser" in args) and (IN_DOCKER and not IS_TTY):
        stderr('[!] Warning: you need to pass -it to use interactive commands in docker', color='lightyellow')
        stderr('    docker run -it archivebox manage {}'.format(' '.join(args or ['...'])), color='lightyellow')
        stderr()

    execute_from_command_line([f'{ARCHIVEBOX_BINARY} manage', *(args or ['help'])])


@enforce_types
def shell(out_dir: Path=OUTPUT_DIR) -> None:
    """Enter an interactive ArchiveBox Django shell"""

    check_data_folder(out_dir=out_dir)
    setup_django(out_dir)
    from django.core.management import call_command
    call_command("shell_plus")
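

# Example (illustrative): shell_plus (from django-extensions) auto-imports the
# project's models, so inside the shell you can query the index directly:
#   >>> Snapshot.objects.count()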