__package__ = 'archivebox'

import os
import time
import sys
import shutil
import platform

from typing import Dict, List, Optional, Iterable, IO, Union
from pathlib import Path
from datetime import date, datetime

from crontab import CronTab, CronSlices

from django.db.models import QuerySet
from django.utils import timezone

from .cli import (
    CLI_SUBCOMMANDS,
    run_subcommand,
    display_first,
    meta_cmds,
    main_cmds,
    archive_cmds,
)
from .parsers import (
    save_text_as_source,
    save_file_as_source,
    parse_links_memory,
)
from .index.schema import Link
from .util import enforce_types     # type: ignore
from .system import get_dir_size, dedupe_cron_jobs, CRON_COMMENT
from .system import run as run_shell
from .index import (
    load_main_index,
    parse_links_from_source,
    dedupe_links,
    write_main_index,
    snapshot_filter,
    get_indexed_folders,
    get_archived_folders,
    get_unarchived_folders,
    get_present_folders,
    get_valid_folders,
    get_invalid_folders,
    get_duplicate_folders,
    get_orphaned_folders,
    get_corrupted_folders,
    get_unrecognized_folders,
    fix_invalid_folder_locations,
    write_link_details,
)
from .index.json import (
    parse_json_main_index,
    parse_json_links_details,
    generate_json_index_from_links,
)
from .index.sql import (
    get_admins,
    apply_migrations,
    remove_from_sql_main_index,
)
from .index.html import (
    generate_index_from_links,
)
from .index.csv import links_to_csv
from .extractors import archive_links, archive_link, ignore_methods
from .misc.logging import stderr, hint
from .misc.checks import check_data_folder, check_dependencies
from .config import (
    ConfigDict,
    ANSI,
    IS_TTY,
    DEBUG,
    IN_DOCKER,
    IN_QEMU,
    PUID,
    PGID,
    USER,
    TIMEZONE,
    ENFORCE_ATOMIC_WRITES,
    OUTPUT_PERMISSIONS,
    PYTHON_BINARY,
    ARCHIVEBOX_BINARY,
    ONLY_NEW,
    OUTPUT_DIR,
    SOURCES_DIR,
    ARCHIVE_DIR,
    LOGS_DIR,
    PACKAGE_DIR,
    CONFIG_FILE,
    ARCHIVE_DIR_NAME,
    JSON_INDEX_FILENAME,
    HTML_INDEX_FILENAME,
    SQL_INDEX_FILENAME,
    ALLOWED_IN_OUTPUT_DIR,
    SEARCH_BACKEND_ENGINE,
    LDAP,
    get_version,
    write_config_file,
    VERSION,
    VERSIONS_AVAILABLE,
    CAN_UPGRADE,
    COMMIT_HASH,
    BUILD_TIME,
    CODE_LOCATIONS,
    DATA_LOCATIONS,
    DEPENDENCIES,
    CHROME_BINARY,
    CHROME_VERSION,
    YOUTUBEDL_BINARY,
    YOUTUBEDL_VERSION,
    SINGLEFILE_VERSION,
    READABILITY_VERSION,
    MERCURY_VERSION,
    NODE_VERSION,
    load_all_config,
    CONFIG,
    USER_CONFIG,
    ADMIN_USERNAME,
    ADMIN_PASSWORD,
    get_real_name,
    setup_django,
)
from .logging_util import (
    TERM_WIDTH,
    TimedProgress,
    log_importing_started,
    log_crawl_started,
    log_removal_started,
    log_removal_finished,
    log_list_started,
    log_list_finished,
    printable_config,
    printable_folders,
    printable_filesize,
    printable_folder_status,
    printable_dependency_version,
)
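
# Each CLI subcommand below is implemented as a plain function wrapped with
# @enforce_types so that arguments passed in from the CLI layer are type-checked
# at call time. They all accept an out_dir pointing at the active data folder.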

@enforce_types
def help(out_dir: Path=OUTPUT_DIR) -> None:
    """Print the ArchiveBox help message and usage"""

    all_subcommands = CLI_SUBCOMMANDS
    COMMANDS_HELP_TEXT = '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in meta_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in main_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd in archive_cmds
    ) + '\n\n    ' + '\n    '.join(
        f'{cmd.ljust(20)} {summary}'
        for cmd, summary in all_subcommands.items()
        if cmd not in display_first
    )

    if (Path(out_dir) / SQL_INDEX_FILENAME).exists():
        print('''{green}ArchiveBox v{}: The self-hosted internet archive.{reset}

{lightred}Active data directory:{reset}
    {}

{lightred}Usage:{reset}
    archivebox [command] [--help] [--version] [...args]

{lightred}Commands:{reset}
    {}

{lightred}Example Use:{reset}
    mkdir my-archive; cd my-archive/
    archivebox init
    archivebox status

    archivebox add https://example.com/some/page
    archivebox add --depth=1 ~/Downloads/bookmarks_export.html

    archivebox list --sort=timestamp --csv=timestamp,url,is_archived
    archivebox schedule --every=day https://example.com/some/feed.rss
    archivebox update --resume=15109948213.123

{lightred}Documentation:{reset}
    https://github.com/ArchiveBox/ArchiveBox/wiki
'''.format(VERSION, out_dir, COMMANDS_HELP_TEXT, **ANSI))
    else:
        print('{green}Welcome to ArchiveBox v{}!{reset}'.format(VERSION, **ANSI))
        print()
        if IN_DOCKER:
            print('When using Docker, you need to mount a volume to use as your data dir:')
            print('    docker run -v /some/path:/data archivebox ...')
            print()
        print('To import an existing archive (from a previous version of ArchiveBox):')
        print('    1. cd into your data dir OUTPUT_DIR (usually ArchiveBox/output) and run:')
        print('    2. archivebox init')
        print()
        print('To start a new archive:')
        print('    1. Create an empty directory, then cd into it and run:')
        print('    2. archivebox init')
        print()
        print('For more information, see the documentation here:')
        print('    https://github.com/ArchiveBox/ArchiveBox/wiki')

@enforce_types
def version(quiet: bool=False,
            out_dir: Path=OUTPUT_DIR) -> None:
    """Print the ArchiveBox version and dependency information"""

    print(VERSION)

    if not quiet:
        # 0.7.1
        # ArchiveBox v0.7.1+editable COMMIT_HASH=951bba5 BUILD_TIME=2023-12-17 16:46:05 1702860365
        # IN_DOCKER=False IN_QEMU=False ARCH=arm64 OS=Darwin PLATFORM=macOS-14.2-arm64-arm-64bit PYTHON=Cpython
        # FS_ATOMIC=True FS_REMOTE=False FS_USER=501:20 FS_PERMS=644
        # DEBUG=False IS_TTY=True TZ=UTC SEARCH_BACKEND=ripgrep LDAP=False

        p = platform.uname()
        print(
            'ArchiveBox v{}'.format(get_version(CONFIG)),
            f'COMMIT_HASH={COMMIT_HASH[:7] if COMMIT_HASH else "unknown"}',
            f'BUILD_TIME={BUILD_TIME}',
        )
        print(
            f'IN_DOCKER={IN_DOCKER}',
            f'IN_QEMU={IN_QEMU}',
            f'ARCH={p.machine}',
            f'OS={p.system}',
            f'PLATFORM={platform.platform()}',
            f'PYTHON={sys.implementation.name.title()}',
        )
        OUTPUT_IS_REMOTE_FS = DATA_LOCATIONS['OUTPUT_DIR']['is_mount'] or DATA_LOCATIONS['ARCHIVE_DIR']['is_mount']
        print(
            f'FS_ATOMIC={ENFORCE_ATOMIC_WRITES}',
            f'FS_REMOTE={OUTPUT_IS_REMOTE_FS}',
            f'FS_USER={PUID}:{PGID}',
            f'FS_PERMS={OUTPUT_PERMISSIONS}',
        )
        print(
            f'DEBUG={DEBUG}',
            f'IS_TTY={IS_TTY}',
            f'TZ={TIMEZONE}',
            f'SEARCH_BACKEND={SEARCH_BACKEND_ENGINE}',
            f'LDAP={LDAP}',
            # f'DB=django.db.backends.sqlite3 (({CONFIG["SQLITE_JOURNAL_MODE"]})',  # add this if we have more useful info to show eventually
        )
        print()

        print('{white}[i] Dependency versions:{reset}'.format(**ANSI))
        for name, dependency in DEPENDENCIES.items():
            print(printable_dependency_version(name, dependency))

            # add a newline between core dependencies and extractor dependencies for easier reading
            if name == 'ARCHIVEBOX_BINARY':
                print()

        print()
        print('{white}[i] Source-code locations:{reset}'.format(**ANSI))
        for name, path in CODE_LOCATIONS.items():
            print(printable_folder_status(name, path))

        print()
        if DATA_LOCATIONS['OUTPUT_DIR']['is_valid']:
            print('{white}[i] Data locations:{reset}'.format(**ANSI))
            for name, path in DATA_LOCATIONS.items():
                print(printable_folder_status(name, path))
        else:
            print()
            print('{white}[i] Data locations:{reset} (not in a data directory)'.format(**ANSI))

        print()
        check_dependencies(CONFIG)

@enforce_types
def run(subcommand: str,
        subcommand_args: Optional[List[str]],
        stdin: Optional[IO]=None,
        out_dir: Path=OUTPUT_DIR) -> None:
    """Run a given ArchiveBox subcommand with the given list of args"""
    run_subcommand(
        subcommand=subcommand,
        subcommand_args=subcommand_args,
        stdin=stdin,
        pwd=out_dir,
    )

@enforce_types
def init(force: bool=False, quick: bool=False, setup: bool=False, out_dir: Path=OUTPUT_DIR) -> None:
    """Initialize a new ArchiveBox collection in the current directory"""

    from core.models import Snapshot

    out_dir.mkdir(exist_ok=True)
    is_empty = not len(set(os.listdir(out_dir)) - ALLOWED_IN_OUTPUT_DIR)

    if (out_dir / JSON_INDEX_FILENAME).exists():
        stderr("[!] This folder contains a JSON index. It is deprecated, and will no longer be kept up to date automatically.", color="lightyellow")
        stderr("    You can run `archivebox list --json --with-headers > static_index.json` to manually generate it.", color="lightyellow")

    existing_index = (out_dir / SQL_INDEX_FILENAME).exists()

    if is_empty and not existing_index:
        print('{green}[+] Initializing a new ArchiveBox v{} collection...{reset}'.format(VERSION, **ANSI))
        print('{green}----------------------------------------------------------------------{reset}'.format(**ANSI))
    elif existing_index:
        # TODO: properly detect and print the existing version in current index as well
        print('{green}[*] Verifying and updating existing ArchiveBox collection to v{}...{reset}'.format(VERSION, **ANSI))
        print('{green}----------------------------------------------------------------------{reset}'.format(**ANSI))
    else:
        if force:
            stderr('[!] This folder appears to already have files in it, but no index.sqlite3 is present.', color='lightyellow')
            stderr('    Because --force was passed, ArchiveBox will initialize anyway (which may overwrite existing files).')
        else:
            stderr(
                ("{red}[X] This folder appears to already have files in it, but no index.sqlite3 is present.{reset}\n\n"
                 "    You must run init in a completely empty directory, or an existing data folder.\n\n"
                 "    {lightred}Hint:{reset} To import an existing data folder make sure to cd into the folder first,\n"
                 "    then run 'archivebox init' to pick up where you left off.\n\n"
                 "    (Always make sure your data folder is backed up first before updating ArchiveBox)"
                ).format(**ANSI)
            )
            raise SystemExit(2)

    if existing_index:
        print('\n{green}[*] Verifying archive folder structure...{reset}'.format(**ANSI))
    else:
        print('\n{green}[+] Building archive folder structure...{reset}'.format(**ANSI))

    print(f'    + ./{ARCHIVE_DIR.relative_to(OUTPUT_DIR)}, ./{SOURCES_DIR.relative_to(OUTPUT_DIR)}, ./{LOGS_DIR.relative_to(OUTPUT_DIR)}...')
    Path(SOURCES_DIR).mkdir(exist_ok=True)
    Path(ARCHIVE_DIR).mkdir(exist_ok=True)
    Path(LOGS_DIR).mkdir(exist_ok=True)
    print(f'    + ./{CONFIG_FILE.relative_to(OUTPUT_DIR)}...')
    write_config_file({}, out_dir=out_dir)

    if (out_dir / SQL_INDEX_FILENAME).exists():
        print('\n{green}[*] Verifying main SQL index and running any migrations needed...{reset}'.format(**ANSI))
    else:
        print('\n{green}[+] Building main SQL index and running initial migrations...{reset}'.format(**ANSI))

    DATABASE_FILE = out_dir / SQL_INDEX_FILENAME
    for migration_line in apply_migrations(out_dir):
        print(f'    {migration_line}')

    assert DATABASE_FILE.exists()
    print()
    print(f'    √ ./{DATABASE_FILE.relative_to(OUTPUT_DIR)}')

    # from django.contrib.auth.models import User
    # if IS_TTY and not User.objects.filter(is_superuser=True).exists():
    #     print('{green}[+] Creating admin user account...{reset}'.format(**ANSI))
    #     call_command("createsuperuser", interactive=True)

    print()
    print('{green}[*] Checking links from indexes and archive folders (safe to Ctrl+C)...{reset}'.format(**ANSI))

    all_links = Snapshot.objects.none()
    pending_links: Dict[str, Link] = {}

    if existing_index:
        all_links = load_main_index(out_dir=out_dir, warn=False)
        print('    √ Loaded {} links from existing main index.'.format(all_links.count()))

    if quick:
        print('    > Skipping full snapshot directory check (quick mode)')
    else:
        try:
            # Links in data folders that dont match their timestamp
            fixed, cant_fix = fix_invalid_folder_locations(out_dir=out_dir)
            if fixed:
                print('    {lightyellow}√ Fixed {} data directory locations that didn\'t match their link timestamps.{reset}'.format(len(fixed), **ANSI))
            if cant_fix:
                print('    {lightyellow}! Could not fix {} data directory locations due to conflicts with existing folders.{reset}'.format(len(cant_fix), **ANSI))

            # Links in JSON index but not in main index
            orphaned_json_links = {
                link.url: link
                for link in parse_json_main_index(out_dir)
                if not all_links.filter(url=link.url).exists()
            }
            if orphaned_json_links:
                pending_links.update(orphaned_json_links)
                print('    {lightyellow}√ Added {} orphaned links from existing JSON index...{reset}'.format(len(orphaned_json_links), **ANSI))

            # Links in data dir indexes but not in main index
            orphaned_data_dir_links = {
                link.url: link
                for link in parse_json_links_details(out_dir)
                if not all_links.filter(url=link.url).exists()
            }
            if orphaned_data_dir_links:
                pending_links.update(orphaned_data_dir_links)
                print('    {lightyellow}√ Added {} orphaned links from existing archive directories.{reset}'.format(len(orphaned_data_dir_links), **ANSI))

            # Links in invalid/duplicate data dirs
            invalid_folders = {
                folder: link
                for folder, link in get_invalid_folders(all_links, out_dir=out_dir).items()
            }
            if invalid_folders:
                print('    {lightyellow}! Skipped adding {} invalid link data directories.{reset}'.format(len(invalid_folders), **ANSI))
                print('        X ' + '\n        X '.join(f'./{Path(folder).relative_to(OUTPUT_DIR)} {link}' for folder, link in invalid_folders.items()))
                print()
                print('        {lightred}Hint:{reset} For more information about the link data directories that were skipped, run:'.format(**ANSI))
                print('            archivebox status')
                print('            archivebox list --status=invalid')

        except (KeyboardInterrupt, SystemExit):
            stderr()
            stderr('[x] Stopped checking archive directories due to Ctrl-C/SIGTERM', color='red')
            stderr('    Your archive data is safe, but you should re-run `archivebox init` to finish the process later.')
            stderr()
            stderr('    {lightred}Hint:{reset} In the future you can run a quick init without checking dirs like so:'.format(**ANSI))
            stderr('        archivebox init --quick')
            raise SystemExit(1)

        write_main_index(list(pending_links.values()), out_dir=out_dir)

    print('\n{green}----------------------------------------------------------------------{reset}'.format(**ANSI))

    from django.contrib.auth.models import User
    if (ADMIN_USERNAME and ADMIN_PASSWORD) and not User.objects.filter(username=ADMIN_USERNAME).exists():
        print('{green}[+] Found ADMIN_USERNAME and ADMIN_PASSWORD configuration options, creating new admin user.{reset}'.format(**ANSI))
        User.objects.create_superuser(username=ADMIN_USERNAME, password=ADMIN_PASSWORD)

    if existing_index:
        print('{green}[√] Done. Verified and updated the existing ArchiveBox collection.{reset}'.format(**ANSI))
    else:
        print('{green}[√] Done. A new ArchiveBox collection was initialized ({} links).{reset}'.format(len(all_links) + len(pending_links), **ANSI))

    json_index = out_dir / JSON_INDEX_FILENAME
    html_index = out_dir / HTML_INDEX_FILENAME
    index_name = f"{date.today()}_index_old"
    if json_index.exists():
        json_index.rename(f"{index_name}.json")
    if html_index.exists():
        html_index.rename(f"{index_name}.html")

    if setup:
        run_subcommand('setup', pwd=out_dir)

    if Snapshot.objects.count() < 25:     # hide the hints for experienced users
        print()
        print('    {lightred}Hint:{reset} To view your archive index, run:'.format(**ANSI))
        print('        archivebox server  # then visit http://127.0.0.1:8000')
        print()
        print('    To add new links, you can run:')
        print("        archivebox add < ~/some/path/to/list_of_links.txt")
        print()
        print('    For more usage and examples, run:')
        print('        archivebox help')

@enforce_types
def status(out_dir: Path=OUTPUT_DIR) -> None:
    """Print out some info and statistics about the archive collection"""

    check_data_folder(CONFIG)

    from core.models import Snapshot
    from django.contrib.auth import get_user_model
    User = get_user_model()

    print('{green}[*] Scanning archive main index...{reset}'.format(**ANSI))
    print(ANSI['lightyellow'], f'   {out_dir}/*', ANSI['reset'])
    num_bytes, num_dirs, num_files = get_dir_size(out_dir, recursive=False, pattern='index.')
    size = printable_filesize(num_bytes)
    print(f'    Index size: {size} across {num_files} files')
    print()

    links = load_main_index(out_dir=out_dir)
    num_sql_links = links.count()
    num_link_details = sum(1 for link in parse_json_links_details(out_dir=out_dir))
    print(f'    > SQL Main Index: {num_sql_links} links'.ljust(36), f'(found in {SQL_INDEX_FILENAME})')
    print(f'    > JSON Link Details: {num_link_details} links'.ljust(36), f'(found in {ARCHIVE_DIR_NAME}/*/index.json)')
    print()

    print('{green}[*] Scanning archive data directories...{reset}'.format(**ANSI))
    print(ANSI['lightyellow'], f'   {ARCHIVE_DIR}/*', ANSI['reset'])
    num_bytes, num_dirs, num_files = get_dir_size(ARCHIVE_DIR)
    size = printable_filesize(num_bytes)
    print(f'    Size: {size} across {num_files} files in {num_dirs} directories')
    print(ANSI['black'])

    num_indexed = len(get_indexed_folders(links, out_dir=out_dir))
    num_archived = len(get_archived_folders(links, out_dir=out_dir))
    num_unarchived = len(get_unarchived_folders(links, out_dir=out_dir))
    print(f'    > indexed: {num_indexed}'.ljust(36), f'({get_indexed_folders.__doc__})')
    print(f'    > archived: {num_archived}'.ljust(36), f'({get_archived_folders.__doc__})')
    print(f'    > unarchived: {num_unarchived}'.ljust(36), f'({get_unarchived_folders.__doc__})')

    num_present = len(get_present_folders(links, out_dir=out_dir))
    num_valid = len(get_valid_folders(links, out_dir=out_dir))
    print()
    print(f'    > present: {num_present}'.ljust(36), f'({get_present_folders.__doc__})')
    print(f'    > valid: {num_valid}'.ljust(36), f'({get_valid_folders.__doc__})')

    duplicate = get_duplicate_folders(links, out_dir=out_dir)
    orphaned = get_orphaned_folders(links, out_dir=out_dir)
    corrupted = get_corrupted_folders(links, out_dir=out_dir)
    unrecognized = get_unrecognized_folders(links, out_dir=out_dir)
    num_invalid = len({**duplicate, **orphaned, **corrupted, **unrecognized})
    print(f'    > invalid: {num_invalid}'.ljust(36), f'({get_invalid_folders.__doc__})')
    print(f'      > duplicate: {len(duplicate)}'.ljust(36), f'({get_duplicate_folders.__doc__})')
    print(f'      > orphaned: {len(orphaned)}'.ljust(36), f'({get_orphaned_folders.__doc__})')
    print(f'      > corrupted: {len(corrupted)}'.ljust(36), f'({get_corrupted_folders.__doc__})')
    print(f'      > unrecognized: {len(unrecognized)}'.ljust(36), f'({get_unrecognized_folders.__doc__})')

    print(ANSI['reset'])

    if num_indexed:
        print('    {lightred}Hint:{reset} You can list link data directories by status like so:'.format(**ANSI))
        print('        archivebox list --status=<status>  (e.g. indexed, corrupted, archived, etc.)')

    if orphaned:
        print('    {lightred}Hint:{reset} To automatically import orphaned data directories into the main index, run:'.format(**ANSI))
        print('        archivebox init')

    if num_invalid:
        print('    {lightred}Hint:{reset} You may need to manually remove or fix some invalid data directories, afterwards make sure to run:'.format(**ANSI))
        print('        archivebox init')

    print()
    print('{green}[*] Scanning recent archive changes and user logins:{reset}'.format(**ANSI))
    print(ANSI['lightyellow'], f'   {LOGS_DIR}/*', ANSI['reset'])
    users = get_admins().values_list('username', flat=True)
    print(f'    UI users {len(users)}: {", ".join(users)}')
    last_login = User.objects.order_by('last_login').last()
    if last_login:
        print(f'    Last UI login: {last_login.username} @ {str(last_login.last_login)[:16]}')
    last_downloaded = Snapshot.objects.order_by('downloaded_at').last()
    if last_downloaded:
        print(f'    Last changes: {str(last_downloaded.downloaded_at)[:16]}')

    if not users:
        print()
        print('    {lightred}Hint:{reset} You can create an admin user by running:'.format(**ANSI))
        print('        archivebox manage createsuperuser')

    print()
    for snapshot in links.order_by('-downloaded_at')[:10]:
        if not snapshot.downloaded_at:
            continue
        print(
            ANSI['black'],
            (
                f'   > {str(snapshot.downloaded_at)[:16]} '
                f'[{snapshot.num_outputs} {("X", "√")[snapshot.is_archived]} {printable_filesize(snapshot.archive_size)}] '
                f'"{snapshot.title}": {snapshot.url}'
            )[:TERM_WIDTH()],
            ANSI['reset'],
        )
    print(ANSI['black'], '   ...', ANSI['reset'])

@enforce_types
def oneshot(url: str, extractors: str="", out_dir: Path=OUTPUT_DIR, created_by_id: int | None=None) -> List[Link]:
    """
    Create a single URL archive folder with an index.json and index.html, and all the archive method outputs.
    You can run this to archive single pages without needing to create a whole collection with archivebox init.
    """
    oneshot_link, _ = parse_links_memory([url])
    if len(oneshot_link) > 1:
        stderr(
            '[X] You should pass a single url to the oneshot command',
            color='red'
        )
        raise SystemExit(2)

    methods = extractors.split(",") if extractors else ignore_methods(['title'])
    archive_link(oneshot_link[0], out_dir=out_dir, methods=methods, created_by_id=created_by_id)
    return oneshot_link
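
# add() pipeline: save the raw input verbatim to sources/, parse it into Link
# objects (optionally crawling one level deeper), dedupe against the existing
# index, write new Snapshot rows, apply any tags, then run the archive extractor
# methods (or only the fake index_only method when --index-only is passed).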

@enforce_types
def add(urls: Union[str, List[str]],
        tag: str='',
        depth: int=0,
        update: bool=not ONLY_NEW,
        update_all: bool=False,
        index_only: bool=False,
        overwrite: bool=False,
        # duplicate: bool=False,  # TODO: reuse the logic from admin.py resnapshot to allow adding multiple snapshots by appending timestamp automatically
        init: bool=False,
        extractors: str="",
        parser: str="auto",
        created_by_id: int | None=None,
        out_dir: Path=OUTPUT_DIR) -> List[Link]:
    """Add a new URL or list of URLs to your archive"""

    from core.models import Snapshot, Tag
    # from queues.supervisor_util import start_cli_workers, tail_worker_logs
    # from queues.tasks import bg_archive_link

    assert depth in (0, 1), 'Depth must be 0 or 1 (depth >1 is not supported yet)'

    extractors = extractors.split(",") if extractors else []

    if init:
        run_subcommand('init', stdin=None, pwd=out_dir)

    # Load list of links from the existing index
    check_data_folder(CONFIG)
    check_dependencies(CONFIG)
    # worker = start_cli_workers()

    new_links: List[Link] = []
    all_links = load_main_index(out_dir=out_dir)

    log_importing_started(urls=urls, depth=depth, index_only=index_only)
    if isinstance(urls, str):
        # save verbatim stdin to sources
        write_ahead_log = save_text_as_source(urls, filename='{ts}-import.txt', out_dir=out_dir)
    elif isinstance(urls, list):
        # save verbatim args to sources
        write_ahead_log = save_text_as_source('\n'.join(urls), filename='{ts}-import.txt', out_dir=out_dir)

    new_links += parse_links_from_source(write_ahead_log, root_url=None, parser=parser)

    # If we're going one level deeper, download each link and look for more links
    new_links_depth = []
    if new_links and depth == 1:
        log_crawl_started(new_links)
        for new_link in new_links:
            try:
                downloaded_file = save_file_as_source(new_link.url, filename=f'{new_link.timestamp}-crawl-{new_link.domain}.txt', out_dir=out_dir)
                new_links_depth += parse_links_from_source(downloaded_file, root_url=new_link.url)
            except Exception as err:
                stderr(f'[!] Failed to get contents of URL {new_link.url}', err, color='red')

    imported_links = list({link.url: link for link in (new_links + new_links_depth)}.values())

    new_links = dedupe_links(all_links, imported_links)

    write_main_index(links=new_links, out_dir=out_dir, created_by_id=created_by_id)
    all_links = load_main_index(out_dir=out_dir)

    tags = [
        Tag.objects.get_or_create(name=name.strip(), defaults={'created_by_id': created_by_id})[0]
        for name in tag.split(',')
        if name.strip()
    ]
    if tags:
        for link in imported_links:
            snapshot = Snapshot.objects.get(url=link.url)
            snapshot.tags.add(*tags)
            snapshot.tags_str(nocache=True)
            snapshot.save()
        # print(f'    √ Tagged {len(imported_links)} Snapshots with {len(tags)} tags {tags_str}')

    if index_only:
        # mock archive all the links using the fake index_only extractor method in order to update their state
        if overwrite:
            archive_links(imported_links, overwrite=overwrite, methods=['index_only'], out_dir=out_dir, created_by_id=created_by_id)
        else:
            archive_links(new_links, overwrite=False, methods=['index_only'], out_dir=out_dir, created_by_id=created_by_id)
    else:
        # fully run the archive extractor methods for each link
        archive_kwargs = {
            "out_dir": out_dir,
            "created_by_id": created_by_id,
        }
        if extractors:
            archive_kwargs["methods"] = extractors

        stderr()

        ts = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        if update:
            stderr(f'[*] [{ts}] Archiving + updating {len(imported_links)}/{len(all_links)}', len(imported_links), 'URLs from added set...', color='green')
            archive_links(imported_links, overwrite=overwrite, **archive_kwargs)
        elif update_all:
            stderr(f'[*] [{ts}] Archiving + updating {len(all_links)}/{len(all_links)}', len(all_links), 'URLs from entire library...', color='green')
            archive_links(all_links, overwrite=overwrite, **archive_kwargs)
        elif overwrite:
            stderr(f'[*] [{ts}] Archiving + overwriting {len(imported_links)}/{len(all_links)}', len(imported_links), 'URLs from added set...', color='green')
            archive_links(imported_links, overwrite=True, **archive_kwargs)
        elif new_links:
            stderr(f'[*] [{ts}] Archiving {len(new_links)}/{len(all_links)} URLs from added set...', color='green')
            archive_links(new_links, overwrite=False, **archive_kwargs)

    # tail_worker_logs(worker['stdout_logfile'])

    if CAN_UPGRADE:
        hint(f"There's a new version of ArchiveBox available! Your current version is {VERSION}. You can upgrade to {VERSIONS_AVAILABLE['recommended_version']['tag_name']} ({VERSIONS_AVAILABLE['recommended_version']['html_url']}). For more on how to upgrade: https://github.com/ArchiveBox/ArchiveBox/wiki/Upgrading-or-Merging-Archives\n")

    return new_links

@enforce_types
def remove(filter_str: Optional[str]=None,
           filter_patterns: Optional[List[str]]=None,
           filter_type: str='exact',
           snapshots: Optional[QuerySet]=None,
           after: Optional[float]=None,
           before: Optional[float]=None,
           yes: bool=False,
           delete: bool=False,
           out_dir: Path=OUTPUT_DIR) -> List[Link]:
    """Remove the specified URLs from the archive"""

    check_data_folder(CONFIG)

    if snapshots is None:
        if filter_str and filter_patterns:
            stderr(
                '[X] You should pass either a pattern as an argument, '
                'or pass a list of patterns via stdin, but not both.\n',
                color='red',
            )
            raise SystemExit(2)
        elif not (filter_str or filter_patterns):
            stderr(
                '[X] You should pass either a pattern as an argument, '
                'or pass a list of patterns via stdin.',
                color='red',
            )
            stderr()
            hint(('To remove all urls you can run:',
                  'archivebox remove --filter-type=regex ".*"'))
            stderr()
            raise SystemExit(2)
        elif filter_str:
            filter_patterns = [ptn.strip() for ptn in filter_str.split('\n')]

    list_kwargs = {
        "filter_patterns": filter_patterns,
        "filter_type": filter_type,
        "after": after,
        "before": before,
    }
    if snapshots:
        list_kwargs["snapshots"] = snapshots

    log_list_started(filter_patterns, filter_type)
    timer = TimedProgress(360, prefix='      ')
    try:
        snapshots = list_links(**list_kwargs)
    finally:
        timer.end()

    if not snapshots.exists():
        log_removal_finished(0, 0)
        raise SystemExit(1)

    log_links = [link.as_link() for link in snapshots]
    log_list_finished(log_links)
    log_removal_started(log_links, yes=yes, delete=delete)

    timer = TimedProgress(360, prefix='      ')
    try:
        for snapshot in snapshots:
            if delete:
                shutil.rmtree(snapshot.as_link().link_dir, ignore_errors=True)
    finally:
        timer.end()

    to_remove = snapshots.count()

    from .search import flush_search_index
    flush_search_index(snapshots=snapshots)
    remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
    all_snapshots = load_main_index(out_dir=out_dir)
    log_removal_finished(all_snapshots.count(), to_remove)

    return all_snapshots

@enforce_types
def update(resume: Optional[float]=None,
           only_new: bool=ONLY_NEW,
           index_only: bool=False,
           overwrite: bool=False,
           filter_patterns_str: Optional[str]=None,
           filter_patterns: Optional[List[str]]=None,
           filter_type: Optional[str]=None,
           status: Optional[str]=None,
           after: Optional[str]=None,
           before: Optional[str]=None,
           extractors: str="",
           out_dir: Path=OUTPUT_DIR) -> List[Link]:
    """Import any new links from subscriptions and retry any previously failed/skipped links"""

    from core.models import ArchiveResult
    from .search import index_links
    # from .queues.supervisor_util import start_cli_workers

    check_data_folder(CONFIG)
    check_dependencies(CONFIG)
    # start_cli_workers()

    new_links: List[Link] = []  # TODO: Remove input argument: only_new

    extractors = extractors.split(",") if extractors else []

    # Step 1: Filter for selected_links
    print('[*] Finding matching Snapshots to update...')
    print(f'    - Filtering by {" ".join(filter_patterns)} ({filter_type}) {before=} {after=} {status=}...')
    matching_snapshots = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )
    print(f'    - Checking {matching_snapshots.count()} snapshot folders for existing data with {status=}...')
    matching_folders = list_folders(
        links=matching_snapshots,
        status=status,
        out_dir=out_dir,
    )
    all_links = (link for link in matching_folders.values() if link)
    print('    - Sorting by most unfinished -> least unfinished + date archived...')
    all_links = sorted(all_links, key=lambda link: (ArchiveResult.objects.filter(snapshot__url=link.url).count(), link.timestamp))

    if index_only:
        for link in all_links:
            write_link_details(link, out_dir=out_dir, skip_sql_index=True)
        index_links(all_links, out_dir=out_dir)
        return all_links

    # Step 2: Run the archive methods for each link
    to_archive = new_links if only_new else all_links
    if resume:
        to_archive = [
            link for link in to_archive
            if link.timestamp >= str(resume)
        ]

    if not to_archive:
        stderr('')
        stderr(f'[√] Nothing found to resume after {resume}', color='green')
        return all_links

    archive_kwargs = {
        "out_dir": out_dir,
    }
    if extractors:
        archive_kwargs["methods"] = extractors

    archive_links(to_archive, overwrite=overwrite, **archive_kwargs)

    # Step 4: Re-write links index with updated titles, icons, and resources
    all_links = load_main_index(out_dir=out_dir)
    return all_links

@enforce_types
def list_all(filter_patterns_str: Optional[str]=None,
             filter_patterns: Optional[List[str]]=None,
             filter_type: str='exact',
             status: Optional[str]=None,
             after: Optional[float]=None,
             before: Optional[float]=None,
             sort: Optional[str]=None,
             csv: Optional[str]=None,
             json: bool=False,
             html: bool=False,
             with_headers: bool=False,
             out_dir: Path=OUTPUT_DIR) -> Iterable[Link]:
    """List, filter, and export information about archive entries"""

    check_data_folder(CONFIG)

    if filter_patterns and filter_patterns_str:
        stderr(
            '[X] You should either pass filter patterns as an arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif filter_patterns_str:
        filter_patterns = filter_patterns_str.split('\n')

    snapshots = list_links(
        filter_patterns=filter_patterns,
        filter_type=filter_type,
        before=before,
        after=after,
    )

    if sort:
        snapshots = snapshots.order_by(sort)

    folders = list_folders(
        links=snapshots,
        status=status,
        out_dir=out_dir,
    )

    if json:
        output = generate_json_index_from_links(folders.values(), with_headers)
    elif html:
        output = generate_index_from_links(folders.values(), with_headers)
    elif csv:
        output = links_to_csv(folders.values(), cols=csv.split(','), header=with_headers)
    else:
        output = printable_folders(folders, with_headers=with_headers)
    print(output)
    return folders

@enforce_types
def list_links(snapshots: Optional[QuerySet]=None,
               filter_patterns: Optional[List[str]]=None,
               filter_type: str='exact',
               after: Optional[float]=None,
               before: Optional[float]=None,
               out_dir: Path=OUTPUT_DIR) -> Iterable[Link]:

    check_data_folder(CONFIG)

    if snapshots:
        all_snapshots = snapshots
    else:
        all_snapshots = load_main_index(out_dir=out_dir)

    if after is not None:
        all_snapshots = all_snapshots.filter(timestamp__gte=after)
    if before is not None:
        all_snapshots = all_snapshots.filter(timestamp__lt=before)
    if filter_patterns:
        all_snapshots = snapshot_filter(all_snapshots, filter_patterns, filter_type)

    if not all_snapshots:
        stderr('[!] No Snapshots matched your filters:', filter_patterns, f'({filter_type})', color='lightyellow')

    return all_snapshots

@enforce_types
def list_folders(links: List[Link],
                 status: str,
                 out_dir: Path=OUTPUT_DIR) -> Dict[str, Optional[Link]]:

    check_data_folder(CONFIG)

    STATUS_FUNCTIONS = {
        "indexed": get_indexed_folders,
        "archived": get_archived_folders,
        "unarchived": get_unarchived_folders,
        "present": get_present_folders,
        "valid": get_valid_folders,
        "invalid": get_invalid_folders,
        "duplicate": get_duplicate_folders,
        "orphaned": get_orphaned_folders,
        "corrupted": get_corrupted_folders,
        "unrecognized": get_unrecognized_folders,
    }

    try:
        return STATUS_FUNCTIONS[status](links, out_dir=out_dir)
    except KeyError:
        raise ValueError('Status not recognized.')
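
# setup() below installs the optional runtime dependencies into the data dir:
# yt-dlp via pip, Chromium via playwright, and the singlefile/readability/mercury
# extractors via npm.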

@enforce_types
def setup(out_dir: Path=OUTPUT_DIR) -> None:
    """Automatically install all ArchiveBox dependencies and extras"""

    if not (out_dir / ARCHIVE_DIR_NAME).exists():
        run_subcommand('init', stdin=None, pwd=out_dir)

    setup_django(out_dir=out_dir, check_db=True)
    from django.contrib.auth import get_user_model
    User = get_user_model()

    if not User.objects.filter(is_superuser=True).exists():
        stderr('\n[+] Creating new admin user for the Web UI...', color='green')
        run_subcommand('manage', subcommand_args=['createsuperuser'], pwd=out_dir)

    stderr('\n[+] Installing enabled ArchiveBox dependencies automatically...', color='green')

    stderr('\n    Installing YOUTUBEDL_BINARY automatically using pip...')
    if YOUTUBEDL_VERSION:
        print(f'{YOUTUBEDL_VERSION} is already installed', YOUTUBEDL_BINARY)
    else:
        try:
            run_shell([
                PYTHON_BINARY, '-m', 'pip',
                'install',
                '--upgrade',
                '--no-cache-dir',
                '--no-warn-script-location',
                'yt-dlp',
            ], capture_output=False, cwd=out_dir)
            pkg_path = run_shell([
                PYTHON_BINARY, '-m', 'pip',
                'show',
                'yt-dlp',
            ], capture_output=True, text=True, cwd=out_dir).stdout.decode().split('Location: ')[-1].split('\n', 1)[0]
            NEW_YOUTUBEDL_BINARY = Path(pkg_path) / 'yt-dlp' / '__main__.py'
            os.chmod(NEW_YOUTUBEDL_BINARY, 0o777)
            assert NEW_YOUTUBEDL_BINARY.exists(), f'yt-dlp must exist inside {pkg_path}'
            config(f'YOUTUBEDL_BINARY={NEW_YOUTUBEDL_BINARY}', set=True, out_dir=out_dir)
        except BaseException as e:      # lgtm [py/catch-base-exception]
            stderr(f'[X] Failed to install python packages: {e}', color='red')
            raise SystemExit(1)

    if platform.machine() == 'armv7l':
        stderr('\n    Skip the automatic installation of CHROME_BINARY because playwright is not available on armv7.')
    else:
        stderr('\n    Installing CHROME_BINARY automatically using playwright...')
        if CHROME_VERSION:
            print(f'{CHROME_VERSION} is already installed', CHROME_BINARY)
        else:
            try:
                run_shell([
                    PYTHON_BINARY, '-m', 'pip',
                    'install',
                    '--upgrade',
                    '--no-cache-dir',
                    '--no-warn-script-location',
                    'playwright',
                ], capture_output=False, cwd=out_dir)
                run_shell([PYTHON_BINARY, '-m', 'playwright', 'install', 'chromium'], capture_output=False, cwd=out_dir)
                proc = run_shell([PYTHON_BINARY, '-c', 'from playwright.sync_api import sync_playwright; print(sync_playwright().start().chromium.executable_path)'], capture_output=True, text=True, cwd=out_dir)
                NEW_CHROME_BINARY = proc.stdout.decode().strip() if isinstance(proc.stdout, bytes) else proc.stdout.strip()
                assert NEW_CHROME_BINARY and len(NEW_CHROME_BINARY), 'CHROME_BINARY must contain a path'
                config(f'CHROME_BINARY={NEW_CHROME_BINARY}', set=True, out_dir=out_dir)
            except BaseException as e:      # lgtm [py/catch-base-exception]
                stderr(f'[X] Failed to install chromium using playwright: {e.__class__.__name__} {e}', color='red')
                raise SystemExit(1)

    stderr('\n    Installing SINGLEFILE_BINARY, READABILITY_BINARY, MERCURY_BINARY automatically using npm...')
    if not NODE_VERSION:
        stderr('[X] You must first install node & npm using your system package manager', color='red')
        hint([
            'https://github.com/nodesource/distributions#table-of-contents',
            'or to disable all node-based modules run: archivebox config --set USE_NODE=False',
        ])
        raise SystemExit(1)

    if all((SINGLEFILE_VERSION, READABILITY_VERSION, MERCURY_VERSION)):
        print('SINGLEFILE_BINARY, READABILITY_BINARY, and MERCURY_BINARY are already installed')
    else:
        try:
            # clear out old npm package locations
            paths = (
                out_dir / 'package.json',
                out_dir / 'package_lock.json',
                out_dir / 'node_modules',
            )
            for path in paths:
                if path.is_dir():
                    shutil.rmtree(path, ignore_errors=True)
                elif path.is_file():
                    os.remove(path)

            shutil.copyfile(PACKAGE_DIR / 'package.json', out_dir / 'package.json')   # copy the js requirements list from the source install into the data dir

            # lets blindly assume that calling out to npm via shell works reliably cross-platform 🤡 (until proven otherwise via support tickets)
            run_shell([
                'npm',
                'install',
                '--prefix', str(out_dir),   # force it to put the node_modules dir in this folder
                '--force',                  # overwrite any existing node_modules
                '--no-save',                # don't bother saving updating the package.json or package-lock.json file
                '--no-audit',               # don't bother checking for newer versions with security vuln fixes
                '--no-fund',                # hide "please fund our project" messages
                '--loglevel', 'error',      # only show errors (hide warn/info/debug) during installation
                # these args are written in blood, change with caution
            ], capture_output=False, cwd=out_dir)
            os.remove(out_dir / 'package.json')
        except BaseException as e:      # lgtm [py/catch-base-exception]
            stderr(f'[X] Failed to install npm packages: {e}', color='red')
            hint(f'Try deleting {out_dir}/node_modules and running it again')
            raise SystemExit(1)

    stderr('\n[√] Set up ArchiveBox and its dependencies successfully.', color='green')

    run_shell([PYTHON_BINARY, ARCHIVEBOX_BINARY, '--version'], capture_output=False, cwd=out_dir)

@enforce_types
def config(config_options_str: Optional[str]=None,
           config_options: Optional[List[str]]=None,
           get: bool=False,
           set: bool=False,
           reset: bool=False,
           out_dir: Path=OUTPUT_DIR) -> None:
    """Get and set your ArchiveBox project configuration values"""

    check_data_folder(CONFIG)

    if config_options and config_options_str:
        stderr(
            '[X] You should either pass config values as an arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif config_options_str:
        config_options = config_options_str.split('\n')

    config_options = config_options or []

    no_args = not (get or set or reset or config_options)

    matching_config: ConfigDict = {}
    if get or no_args:
        if config_options:
            config_options = [get_real_name(key) for key in config_options]
            matching_config = {key: CONFIG[key] for key in config_options if key in CONFIG}
            failed_config = [key for key in config_options if key not in CONFIG]
            if failed_config:
                stderr()
                stderr('[X] These options failed to get', color='red')
                stderr('    {}'.format('\n    '.join(config_options)))
                raise SystemExit(1)
        else:
            matching_config = CONFIG

        print(printable_config(matching_config))
        raise SystemExit(not matching_config)
    elif set:
        new_config = {}
        failed_options = []
        for line in config_options:
            if line.startswith('#') or not line.strip():
                continue
            if '=' not in line:
                stderr('[X] Config KEY=VALUE must have an = sign in it', color='red')
                stderr(f'    {line}')
                raise SystemExit(2)

            raw_key, val = line.split('=', 1)
            raw_key = raw_key.upper().strip()
            key = get_real_name(raw_key)
            if key != raw_key:
                stderr(f'[i] Note: The config option {raw_key} has been renamed to {key}, please use the new name going forwards.', color='lightyellow')

            if key in CONFIG:
                new_config[key] = val.strip()
            else:
                failed_options.append(line)

        if new_config:
            before = CONFIG
            matching_config = write_config_file(new_config, out_dir=OUTPUT_DIR)
            after = load_all_config()
            print(printable_config(matching_config))

            side_effect_changes: ConfigDict = {}
            for key, val in after.items():
                if key in USER_CONFIG and (before[key] != after[key]) and (key not in matching_config):
                    side_effect_changes[key] = after[key]

            if side_effect_changes:
                stderr()
                stderr('[i] Note: This change also affected these other options that depended on it:', color='lightyellow')
                print('    {}'.format(printable_config(side_effect_changes, prefix='    ')))
        if failed_options:
            stderr()
            stderr('[X] These options failed to set (check for typos):', color='red')
            stderr('    {}'.format('\n    '.join(failed_options)))
            raise SystemExit(1)
    elif reset:
        stderr('[X] This command is not implemented yet.', color='red')
        stderr('    Please manually remove the relevant lines from your config file:')
        stderr(f'    {CONFIG_FILE}')
        raise SystemExit(2)
    else:
        stderr('[X] You must pass either --get or --set, or no arguments to get the whole config.', color='red')
        stderr('        archivebox config')
        stderr('        archivebox config --get SOME_KEY')
        stderr('        archivebox config --set SOME_KEY=SOME_VALUE')
        raise SystemExit(2)

@enforce_types
def schedule(add: bool=False,
             show: bool=False,
             clear: bool=False,
             foreground: bool=False,
             run_all: bool=False,
             quiet: bool=False,
             every: Optional[str]=None,
             tag: str='',
             depth: int=0,
             overwrite: bool=False,
             update: bool=not ONLY_NEW,
             import_path: Optional[str]=None,
             out_dir: Path=OUTPUT_DIR):
    """Set ArchiveBox to regularly import URLs at specific times using cron"""

    check_data_folder(CONFIG)
    Path(LOGS_DIR).mkdir(exist_ok=True)

    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)

    if clear:
        print(cron.remove_all(comment=CRON_COMMENT))
        cron.write()
        raise SystemExit(0)

    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if every or add:
        every = every or 'day'
        quoted = lambda s: f'"{s}"' if (s and ' ' in str(s)) else str(s)
        cmd = [
            'cd',
            quoted(out_dir),
            '&&',
            quoted(ARCHIVEBOX_BINARY),
            *([
                'add',
                *(['--overwrite'] if overwrite else []),
                *(['--update'] if update else []),
                *([f'--tag={tag}'] if tag else []),
                f'--depth={depth}',
                f'"{import_path}"',
            ] if import_path else ['update']),
            '>>',
            quoted(Path(LOGS_DIR) / 'schedule.log'),
            '2>&1',
        ]
        new_job = cron.new(command=' '.join(cmd), comment=CRON_COMMENT)

        if every in ('minute', 'hour', 'day', 'month', 'year'):
            set_every = getattr(new_job.every(), every)
            set_every()
        elif CronSlices.is_valid(every):
            new_job.setall(every)
        else:
            stderr('{red}[X] Got invalid timeperiod for cron task.{reset}'.format(**ANSI))
            stderr('    It must be one of minute/hour/day/month/year')
            stderr('    or a quoted cron-format schedule like:')
            stderr('        archivebox schedule --every=day --depth=1 https://example.com/some/rss/feed.xml')
            stderr('        archivebox schedule --every="0/5 * * * *" --depth=1 https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        cron = dedupe_cron_jobs(cron)
        cron.write()

        total_runs = sum(j.frequency_per_year() for j in cron)
        existing_jobs = list(cron.find_comment(CRON_COMMENT))

        print()
        print('{green}[√] Scheduled new ArchiveBox cron job for user: {} ({} jobs are active).{reset}'.format(USER, len(existing_jobs), **ANSI))
        print('\n'.join(f'  > {cmd}' if str(cmd) == str(new_job) else f'    {cmd}' for cmd in existing_jobs))
        if total_runs > 60 and not quiet:
            stderr()
            stderr('{lightyellow}[!] With the current cron config, ArchiveBox is estimated to run >{} times per year.{reset}'.format(total_runs, **ANSI))
            stderr('    Congrats on being an enthusiastic internet archiver! 👌')
            stderr()
            stderr('    Make sure you have enough storage space available to hold all the data.')
            stderr('    Using a compressed/deduped filesystem like ZFS is recommended if you plan on archiving a lot.')
            stderr('')
    elif show:
        if existing_jobs:
            print('\n'.join(str(cmd) for cmd in existing_jobs))
        else:
            stderr('{red}[X] There are no ArchiveBox cron jobs scheduled for your user ({}).{reset}'.format(USER, **ANSI))
            stderr('    To schedule a new job, run:')
            stderr('        archivebox schedule --every=[timeperiod] --depth=1 https://example.com/some/rss/feed.xml')
        raise SystemExit(0)

    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)
    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if foreground or run_all:
        if not existing_jobs:
            stderr('{red}[X] You must schedule some jobs first before running in foreground mode.{reset}'.format(**ANSI))
            stderr('        archivebox schedule --every=hour --depth=1 https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        print('{green}[*] Running {} ArchiveBox jobs in foreground task scheduler...{reset}'.format(len(existing_jobs), **ANSI))
        if run_all:
            try:
                for job in existing_jobs:
                    sys.stdout.write(f'  > {job.command.split("/archivebox ")[0].split(" && ")[0]}\n')
                    sys.stdout.write(f'    > {job.command.split("/archivebox ")[-1].split(" >> ")[0]}')
                    sys.stdout.flush()
                    job.run()
                    sys.stdout.write(f'\r    √ {job.command.split("/archivebox ")[-1]}\n')
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**ANSI))
                raise SystemExit(1)

        if foreground:
            try:
                for job in existing_jobs:
                    print(f'  > {job.command.split("/archivebox ")[-1].split(" >> ")[0]}')
                for result in cron.run_scheduler():
                    print(result)
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**ANSI))
                raise SystemExit(1)

    if CAN_UPGRADE:
        hint(f"There's a new version of ArchiveBox available! Your current version is {VERSION}. You can upgrade to {VERSIONS_AVAILABLE['recommended_version']['tag_name']} ({VERSIONS_AVAILABLE['recommended_version']['html_url']}). For more on how to upgrade: https://github.com/ArchiveBox/ArchiveBox/wiki/Upgrading-or-Merging-Archives\n")
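
# server() runs the Django development server directly when --debug is passed;
# otherwise it starts a daphne ASGI worker plus background task workers under
# supervisord and watches the daphne worker until interrupted.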

@enforce_types
def server(runserver_args: Optional[List[str]]=None,
           reload: bool=False,
           debug: bool=False,
           init: bool=False,
           quick_init: bool=False,
           createsuperuser: bool=False,
           out_dir: Path=OUTPUT_DIR) -> None:
    """Run the ArchiveBox HTTP server"""

    runserver_args = runserver_args or []

    if init:
        run_subcommand('init', stdin=None, pwd=out_dir)
        print()
    elif quick_init:
        run_subcommand('init', subcommand_args=['--quick'], stdin=None, pwd=out_dir)
        print()

    if createsuperuser:
        run_subcommand('manage', subcommand_args=['createsuperuser'], pwd=out_dir)
        print()

    # setup config for django runserver
    from . import config
    config.SHOW_PROGRESS = False
    config.DEBUG = config.DEBUG or debug

    check_data_folder(CONFIG)

    from django.core.management import call_command
    from django.contrib.auth.models import User

    print('{green}[+] Starting ArchiveBox webserver... {reset}'.format(**ANSI))
    print('    > Logging errors to ./logs/errors.log')
    if not User.objects.filter(is_superuser=True).exists():
        print('{lightyellow}[!] No admin users exist yet, you will not be able to edit links in the UI.{reset}'.format(**ANSI))
        print()
        print('    To create an admin user, run:')
        print('        archivebox manage createsuperuser')
        print()

    # toggle autoreloading when archivebox code changes
    config.SHOW_PROGRESS = False
    config.DEBUG = config.DEBUG or debug

    if debug:
        if not reload:
            runserver_args.append('--noreload')  # '--insecure'
        call_command("runserver", *runserver_args)
    else:
        host = '127.0.0.1'
        port = '8000'

        try:
            host_and_port = [arg for arg in runserver_args if arg.replace('.', '').replace(':', '').isdigit()][0]
            if ':' in host_and_port:
                host, port = host_and_port.split(':')
            else:
                if '.' in host_and_port:
                    host = host_and_port
                else:
                    port = host_and_port
        except IndexError:
            pass

        print(f'    > Starting ArchiveBox webserver on http://{host}:{port}/')

        from queues.supervisor_util import get_or_create_supervisord_process, start_worker, stop_worker, watch_worker

        print()
        supervisor = get_or_create_supervisord_process(daemonize=False)

        bg_workers = [
            {
                "name": "worker_system_tasks",
                "command": "archivebox manage djangohuey --queue system_tasks",
                "autostart": "true",
                "autorestart": "true",
                "stdout_logfile": "logs/worker_system_tasks.log",
                "redirect_stderr": "true",
            },
        ]
        fg_worker = {
            "name": "worker_daphne",
            "command": f"daphne --bind={host} --port={port} --application-close-timeout=600 archivebox.core.asgi:application",
            "autostart": "false",
            "autorestart": "true",
            "stdout_logfile": "logs/worker_daphne.log",
            "redirect_stderr": "true",
        }

        print()
        for worker in bg_workers:
            start_worker(supervisor, worker)
        print()
        start_worker(supervisor, fg_worker)
        print()

        try:
            watch_worker(supervisor, "worker_daphne")
        except KeyboardInterrupt:
            print("\n[🛑] Got Ctrl+C, stopping gracefully...")
        except SystemExit:
            pass
        except BaseException as e:
            print(f"\n[🛑] Got {e.__class__.__name__} exception, stopping web server gracefully...")
            raise
        finally:
            stop_worker(supervisor, "worker_daphne")
            time.sleep(0.5)

    print("\n[🟩] ArchiveBox server shut down gracefully.")

@enforce_types
def manage(args: Optional[List[str]]=None, out_dir: Path=OUTPUT_DIR) -> None:
    """Run an ArchiveBox Django management command"""

    check_data_folder(CONFIG)
    from django.core.management import execute_from_command_line

    if (args and "createsuperuser" in args) and (IN_DOCKER and not IS_TTY):
        stderr('[!] Warning: you need to pass -it to use interactive commands in docker', color='lightyellow')
        stderr('    docker run -it archivebox manage {}'.format(' '.join(args or ['...'])), color='lightyellow')
        stderr('')

    execute_from_command_line([f'{ARCHIVEBOX_BINARY} manage', *(args or ['help'])])

@enforce_types
def shell(out_dir: Path=OUTPUT_DIR) -> None:
    """Enter an interactive ArchiveBox Django shell"""

    check_data_folder(CONFIG)

    from django.core.management import call_command
    call_command("shell_plus")