2
0

main.py 67 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524
  1. __package__ = 'archivebox'
  2. import os
  3. import sys
  4. import shutil
  5. import platform
  6. from typing import Dict, List, Optional, Iterable, IO, Union
  7. from pathlib import Path
  8. from datetime import date, datetime
  9. from crontab import CronTab, CronSlices
  10. from django.db.models import QuerySet
  11. from django.utils import timezone
  12. from archivebox.misc.checks import check_data_folder
  13. from archivebox.misc.util import enforce_types # type: ignore
  14. from archivebox.misc.system import get_dir_size, dedupe_cron_jobs, CRON_COMMENT
  15. from archivebox.misc.system import run as run_shell
  16. from archivebox.misc.logging import stderr, hint
  17. from archivebox.config import CONSTANTS, VERSION, DATA_DIR, ARCHIVE_DIR
  18. from archivebox.config.common import SHELL_CONFIG, SEARCH_BACKEND_CONFIG, STORAGE_CONFIG, SERVER_CONFIG, ARCHIVING_CONFIG
  19. from archivebox.config.permissions import SudoPermission, IN_DOCKER
  20. from archivebox.config.configfile import (
  21. write_config_file,
  22. load_all_config,
  23. get_real_name,
  24. )
  25. from .cli import (
  26. CLI_SUBCOMMANDS,
  27. run_subcommand,
  28. display_first,
  29. meta_cmds,
  30. setup_cmds,
  31. archive_cmds,
  32. )
  33. from .parsers import (
  34. save_text_as_source,
  35. save_file_as_source,
  36. parse_links_memory,
  37. )
  38. from .index.schema import Link
  39. from .index import (
  40. load_main_index,
  41. parse_links_from_source,
  42. dedupe_links,
  43. write_main_index,
  44. snapshot_filter,
  45. get_indexed_folders,
  46. get_archived_folders,
  47. get_unarchived_folders,
  48. get_present_folders,
  49. get_valid_folders,
  50. get_invalid_folders,
  51. get_duplicate_folders,
  52. get_orphaned_folders,
  53. get_corrupted_folders,
  54. get_unrecognized_folders,
  55. fix_invalid_folder_locations,
  56. write_link_details,
  57. )
  58. from .index.json import (
  59. parse_json_main_index,
  60. parse_json_links_details,
  61. generate_json_index_from_links,
  62. )
  63. from .index.sql import (
  64. get_admins,
  65. apply_migrations,
  66. remove_from_sql_main_index,
  67. )
  68. from .index.html import generate_index_from_links
  69. from .index.csv import links_to_csv
  70. from .extractors import archive_links, archive_link, ignore_methods
  71. from .logging_util import (
  72. TimedProgress,
  73. log_importing_started,
  74. log_crawl_started,
  75. log_removal_started,
  76. log_removal_finished,
  77. log_list_started,
  78. log_list_finished,
  79. printable_config,
  80. printable_folders,
  81. printable_filesize,
  82. printable_folder_status,
  83. )
  84. @enforce_types
  85. def help(out_dir: Path=DATA_DIR) -> None:
  86. """Print the ArchiveBox help message and usage"""
  87. from rich import print
  88. from rich.panel import Panel
  89. all_subcommands = CLI_SUBCOMMANDS
  90. COMMANDS_HELP_TEXT = '\n '.join(
  91. f'[green]{cmd.ljust(20)}[/green] {func.__doc__}'
  92. for cmd, func in all_subcommands.items()
  93. if cmd in meta_cmds
  94. ) + '\n\n ' + '\n '.join(
  95. f'[green]{cmd.ljust(20)}[/green] {func.__doc__}'
  96. for cmd, func in all_subcommands.items()
  97. if cmd in setup_cmds
  98. ) + '\n\n ' + '\n '.join(
  99. f'[green]{cmd.ljust(20)}[/green] {func.__doc__}'
  100. for cmd, func in all_subcommands.items()
  101. if cmd in archive_cmds
  102. ) + '\n\n ' + '\n '.join(
  103. f'[green]{cmd.ljust(20)}[/green] {func.__doc__}'
  104. for cmd, func in all_subcommands.items()
  105. if cmd not in display_first
  106. )
  107. DOCKER_USAGE = '''
  108. [dodger_blue3]Docker Usage:[/dodger_blue3]
  109. [grey53]# using Docker Compose:[/grey53]
  110. [blue]docker compose run[/blue] [dark_green]archivebox[/dark_green] [green]\\[command][/green] [green3][...args][/green3] [violet][--help][/violet] [grey53][--version][/grey53]
  111. [grey53]# using Docker:[/grey53]
  112. [blue]docker run[/blue] -v [light_slate_blue]$PWD:/data[/light_slate_blue] [grey53]-p 8000:8000[/grey53] -it [dark_green]archivebox/archivebox[/dark_green] [green]\\[command][/green] [green3][...args][/green3] [violet][--help][/violet] [grey53][--version][/grey53]
  113. ''' if IN_DOCKER else ''
  114. DOCKER_DOCS = '\n [link=https://github.com/ArchiveBox/ArchiveBox/wiki/Docker#usage]https://github.com/ArchiveBox/ArchiveBox/wiki/Docker[/link]' if IN_DOCKER else ''
  115. DOCKER_OUTSIDE_HINT = "\n [grey53]# outside of Docker:[/grey53]" if IN_DOCKER else ''
  116. DOCKER_CMD_PREFIX = "[blue]docker ... [/blue]" if IN_DOCKER else ''
  117. print(f'''{DOCKER_USAGE}
  118. [deep_sky_blue4]Usage:[/deep_sky_blue4]{DOCKER_OUTSIDE_HINT}
  119. [dark_green]archivebox[/dark_green] [green]\\[command][/green] [green3][...args][/green3] [violet][--help][/violet] [grey53][--version][/grey53]
  120. [deep_sky_blue4]Commands:[/deep_sky_blue4]
  121. {COMMANDS_HELP_TEXT}
  122. [deep_sky_blue4]Documentation:[/deep_sky_blue4]
  123. [link=https://github.com/ArchiveBox/ArchiveBox/wiki]https://github.com/ArchiveBox/ArchiveBox/wiki[/link]{DOCKER_DOCS}
  124. [link=https://github.com/ArchiveBox/ArchiveBox/wiki/Usage#cli-usage]https://github.com/ArchiveBox/ArchiveBox/wiki/Usage[/link]
  125. [link=https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration]https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration[/link]
  126. ''')
  127. if os.access(CONSTANTS.ARCHIVE_DIR, os.R_OK) and CONSTANTS.ARCHIVE_DIR.is_dir():
  128. pretty_out_dir = str(out_dir).replace(str(Path('~').expanduser()), '~')
  129. EXAMPLE_USAGE = f'''
  130. [light_slate_blue]DATA DIR[/light_slate_blue]: [yellow]{pretty_out_dir}[/yellow]
  131. [violet]Hint:[/violet] [i]Common maintenance tasks:[/i]
  132. [dark_green]archivebox[/dark_green] [green]init[/green] [grey53]# make sure database is up-to-date (safe to run multiple times)[/grey53]
  133. [dark_green]archivebox[/dark_green] [green]install[/green] [grey53]# make sure plugins are up-to-date (wget, chrome, singlefile, etc.)[/grey53]
  134. [dark_green]archivebox[/dark_green] [green]status[/green] [grey53]# get a health checkup report on your collection[/grey53]
  135. [dark_green]archivebox[/dark_green] [green]update[/green] [grey53]# retry any previously failed or interrupted archiving tasks[/grey53]
  136. [violet]Hint:[/violet] [i]More example usage:[/i]
  137. [dark_green]archivebox[/dark_green] [green]add[/green] --depth=1 "https://example.com/some/page"
  138. [dark_green]archivebox[/dark_green] [green]list[/green] --sort=timestamp --csv=timestamp,downloaded_at,url,title
  139. [dark_green]archivebox[/dark_green] [green]schedule[/green] --every=day --depth=1 "https://example.com/some/feed.rss"
  140. [dark_green]archivebox[/dark_green] [green]server[/green] [blue]0.0.0.0:8000[/blue] [grey53]# Start the Web UI / API server[/grey53]
  141. '''
  142. print(Panel(EXAMPLE_USAGE, expand=False, border_style='grey53', title='[green3]:white_check_mark: A collection [light_slate_blue]DATA DIR[/light_slate_blue] is currently active[/green3]', subtitle='Commands run inside this dir will only apply to this collection.'))
  143. else:
  144. DATA_SETUP_HELP = '\n'
  145. if IN_DOCKER:
  146. DATA_SETUP_HELP += '[violet]Hint:[/violet] When using Docker, you need to mount a volume to use as your data dir:\n'
  147. DATA_SETUP_HELP += ' docker run [violet]-v /some/path/data:/data[/violet] archivebox/archivebox ...\n\n'
  148. DATA_SETUP_HELP += 'To load an [dark_blue]existing[/dark_blue] collection:\n'
  149. DATA_SETUP_HELP += ' 1. [green]cd[/green] ~/archivebox/data [grey53]# go into existing [light_slate_blue]DATA DIR[/light_slate_blue] (can be anywhere)[/grey53]\n'
  150. DATA_SETUP_HELP += f' 2. {DOCKER_CMD_PREFIX}[dark_green]archivebox[/dark_green] [green]init[/green] [grey53]# migrate to latest version (safe to run multiple times)[/grey53]\n'
  151. DATA_SETUP_HELP += f' 3. {DOCKER_CMD_PREFIX}[dark_green]archivebox[/dark_green] [green]install[/green] [grey53]# auto-update all plugins (wget, chrome, singlefile, etc.)[/grey53]\n'
  152. DATA_SETUP_HELP += f' 4. {DOCKER_CMD_PREFIX}[dark_green]archivebox[/dark_green] [green]help[/green] [grey53]# ...get help with next steps... [/grey53]\n\n'
  153. DATA_SETUP_HELP += 'To start a [sea_green1]new[/sea_green1] collection:\n'
  154. DATA_SETUP_HELP += ' 1. [green]mkdir[/green] ~/archivebox/data [grey53]# create a new, empty [light_slate_blue]DATA DIR[/light_slate_blue] (can be anywhere)[/grey53]\n'
  155. DATA_SETUP_HELP += ' 2. [green]cd[/green] ~/archivebox/data [grey53]# cd into the new directory[/grey53]\n'
  156. DATA_SETUP_HELP += f' 3. {DOCKER_CMD_PREFIX}[dark_green]archivebox[/dark_green] [green]init[/green] [grey53]# initialize ArchiveBox in the new data dir[/grey53]\n'
  157. DATA_SETUP_HELP += f' 4. {DOCKER_CMD_PREFIX}[dark_green]archivebox[/dark_green] [green]install[/green] [grey53]# auto-install all plugins (wget, chrome, singlefile, etc.)[/grey53]\n'
  158. DATA_SETUP_HELP += f' 5. {DOCKER_CMD_PREFIX}[dark_green]archivebox[/dark_green] [green]help[/green] [grey53]# ... get help with next steps... [/grey53]\n'
  159. print(Panel(DATA_SETUP_HELP, expand=False, border_style='grey53', title='[red]:cross_mark: No collection is currently active[/red]', subtitle='All archivebox [green]commands[/green] should be run from inside a collection [light_slate_blue]DATA DIR[/light_slate_blue]'))
  160. @enforce_types
  161. def version(quiet: bool=False,
  162. out_dir: Path=DATA_DIR,
  163. binproviders: Optional[List[str]]=None,
  164. binaries: Optional[List[str]]=None,
  165. ) -> None:
  166. """Print the ArchiveBox version and dependency information"""
  167. print(VERSION)
  168. if quiet or '--version' in sys.argv:
  169. return
  170. from rich.panel import Panel
  171. from rich.console import Console
  172. console = Console()
  173. prnt = console.print
  174. from django.conf import settings
  175. from abx.archivebox.base_binary import BaseBinary, apt, brew, env
  176. from archivebox.config.version import get_COMMIT_HASH, get_BUILD_TIME
  177. from archivebox.config.permissions import ARCHIVEBOX_USER, ARCHIVEBOX_GROUP, RUNNING_AS_UID, RUNNING_AS_GID
  178. from archivebox.config.paths import get_data_locations, get_code_locations
  179. from plugins_auth.ldap.config import LDAP_CONFIG
  180. # 0.7.1
  181. # ArchiveBox v0.7.1+editable COMMIT_HASH=951bba5 BUILD_TIME=2023-12-17 16:46:05 1702860365
  182. # IN_DOCKER=False IN_QEMU=False ARCH=arm64 OS=Darwin PLATFORM=macOS-14.2-arm64-arm-64bit PYTHON=Cpython
  183. # FS_ATOMIC=True FS_REMOTE=False FS_USER=501:20 FS_PERMS=644
  184. # DEBUG=False IS_TTY=True TZ=UTC SEARCH_BACKEND=ripgrep LDAP=False
  185. p = platform.uname()
  186. COMMIT_HASH = get_COMMIT_HASH()
  187. prnt(
  188. '[dark_green]ArchiveBox[/dark_green] [dark_goldenrod]v{}[/dark_goldenrod]'.format(CONSTANTS.VERSION),
  189. f'COMMIT_HASH={COMMIT_HASH[:7] if COMMIT_HASH else "unknown"}',
  190. f'BUILD_TIME={get_BUILD_TIME()}',
  191. )
  192. prnt(
  193. f'IN_DOCKER={IN_DOCKER}',
  194. f'IN_QEMU={SHELL_CONFIG.IN_QEMU}',
  195. f'ARCH={p.machine}',
  196. f'OS={p.system}',
  197. f'PLATFORM={platform.platform()}',
  198. f'PYTHON={sys.implementation.name.title()}' + (' (venv)' if CONSTANTS.IS_INSIDE_VENV else ''),
  199. )
  200. OUTPUT_IS_REMOTE_FS = get_data_locations().DATA_DIR.is_mount or get_data_locations().ARCHIVE_DIR.is_mount
  201. DATA_DIR_STAT = CONSTANTS.DATA_DIR.stat()
  202. prnt(
  203. f'EUID={os.geteuid()}:{os.getegid()} UID={RUNNING_AS_UID}:{RUNNING_AS_GID} PUID={ARCHIVEBOX_USER}:{ARCHIVEBOX_GROUP}',
  204. f'FS_UID={DATA_DIR_STAT.st_uid}:{DATA_DIR_STAT.st_gid}',
  205. f'FS_PERMS={STORAGE_CONFIG.OUTPUT_PERMISSIONS}',
  206. f'FS_ATOMIC={STORAGE_CONFIG.ENFORCE_ATOMIC_WRITES}',
  207. f'FS_REMOTE={OUTPUT_IS_REMOTE_FS}',
  208. )
  209. prnt(
  210. f'DEBUG={SHELL_CONFIG.DEBUG}',
  211. f'IS_TTY={SHELL_CONFIG.IS_TTY}',
  212. f'SUDO={CONSTANTS.IS_ROOT}',
  213. f'ID={CONSTANTS.MACHINE_ID}:{CONSTANTS.COLLECTION_ID}',
  214. f'SEARCH_BACKEND={SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE}',
  215. f'LDAP={LDAP_CONFIG.LDAP_ENABLED}',
  216. #f'DB=django.db.backends.sqlite3 (({CONFIG["SQLITE_JOURNAL_MODE"]})', # add this if we have more useful info to show eventually
  217. )
  218. prnt()
  219. if not (os.access(CONSTANTS.ARCHIVE_DIR, os.R_OK) and os.access(CONSTANTS.CONFIG_FILE, os.R_OK)):
  220. PANEL_TEXT = '\n'.join((
  221. # '',
  222. # f'[yellow]CURRENT DIR =[/yellow] [red]{os.getcwd()}[/red]',
  223. '',
  224. '[violet]Hint:[/violet] [green]cd[/green] into a collection [blue]DATA_DIR[/blue] and run [green]archivebox version[/green] again...',
  225. ' [grey53]OR[/grey53] run [green]archivebox init[/green] to create a new collection in the current dir.',
  226. '',
  227. ' [i][grey53](this is [red]REQUIRED[/red] if you are opening a Github Issue to get help)[/grey53][/i]',
  228. '',
  229. ))
  230. prnt(Panel(PANEL_TEXT, expand=False, border_style='grey53', title='[red]:exclamation: No collection [blue]DATA_DIR[/blue] is currently active[/red]', subtitle='Full version info is only available when inside a collection [light_slate_blue]DATA DIR[/light_slate_blue]'))
  231. prnt()
  232. return
  233. prnt('[pale_green1][i] Binary Dependencies:[/pale_green1]')
  234. failures = []
  235. for name, binary in list(settings.BINARIES.items()):
  236. if binary.name == 'archivebox':
  237. continue
  238. # skip if the binary is not in the requested list of binaries
  239. if binaries and binary.name not in binaries:
  240. continue
  241. # skip if the binary is not supported by any of the requested binproviders
  242. if binproviders and binary.binproviders_supported and not any(provider.name in binproviders for provider in binary.binproviders_supported):
  243. continue
  244. err = None
  245. try:
  246. loaded_bin = binary.load()
  247. except Exception as e:
  248. err = e
  249. loaded_bin = binary
  250. provider_summary = f'[dark_sea_green3]{loaded_bin.binprovider.name.ljust(10)}[/dark_sea_green3]' if loaded_bin.binprovider else '[grey23]not found[/grey23] '
  251. if loaded_bin.abspath:
  252. abspath = str(loaded_bin.abspath).replace(str(DATA_DIR), '[light_slate_blue].[/light_slate_blue]').replace(str(Path('~').expanduser()), '~')
  253. if ' ' in abspath:
  254. abspath = abspath.replace(' ', r'\ ')
  255. else:
  256. abspath = f'[red]{err}[/red]'
  257. prnt('', '[green]√[/green]' if loaded_bin.is_valid else '[red]X[/red]', '', loaded_bin.name.ljust(21), str(loaded_bin.version).ljust(12), provider_summary, abspath, overflow='ignore', crop=False)
  258. if not loaded_bin.is_valid:
  259. failures.append(loaded_bin.name)
  260. prnt()
  261. prnt('[gold3][i] Package Managers:[/gold3]')
  262. for name, binprovider in list(settings.BINPROVIDERS.items()):
  263. err = None
  264. if binproviders and binprovider.name not in binproviders:
  265. continue
  266. # TODO: implement a BinProvider.BINARY() method that gets the loaded binary for a binprovider's INSTALLER_BIN
  267. loaded_bin = binprovider.INSTALLER_BINARY or BaseBinary(name=binprovider.INSTALLER_BIN, binproviders=[env, apt, brew])
  268. abspath = None
  269. if loaded_bin.abspath:
  270. abspath = str(loaded_bin.abspath).replace(str(DATA_DIR), '.').replace(str(Path('~').expanduser()), '~')
  271. if ' ' in abspath:
  272. abspath = abspath.replace(' ', r'\ ')
  273. PATH = str(binprovider.PATH).replace(str(DATA_DIR), '[light_slate_blue].[/light_slate_blue]').replace(str(Path('~').expanduser()), '~')
  274. ownership_summary = f'UID=[blue]{str(binprovider.EUID).ljust(4)}[/blue]'
  275. provider_summary = f'[dark_sea_green3]{str(abspath).ljust(52)}[/dark_sea_green3]' if abspath else f'[grey23]{"not available".ljust(52)}[/grey23]'
  276. prnt('', '[green]√[/green]' if binprovider.is_valid else '[grey53]-[/grey53]', '', binprovider.name.ljust(11), provider_summary, ownership_summary, f'PATH={PATH}', overflow='ellipsis', soft_wrap=True)
  277. if not (binaries or binproviders):
  278. # dont show source code / data dir info if we just want to get version info for a binary or binprovider
  279. prnt()
  280. prnt('[deep_sky_blue3][i] Code locations:[/deep_sky_blue3]')
  281. for name, path in get_code_locations().items():
  282. prnt(printable_folder_status(name, path), overflow='ignore', crop=False)
  283. prnt()
  284. if os.access(CONSTANTS.ARCHIVE_DIR, os.R_OK) or os.access(CONSTANTS.CONFIG_FILE, os.R_OK):
  285. prnt('[bright_yellow][i] Data locations:[/bright_yellow]')
  286. for name, path in get_data_locations().items():
  287. prnt(printable_folder_status(name, path), overflow='ignore', crop=False)
  288. from archivebox.misc.checks import check_data_dir_permissions
  289. check_data_dir_permissions()
  290. else:
  291. prnt()
  292. prnt('[red][i] Data locations:[/red] (not in a data directory)')
  293. prnt()
  294. if failures:
  295. raise SystemExit(1)
  296. raise SystemExit(0)
  297. @enforce_types
  298. def run(subcommand: str,
  299. subcommand_args: Optional[List[str]],
  300. stdin: Optional[IO]=None,
  301. out_dir: Path=DATA_DIR) -> None:
  302. """Run a given ArchiveBox subcommand with the given list of args"""
  303. run_subcommand(
  304. subcommand=subcommand,
  305. subcommand_args=subcommand_args,
  306. stdin=stdin,
  307. pwd=out_dir,
  308. )
  309. @enforce_types
  310. def init(force: bool=False, quick: bool=False, install: bool=False, out_dir: Path=DATA_DIR) -> None:
  311. """Initialize a new ArchiveBox collection in the current directory"""
  312. from core.models import Snapshot
  313. from rich import print
  314. # if os.access(out_dir / CONSTANTS.JSON_INDEX_FILENAME, os.F_OK):
  315. # print("[red]:warning: This folder contains a JSON index. It is deprecated, and will no longer be kept up to date automatically.[/red]", file=sys.stderr)
  316. # print("[red] You can run `archivebox list --json --with-headers > static_index.json` to manually generate it.[/red]", file=sys.stderr)
  317. is_empty = not len(set(os.listdir(out_dir)) - CONSTANTS.ALLOWED_IN_DATA_DIR)
  318. existing_index = os.path.isfile(CONSTANTS.DATABASE_FILE)
  319. if is_empty and not existing_index:
  320. print(f'[turquoise4][+] Initializing a new ArchiveBox v{VERSION} collection...[/turquoise4]')
  321. print('[green]----------------------------------------------------------------------[/green]')
  322. elif existing_index:
  323. # TODO: properly detect and print the existing version in current index as well
  324. print(f'[green][*] Verifying and updating existing ArchiveBox collection to v{VERSION}...[/green]')
  325. print('[green]----------------------------------------------------------------------[/green]')
  326. else:
  327. if force:
  328. print('[red][!] This folder appears to already have files in it, but no index.sqlite3 is present.[/red]')
  329. print('[red] Because --force was passed, ArchiveBox will initialize anyway (which may overwrite existing files).[/red]')
  330. else:
  331. print(
  332. ("[red][X] This folder appears to already have files in it, but no index.sqlite3 present.[/red]\n\n"
  333. " You must run init in a completely empty directory, or an existing data folder.\n\n"
  334. " [violet]Hint:[/violet] To import an existing data folder make sure to cd into the folder first, \n"
  335. " then run and run 'archivebox init' to pick up where you left off.\n\n"
  336. " (Always make sure your data folder is backed up first before updating ArchiveBox)"
  337. )
  338. )
  339. raise SystemExit(2)
  340. if existing_index:
  341. print('\n[green][*] Verifying archive folder structure...[/green]')
  342. else:
  343. print('\n[green][+] Building archive folder structure...[/green]')
  344. print(f' + ./{CONSTANTS.ARCHIVE_DIR.relative_to(DATA_DIR)}, ./{CONSTANTS.SOURCES_DIR.relative_to(DATA_DIR)}, ./{CONSTANTS.LOGS_DIR.relative_to(DATA_DIR)}...')
  345. Path(CONSTANTS.SOURCES_DIR).mkdir(exist_ok=True)
  346. Path(CONSTANTS.ARCHIVE_DIR).mkdir(exist_ok=True)
  347. Path(CONSTANTS.LOGS_DIR).mkdir(exist_ok=True)
  348. print(f' + ./{CONSTANTS.CONFIG_FILE.relative_to(DATA_DIR)}...')
  349. # create the .archivebox_id file with a unique ID for this collection
  350. from archivebox.config.paths import _get_collection_id
  351. _get_collection_id(CONSTANTS.DATA_DIR, force_create=True)
  352. # create the ArchiveBox.conf file
  353. write_config_file({'SECRET_KEY': SERVER_CONFIG.SECRET_KEY})
  354. if os.access(CONSTANTS.DATABASE_FILE, os.F_OK):
  355. print('\n[green][*] Verifying main SQL index and running any migrations needed...[/green]')
  356. else:
  357. print('\n[green][+] Building main SQL index and running initial migrations...[/green]')
  358. for migration_line in apply_migrations(out_dir):
  359. sys.stdout.write(f' {migration_line}\n')
  360. assert os.path.isfile(CONSTANTS.DATABASE_FILE) and os.access(CONSTANTS.DATABASE_FILE, os.R_OK)
  361. print()
  362. print(f' √ ./{CONSTANTS.DATABASE_FILE.relative_to(DATA_DIR)}')
  363. # from django.contrib.auth.models import User
  364. # if SHELL_CONFIG.IS_TTY and not User.objects.filter(is_superuser=True).exclude(username='system').exists():
  365. # print('{green}[+] Creating admin user account...{reset}'.format(**SHELL_CONFIG.ANSI))
  366. # call_command("createsuperuser", interactive=True)
  367. print()
  368. print('[dodger_blue3][*] Checking links from indexes and archive folders (safe to Ctrl+C)...[/dodger_blue3]')
  369. all_links = Snapshot.objects.none()
  370. pending_links: Dict[str, Link] = {}
  371. if existing_index:
  372. all_links = load_main_index(out_dir=out_dir, warn=False)
  373. print(f' √ Loaded {all_links.count()} links from existing main index.')
  374. if quick:
  375. print(' > Skipping full snapshot directory check (quick mode)')
  376. else:
  377. try:
  378. # Links in data folders that dont match their timestamp
  379. fixed, cant_fix = fix_invalid_folder_locations(out_dir=out_dir)
  380. if fixed:
  381. print(f' [yellow]√ Fixed {len(fixed)} data directory locations that didn\'t match their link timestamps.[/yellow]')
  382. if cant_fix:
  383. print(f' [red]! Could not fix {len(cant_fix)} data directory locations due to conflicts with existing folders.[/red]')
  384. # Links in JSON index but not in main index
  385. orphaned_json_links = {
  386. link.url: link
  387. for link in parse_json_main_index(out_dir)
  388. if not all_links.filter(url=link.url).exists()
  389. }
  390. if orphaned_json_links:
  391. pending_links.update(orphaned_json_links)
  392. print(f' [yellow]√ Added {len(orphaned_json_links)} orphaned links from existing JSON index...[/yellow]')
  393. # Links in data dir indexes but not in main index
  394. orphaned_data_dir_links = {
  395. link.url: link
  396. for link in parse_json_links_details(out_dir)
  397. if not all_links.filter(url=link.url).exists()
  398. }
  399. if orphaned_data_dir_links:
  400. pending_links.update(orphaned_data_dir_links)
  401. print(f' [yellow]√ Added {len(orphaned_data_dir_links)} orphaned links from existing archive directories.[/yellow]')
  402. # Links in invalid/duplicate data dirs
  403. invalid_folders = {
  404. folder: link
  405. for folder, link in get_invalid_folders(all_links, out_dir=out_dir).items()
  406. }
  407. if invalid_folders:
  408. print(f' [red]! Skipped adding {len(invalid_folders)} invalid link data directories.[/red]')
  409. print(' X ' + '\n X '.join(f'./{Path(folder).relative_to(DATA_DIR)} {link}' for folder, link in invalid_folders.items()))
  410. print()
  411. print(' [violet]Hint:[/violet] For more information about the link data directories that were skipped, run:')
  412. print(' archivebox status')
  413. print(' archivebox list --status=invalid')
  414. except (KeyboardInterrupt, SystemExit):
  415. print(file=sys.stderr)
  416. print('[yellow]:stop_sign: Stopped checking archive directories due to Ctrl-C/SIGTERM[/yellow]', file=sys.stderr)
  417. print(' Your archive data is safe, but you should re-run `archivebox init` to finish the process later.', file=sys.stderr)
  418. print(file=sys.stderr)
  419. print(' [violet]Hint:[/violet] In the future you can run a quick init without checking dirs like so:', file=sys.stderr)
  420. print(' archivebox init --quick', file=sys.stderr)
  421. raise SystemExit(1)
  422. write_main_index(list(pending_links.values()), out_dir=out_dir)
  423. print('\n[green]----------------------------------------------------------------------[/green]')
  424. from django.contrib.auth.models import User
  425. if (SERVER_CONFIG.ADMIN_USERNAME and SERVER_CONFIG.ADMIN_PASSWORD) and not User.objects.filter(username=SERVER_CONFIG.ADMIN_USERNAME).exists():
  426. print('[green][+] Found ADMIN_USERNAME and ADMIN_PASSWORD configuration options, creating new admin user.[/green]')
  427. User.objects.create_superuser(username=SERVER_CONFIG.ADMIN_USERNAME, password=SERVER_CONFIG.ADMIN_PASSWORD)
  428. if existing_index:
  429. print('[green][√] Done. Verified and updated the existing ArchiveBox collection.[/green]')
  430. else:
  431. print(f'[green][√] Done. A new ArchiveBox collection was initialized ({len(all_links) + len(pending_links)} links).[/green]')
  432. json_index = out_dir / CONSTANTS.JSON_INDEX_FILENAME
  433. html_index = out_dir / CONSTANTS.HTML_INDEX_FILENAME
  434. index_name = f"{date.today()}_index_old"
  435. if os.access(json_index, os.F_OK):
  436. json_index.rename(f"{index_name}.json")
  437. if os.access(html_index, os.F_OK):
  438. html_index.rename(f"{index_name}.html")
  439. CONSTANTS.PERSONAS_DIR.mkdir(parents=True, exist_ok=True)
  440. CONSTANTS.DEFAULT_TMP_DIR.mkdir(parents=True, exist_ok=True)
  441. CONSTANTS.DEFAULT_LIB_DIR.mkdir(parents=True, exist_ok=True)
  442. from archivebox.config.common import STORAGE_CONFIG
  443. STORAGE_CONFIG.TMP_DIR.mkdir(parents=True, exist_ok=True)
  444. STORAGE_CONFIG.LIB_DIR.mkdir(parents=True, exist_ok=True)
  445. if install:
  446. run_subcommand('install', pwd=out_dir)
  447. if Snapshot.objects.count() < 25: # hide the hints for experienced users
  448. print()
  449. print(' [violet]Hint:[/violet] To view your archive index, run:')
  450. print(' archivebox server # then visit [deep_sky_blue4][link=http://127.0.0.1:8000]http://127.0.0.1:8000[/link][/deep_sky_blue4]')
  451. print()
  452. print(' To add new links, you can run:')
  453. print(" archivebox add < ~/some/path/to/list_of_links.txt")
  454. print()
  455. print(' For more usage and examples, run:')
  456. print(' archivebox help')
  457. @enforce_types
  458. def status(out_dir: Path=DATA_DIR) -> None:
  459. """Print out some info and statistics about the archive collection"""
  460. check_data_folder()
  461. from core.models import Snapshot
  462. from django.contrib.auth import get_user_model
  463. User = get_user_model()
  464. print('{green}[*] Scanning archive main index...{reset}'.format(**SHELL_CONFIG.ANSI))
  465. print(SHELL_CONFIG.ANSI['lightyellow'], f' {out_dir}/*', SHELL_CONFIG.ANSI['reset'])
  466. num_bytes, num_dirs, num_files = get_dir_size(out_dir, recursive=False, pattern='index.')
  467. size = printable_filesize(num_bytes)
  468. print(f' Index size: {size} across {num_files} files')
  469. print()
  470. links = load_main_index(out_dir=out_dir)
  471. num_sql_links = links.count()
  472. num_link_details = sum(1 for link in parse_json_links_details(out_dir=out_dir))
  473. print(f' > SQL Main Index: {num_sql_links} links'.ljust(36), f'(found in {CONSTANTS.SQL_INDEX_FILENAME})')
  474. print(f' > JSON Link Details: {num_link_details} links'.ljust(36), f'(found in {ARCHIVE_DIR.name}/*/index.json)')
  475. print()
  476. print('{green}[*] Scanning archive data directories...{reset}'.format(**SHELL_CONFIG.ANSI))
  477. print(SHELL_CONFIG.ANSI['lightyellow'], f' {ARCHIVE_DIR}/*', SHELL_CONFIG.ANSI['reset'])
  478. num_bytes, num_dirs, num_files = get_dir_size(ARCHIVE_DIR)
  479. size = printable_filesize(num_bytes)
  480. print(f' Size: {size} across {num_files} files in {num_dirs} directories')
  481. print(SHELL_CONFIG.ANSI['black'])
  482. num_indexed = len(get_indexed_folders(links, out_dir=out_dir))
  483. num_archived = len(get_archived_folders(links, out_dir=out_dir))
  484. num_unarchived = len(get_unarchived_folders(links, out_dir=out_dir))
  485. print(f' > indexed: {num_indexed}'.ljust(36), f'({get_indexed_folders.__doc__})')
  486. print(f' > archived: {num_archived}'.ljust(36), f'({get_archived_folders.__doc__})')
  487. print(f' > unarchived: {num_unarchived}'.ljust(36), f'({get_unarchived_folders.__doc__})')
  488. num_present = len(get_present_folders(links, out_dir=out_dir))
  489. num_valid = len(get_valid_folders(links, out_dir=out_dir))
  490. print()
  491. print(f' > present: {num_present}'.ljust(36), f'({get_present_folders.__doc__})')
  492. print(f' > valid: {num_valid}'.ljust(36), f'({get_valid_folders.__doc__})')
  493. duplicate = get_duplicate_folders(links, out_dir=out_dir)
  494. orphaned = get_orphaned_folders(links, out_dir=out_dir)
  495. corrupted = get_corrupted_folders(links, out_dir=out_dir)
  496. unrecognized = get_unrecognized_folders(links, out_dir=out_dir)
  497. num_invalid = len({**duplicate, **orphaned, **corrupted, **unrecognized})
  498. print(f' > invalid: {num_invalid}'.ljust(36), f'({get_invalid_folders.__doc__})')
  499. print(f' > duplicate: {len(duplicate)}'.ljust(36), f'({get_duplicate_folders.__doc__})')
  500. print(f' > orphaned: {len(orphaned)}'.ljust(36), f'({get_orphaned_folders.__doc__})')
  501. print(f' > corrupted: {len(corrupted)}'.ljust(36), f'({get_corrupted_folders.__doc__})')
  502. print(f' > unrecognized: {len(unrecognized)}'.ljust(36), f'({get_unrecognized_folders.__doc__})')
  503. print(SHELL_CONFIG.ANSI['reset'])
  504. if num_indexed:
  505. print(' {lightred}Hint:{reset} You can list link data directories by status like so:'.format(**SHELL_CONFIG.ANSI))
  506. print(' archivebox list --status=<status> (e.g. indexed, corrupted, archived, etc.)')
  507. if orphaned:
  508. print(' {lightred}Hint:{reset} To automatically import orphaned data directories into the main index, run:'.format(**SHELL_CONFIG.ANSI))
  509. print(' archivebox init')
  510. if num_invalid:
  511. print(' {lightred}Hint:{reset} You may need to manually remove or fix some invalid data directories, afterwards make sure to run:'.format(**SHELL_CONFIG.ANSI))
  512. print(' archivebox init')
  513. print()
  514. print('{green}[*] Scanning recent archive changes and user logins:{reset}'.format(**SHELL_CONFIG.ANSI))
  515. print(SHELL_CONFIG.ANSI['lightyellow'], f' {CONSTANTS.LOGS_DIR}/*', SHELL_CONFIG.ANSI['reset'])
  516. users = get_admins().values_list('username', flat=True)
  517. print(f' UI users {len(users)}: {", ".join(users)}')
  518. last_login = User.objects.order_by('last_login').last()
  519. if last_login:
  520. print(f' Last UI login: {last_login.username} @ {str(last_login.last_login)[:16]}')
  521. last_downloaded = Snapshot.objects.order_by('downloaded_at').last()
  522. if last_downloaded:
  523. print(f' Last changes: {str(last_downloaded.downloaded_at)[:16]}')
  524. if not users:
  525. print()
  526. print(' {lightred}Hint:{reset} You can create an admin user by running:'.format(**SHELL_CONFIG.ANSI))
  527. print(' archivebox manage createsuperuser')
  528. print()
  529. for snapshot in links.order_by('-downloaded_at')[:10]:
  530. if not snapshot.downloaded_at:
  531. continue
  532. print(
  533. SHELL_CONFIG.ANSI['black'],
  534. (
  535. f' > {str(snapshot.downloaded_at)[:16]} '
  536. f'[{snapshot.num_outputs} {("X", "√")[snapshot.is_archived]} {printable_filesize(snapshot.archive_size)}] '
  537. f'"{snapshot.title}": {snapshot.url}'
  538. )[:SHELL_CONFIG.TERM_WIDTH],
  539. SHELL_CONFIG.ANSI['reset'],
  540. )
  541. print(SHELL_CONFIG.ANSI['black'], ' ...', SHELL_CONFIG.ANSI['reset'])
  542. @enforce_types
  543. def oneshot(url: str, extractors: str="", out_dir: Path=DATA_DIR, created_by_id: int | None=None) -> List[Link]:
  544. """
  545. Create a single URL archive folder with an index.json and index.html, and all the archive method outputs.
  546. You can run this to archive single pages without needing to create a whole collection with archivebox init.
  547. """
  548. oneshot_link, _ = parse_links_memory([url])
  549. if len(oneshot_link) > 1:
  550. stderr(
  551. '[X] You should pass a single url to the oneshot command',
  552. color='red'
  553. )
  554. raise SystemExit(2)
  555. methods = extractors.split(",") if extractors else ignore_methods(['title'])
  556. archive_link(oneshot_link[0], out_dir=out_dir, methods=methods, created_by_id=created_by_id)
  557. return oneshot_link
  558. @enforce_types
  559. def add(urls: Union[str, List[str]],
  560. tag: str='',
  561. depth: int=0,
  562. update: bool=not ARCHIVING_CONFIG.ONLY_NEW,
  563. update_all: bool=False,
  564. index_only: bool=False,
  565. overwrite: bool=False,
  566. # duplicate: bool=False, # TODO: reuse the logic from admin.py resnapshot to allow adding multiple snapshots by appending timestamp automatically
  567. init: bool=False,
  568. extractors: str="",
  569. parser: str="auto",
  570. created_by_id: int | None=None,
  571. out_dir: Path=DATA_DIR) -> List[Link]:
  572. """Add a new URL or list of URLs to your archive"""
  573. from core.models import Snapshot, Tag
  574. # from queues.supervisor_util import start_cli_workers, tail_worker_logs
  575. # from queues.tasks import bg_archive_link
  576. assert depth in (0, 1), 'Depth must be 0 or 1 (depth >1 is not supported yet)'
  577. extractors = extractors.split(",") if extractors else []
  578. if init:
  579. run_subcommand('init', stdin=None, pwd=out_dir)
  580. # Load list of links from the existing index
  581. check_data_folder()
  582. # worker = start_cli_workers()
  583. new_links: List[Link] = []
  584. all_links = load_main_index(out_dir=out_dir)
  585. log_importing_started(urls=urls, depth=depth, index_only=index_only)
  586. if isinstance(urls, str):
  587. # save verbatim stdin to sources
  588. write_ahead_log = save_text_as_source(urls, filename='{ts}-import.txt', out_dir=out_dir)
  589. elif isinstance(urls, list):
  590. # save verbatim args to sources
  591. write_ahead_log = save_text_as_source('\n'.join(urls), filename='{ts}-import.txt', out_dir=out_dir)
  592. new_links += parse_links_from_source(write_ahead_log, root_url=None, parser=parser)
  593. # If we're going one level deeper, download each link and look for more links
  594. new_links_depth = []
  595. if new_links and depth == 1:
  596. log_crawl_started(new_links)
  597. for new_link in new_links:
  598. try:
  599. downloaded_file = save_file_as_source(new_link.url, filename=f'{new_link.timestamp}-crawl-{new_link.domain}.txt', out_dir=out_dir)
  600. new_links_depth += parse_links_from_source(downloaded_file, root_url=new_link.url)
  601. except Exception as err:
  602. stderr('[!] Failed to get contents of URL {new_link.url}', err, color='red')
  603. imported_links = list({link.url: link for link in (new_links + new_links_depth)}.values())
  604. new_links = dedupe_links(all_links, imported_links)
  605. write_main_index(links=new_links, out_dir=out_dir, created_by_id=created_by_id)
  606. all_links = load_main_index(out_dir=out_dir)
  607. tags = [
  608. Tag.objects.get_or_create(name=name.strip(), defaults={'created_by_id': created_by_id})[0]
  609. for name in tag.split(',')
  610. if name.strip()
  611. ]
  612. if tags:
  613. for link in imported_links:
  614. snapshot = Snapshot.objects.get(url=link.url)
  615. snapshot.tags.add(*tags)
  616. snapshot.tags_str(nocache=True)
  617. snapshot.save()
  618. # print(f' √ Tagged {len(imported_links)} Snapshots with {len(tags)} tags {tags_str}')
  619. if index_only:
  620. # mock archive all the links using the fake index_only extractor method in order to update their state
  621. if overwrite:
  622. archive_links(imported_links, overwrite=overwrite, methods=['index_only'], out_dir=out_dir, created_by_id=created_by_id)
  623. else:
  624. archive_links(new_links, overwrite=False, methods=['index_only'], out_dir=out_dir, created_by_id=created_by_id)
  625. else:
  626. # fully run the archive extractor methods for each link
  627. archive_kwargs = {
  628. "out_dir": out_dir,
  629. "created_by_id": created_by_id,
  630. }
  631. if extractors:
  632. archive_kwargs["methods"] = extractors
  633. stderr()
  634. ts = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
  635. if update:
  636. stderr(f'[*] [{ts}] Archiving + updating {len(imported_links)}/{len(all_links)}', len(imported_links), 'URLs from added set...', color='green')
  637. archive_links(imported_links, overwrite=overwrite, **archive_kwargs)
  638. elif update_all:
  639. stderr(f'[*] [{ts}] Archiving + updating {len(all_links)}/{len(all_links)}', len(all_links), 'URLs from entire library...', color='green')
  640. archive_links(all_links, overwrite=overwrite, **archive_kwargs)
  641. elif overwrite:
  642. stderr(f'[*] [{ts}] Archiving + overwriting {len(imported_links)}/{len(all_links)}', len(imported_links), 'URLs from added set...', color='green')
  643. archive_links(imported_links, overwrite=True, **archive_kwargs)
  644. elif new_links:
  645. stderr(f'[*] [{ts}] Archiving {len(new_links)}/{len(all_links)} URLs from added set...', color='green')
  646. archive_links(new_links, overwrite=False, **archive_kwargs)
  647. # tail_worker_logs(worker['stdout_logfile'])
  648. # if CAN_UPGRADE:
  649. # hint(f"There's a new version of ArchiveBox available! Your current version is {VERSION}. You can upgrade to {VERSIONS_AVAILABLE['recommended_version']['tag_name']} ({VERSIONS_AVAILABLE['recommended_version']['html_url']}). For more on how to upgrade: https://github.com/ArchiveBox/ArchiveBox/wiki/Upgrading-or-Merging-Archives\n")
  650. return new_links
  651. @enforce_types
  652. def remove(filter_str: Optional[str]=None,
  653. filter_patterns: Optional[List[str]]=None,
  654. filter_type: str='exact',
  655. snapshots: Optional[QuerySet]=None,
  656. after: Optional[float]=None,
  657. before: Optional[float]=None,
  658. yes: bool=False,
  659. delete: bool=False,
  660. out_dir: Path=DATA_DIR) -> List[Link]:
  661. """Remove the specified URLs from the archive"""
  662. check_data_folder()
  663. if snapshots is None:
  664. if filter_str and filter_patterns:
  665. stderr(
  666. '[X] You should pass either a pattern as an argument, '
  667. 'or pass a list of patterns via stdin, but not both.\n',
  668. color='red',
  669. )
  670. raise SystemExit(2)
  671. elif not (filter_str or filter_patterns):
  672. stderr(
  673. '[X] You should pass either a pattern as an argument, '
  674. 'or pass a list of patterns via stdin.',
  675. color='red',
  676. )
  677. stderr()
  678. hint(('To remove all urls you can run:',
  679. 'archivebox remove --filter-type=regex ".*"'))
  680. stderr()
  681. raise SystemExit(2)
  682. elif filter_str:
  683. filter_patterns = [ptn.strip() for ptn in filter_str.split('\n')]
  684. list_kwargs = {
  685. "filter_patterns": filter_patterns,
  686. "filter_type": filter_type,
  687. "after": after,
  688. "before": before,
  689. }
  690. if snapshots:
  691. list_kwargs["snapshots"] = snapshots
  692. log_list_started(filter_patterns, filter_type)
  693. timer = TimedProgress(360, prefix=' ')
  694. try:
  695. snapshots = list_links(**list_kwargs)
  696. finally:
  697. timer.end()
  698. if not snapshots.exists():
  699. log_removal_finished(0, 0)
  700. raise SystemExit(1)
  701. log_links = [link.as_link() for link in snapshots]
  702. log_list_finished(log_links)
  703. log_removal_started(log_links, yes=yes, delete=delete)
  704. timer = TimedProgress(360, prefix=' ')
  705. try:
  706. for snapshot in snapshots:
  707. if delete:
  708. shutil.rmtree(snapshot.as_link().link_dir, ignore_errors=True)
  709. finally:
  710. timer.end()
  711. to_remove = snapshots.count()
  712. from .search import flush_search_index
  713. flush_search_index(snapshots=snapshots)
  714. remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
  715. all_snapshots = load_main_index(out_dir=out_dir)
  716. log_removal_finished(all_snapshots.count(), to_remove)
  717. return all_snapshots
  718. @enforce_types
  719. def update(resume: Optional[float]=None,
  720. only_new: bool=ARCHIVING_CONFIG.ONLY_NEW,
  721. index_only: bool=False,
  722. overwrite: bool=False,
  723. filter_patterns_str: Optional[str]=None,
  724. filter_patterns: Optional[List[str]]=None,
  725. filter_type: Optional[str]=None,
  726. status: Optional[str]=None,
  727. after: Optional[str]=None,
  728. before: Optional[str]=None,
  729. extractors: str="",
  730. out_dir: Path=DATA_DIR) -> List[Link]:
  731. """Import any new links from subscriptions and retry any previously failed/skipped links"""
  732. from core.models import ArchiveResult
  733. from .search import index_links
  734. # from .queues.supervisor_util import start_cli_workers
  735. check_data_folder()
  736. # start_cli_workers()
  737. new_links: List[Link] = [] # TODO: Remove input argument: only_new
  738. extractors = extractors.split(",") if extractors else []
  739. # Step 1: Filter for selected_links
  740. print('[*] Finding matching Snapshots to update...')
  741. print(f' - Filtering by {" ".join(filter_patterns)} ({filter_type}) {before=} {after=} {status=}...')
  742. matching_snapshots = list_links(
  743. filter_patterns=filter_patterns,
  744. filter_type=filter_type,
  745. before=before,
  746. after=after,
  747. )
  748. print(f' - Checking {matching_snapshots.count()} snapshot folders for existing data with {status=}...')
  749. matching_folders = list_folders(
  750. links=matching_snapshots,
  751. status=status,
  752. out_dir=out_dir,
  753. )
  754. all_links = (link for link in matching_folders.values() if link)
  755. print(' - Sorting by most unfinished -> least unfinished + date archived...')
  756. all_links = sorted(all_links, key=lambda link: (ArchiveResult.objects.filter(snapshot__url=link.url).count(), link.timestamp))
  757. if index_only:
  758. for link in all_links:
  759. write_link_details(link, out_dir=out_dir, skip_sql_index=True)
  760. index_links(all_links, out_dir=out_dir)
  761. return all_links
  762. # Step 2: Run the archive methods for each link
  763. to_archive = new_links if only_new else all_links
  764. if resume:
  765. to_archive = [
  766. link for link in to_archive
  767. if link.timestamp >= str(resume)
  768. ]
  769. if not to_archive:
  770. stderr('')
  771. stderr(f'[√] Nothing found to resume after {resume}', color='green')
  772. return all_links
  773. archive_kwargs = {
  774. "out_dir": out_dir,
  775. }
  776. if extractors:
  777. archive_kwargs["methods"] = extractors
  778. archive_links(to_archive, overwrite=overwrite, **archive_kwargs)
  779. # Step 4: Re-write links index with updated titles, icons, and resources
  780. all_links = load_main_index(out_dir=out_dir)
  781. return all_links
  782. @enforce_types
  783. def list_all(filter_patterns_str: Optional[str]=None,
  784. filter_patterns: Optional[List[str]]=None,
  785. filter_type: str='exact',
  786. status: Optional[str]=None,
  787. after: Optional[float]=None,
  788. before: Optional[float]=None,
  789. sort: Optional[str]=None,
  790. csv: Optional[str]=None,
  791. json: bool=False,
  792. html: bool=False,
  793. with_headers: bool=False,
  794. out_dir: Path=DATA_DIR) -> Iterable[Link]:
  795. """List, filter, and export information about archive entries"""
  796. check_data_folder()
  797. if filter_patterns and filter_patterns_str:
  798. stderr(
  799. '[X] You should either pass filter patterns as an arguments '
  800. 'or via stdin, but not both.\n',
  801. color='red',
  802. )
  803. raise SystemExit(2)
  804. elif filter_patterns_str:
  805. filter_patterns = filter_patterns_str.split('\n')
  806. snapshots = list_links(
  807. filter_patterns=filter_patterns,
  808. filter_type=filter_type,
  809. before=before,
  810. after=after,
  811. )
  812. if sort:
  813. snapshots = snapshots.order_by(sort)
  814. folders = list_folders(
  815. links=snapshots,
  816. status=status,
  817. out_dir=out_dir,
  818. )
  819. if json:
  820. output = generate_json_index_from_links(folders.values(), with_headers)
  821. elif html:
  822. output = generate_index_from_links(folders.values(), with_headers)
  823. elif csv:
  824. output = links_to_csv(folders.values(), cols=csv.split(','), header=with_headers)
  825. else:
  826. output = printable_folders(folders, with_headers=with_headers)
  827. print(output)
  828. return folders
  829. @enforce_types
  830. def list_links(snapshots: Optional[QuerySet]=None,
  831. filter_patterns: Optional[List[str]]=None,
  832. filter_type: str='exact',
  833. after: Optional[float]=None,
  834. before: Optional[float]=None,
  835. out_dir: Path=DATA_DIR) -> Iterable[Link]:
  836. check_data_folder()
  837. if snapshots:
  838. all_snapshots = snapshots
  839. else:
  840. all_snapshots = load_main_index(out_dir=out_dir)
  841. if after is not None:
  842. all_snapshots = all_snapshots.filter(timestamp__gte=after)
  843. if before is not None:
  844. all_snapshots = all_snapshots.filter(timestamp__lt=before)
  845. if filter_patterns:
  846. all_snapshots = snapshot_filter(all_snapshots, filter_patterns, filter_type)
  847. if not all_snapshots:
  848. stderr('[!] No Snapshots matched your filters:', filter_patterns, f'({filter_type})', color='lightyellow')
  849. return all_snapshots
  850. @enforce_types
  851. def list_folders(links: List[Link],
  852. status: str,
  853. out_dir: Path=DATA_DIR) -> Dict[str, Optional[Link]]:
  854. check_data_folder()
  855. STATUS_FUNCTIONS = {
  856. "indexed": get_indexed_folders,
  857. "archived": get_archived_folders,
  858. "unarchived": get_unarchived_folders,
  859. "present": get_present_folders,
  860. "valid": get_valid_folders,
  861. "invalid": get_invalid_folders,
  862. "duplicate": get_duplicate_folders,
  863. "orphaned": get_orphaned_folders,
  864. "corrupted": get_corrupted_folders,
  865. "unrecognized": get_unrecognized_folders,
  866. }
  867. try:
  868. return STATUS_FUNCTIONS[status](links, out_dir=out_dir)
  869. except KeyError:
  870. raise ValueError('Status not recognized.')
  871. @enforce_types
  872. def install(out_dir: Path=DATA_DIR, binproviders: Optional[List[str]]=None, binaries: Optional[List[str]]=None, dry_run: bool=False) -> None:
  873. """Automatically install all ArchiveBox dependencies and extras"""
  874. # if running as root:
  875. # - run init to create index + lib dir
  876. # - chown -R 911 DATA_DIR
  877. # - install all binaries as root
  878. # - chown -R 911 LIB_DIR
  879. # else:
  880. # - run init to create index + lib dir as current user
  881. # - install all binaries as current user
  882. # - recommend user re-run with sudo if any deps need to be installed as root
  883. from rich import print
  884. from django.conf import settings
  885. from archivebox import CONSTANTS
  886. from archivebox.config.permissions import IS_ROOT, ARCHIVEBOX_USER, ARCHIVEBOX_GROUP
  887. if not (os.access(ARCHIVE_DIR, os.R_OK) and ARCHIVE_DIR.is_dir()):
  888. run_subcommand('init', stdin=None, pwd=out_dir) # must init full index because we need a db to store InstalledBinary entries in
  889. print('\n[green][+] Installing ArchiveBox dependencies automatically...[/green]')
  890. # we never want the data dir to be owned by root, detect owner of existing owner of DATA_DIR to try and guess desired non-root UID
  891. if IS_ROOT:
  892. EUID = os.geteuid()
  893. # if we have sudo/root permissions, take advantage of them just while installing dependencies
  894. print()
  895. print(f'[yellow]:warning: Running as UID=[blue]{EUID}[/blue] with [red]sudo[/red] only for dependencies that need it.[/yellow]')
  896. print(f' DATA_DIR, LIB_DIR, and TMP_DIR will be owned by [blue]{ARCHIVEBOX_USER}:{ARCHIVEBOX_GROUP}[/blue].')
  897. print()
  898. package_manager_names = ', '.join(
  899. f'[yellow]{binprovider.name}[/yellow]'
  900. for binprovider in reversed(list(settings.BINPROVIDERS.values()))
  901. if not binproviders or (binproviders and binprovider.name in binproviders)
  902. )
  903. print(f'[+] Setting up package managers {package_manager_names}...')
  904. for binprovider in reversed(list(settings.BINPROVIDERS.values())):
  905. if binproviders and binprovider.name not in binproviders:
  906. continue
  907. try:
  908. binprovider.setup()
  909. except Exception:
  910. # it's ok, installing binaries below will automatically set up package managers as needed
  911. # e.g. if user does not have npm available we cannot set it up here yet, but once npm Binary is installed
  912. # the next package that depends on npm will automatically call binprovider.setup() during its own install
  913. pass
  914. print()
  915. for binary in reversed(list(settings.BINARIES.values())):
  916. if binary.name in ('archivebox', 'django', 'sqlite', 'python'):
  917. # obviously must already be installed if we are running
  918. continue
  919. if binaries and binary.name not in binaries:
  920. continue
  921. providers = ' [grey53]or[/grey53] '.join(
  922. provider.name for provider in binary.binproviders_supported
  923. if not binproviders or (binproviders and provider.name in binproviders)
  924. )
  925. if not providers:
  926. continue
  927. print(f'[+] Detecting / Installing [yellow]{binary.name.ljust(22)}[/yellow] using [red]{providers}[/red]...')
  928. try:
  929. with SudoPermission(uid=0, fallback=True):
  930. # print(binary.load_or_install(fresh=True).model_dump(exclude={'overrides', 'bin_dir', 'hook_type'}))
  931. if binproviders:
  932. providers_supported_by_binary = [provider.name for provider in binary.binproviders_supported]
  933. for binprovider_name in binproviders:
  934. if binprovider_name not in providers_supported_by_binary:
  935. continue
  936. try:
  937. if dry_run:
  938. # always show install commands when doing a dry run
  939. sys.stderr.write("\033[2;49;90m") # grey53
  940. result = binary.install(binproviders=[binprovider_name], dry_run=dry_run).model_dump(exclude={'overrides', 'bin_dir', 'hook_type'})
  941. sys.stderr.write("\033[00m\n") # reset
  942. else:
  943. result = binary.load_or_install(binproviders=[binprovider_name], fresh=True, dry_run=dry_run, quiet=False).model_dump(exclude={'overrides', 'bin_dir', 'hook_type'})
  944. if result and result['loaded_version']:
  945. break
  946. except Exception as e:
  947. print(f'[red]:cross_mark: Failed to install {binary.name} as using {binprovider_name} as user {ARCHIVEBOX_USER}: {e}[/red]')
  948. else:
  949. if dry_run:
  950. sys.stderr.write("\033[2;49;90m") # grey53
  951. binary.install(dry_run=dry_run).model_dump(exclude={'overrides', 'bin_dir', 'hook_type'})
  952. sys.stderr.write("\033[00m\n") # reset
  953. else:
  954. binary.load_or_install(fresh=True, dry_run=dry_run).model_dump(exclude={'overrides', 'bin_dir', 'hook_type'})
  955. if IS_ROOT:
  956. with SudoPermission(uid=0):
  957. if ARCHIVEBOX_USER == 0:
  958. os.system(f'chmod -R 777 "{CONSTANTS.LIB_DIR.resolve()}"')
  959. else:
  960. os.system(f'chown -R {ARCHIVEBOX_USER} "{CONSTANTS.LIB_DIR.resolve()}"')
  961. except Exception as e:
  962. print(f'[red]:cross_mark: Failed to install {binary.name} as user {ARCHIVEBOX_USER}: {e}[/red]')
  963. if binaries and len(binaries) == 1:
  964. # if we are only installing a single binary, raise the exception so the user can see what went wrong
  965. raise

    from django.contrib.auth import get_user_model
    User = get_user_model()

    if not User.objects.filter(is_superuser=True).exclude(username='system').exists():
        stderr('\n[+] Don\'t forget to create a new admin user for the Web UI...', color='green')
        stderr('    archivebox manage createsuperuser')
        # run_subcommand('manage', subcommand_args=['createsuperuser'], pwd=out_dir)

    print('\n[green][√] Set up ArchiveBox and its dependencies successfully.[/green]\n', file=sys.stderr)

    from plugins_pkg.pip.binaries import ARCHIVEBOX_BINARY

    extra_args = []
    if binproviders:
        extra_args.append(f'--binproviders={",".join(binproviders)}')
    if binaries:
        extra_args.append(f'--binaries={",".join(binaries)}')

    # show the final detected/installed versions of everything, then exit with the version command's return code
    proc = run_shell([ARCHIVEBOX_BINARY.load().abspath, 'version', *extra_args], capture_output=False, cwd=out_dir)
    raise SystemExit(proc.returncode)


# backwards-compatibility alias:
setup = install


@enforce_types
def config(config_options_str: Optional[str]=None,
           config_options: Optional[List[str]]=None,
           get: bool=False,
           set: bool=False,
           search: bool=False,
           reset: bool=False,
           out_dir: Path=DATA_DIR) -> None:
    """Get and set your ArchiveBox project configuration values"""

    import abx.archivebox.reads
    from rich import print

    check_data_folder()

    if config_options and config_options_str:
        stderr(
            '[X] You should either pass config values as arguments '
            'or via stdin, but not both.\n',
            color='red',
        )
        raise SystemExit(2)
    elif config_options_str:
        config_options = config_options_str.split('\n')

    from django.conf import settings

    config_options = config_options or []
    no_args = not (get or set or reset or config_options)

    matching_config = {}
    if search:
        if config_options:
            config_options = [get_real_name(key) for key in config_options]
            matching_config = {key: settings.FLAT_CONFIG[key] for key in config_options if key in settings.FLAT_CONFIG}

            for config_section in settings.CONFIGS.values():
                aliases = config_section.aliases

                for search_key in config_options:
                    # search all aliases in the section
                    for alias_key, key in aliases.items():
                        if search_key.lower() in alias_key.lower():
                            matching_config[key] = config_section.model_dump()[key]

                    # search all keys and values in the section
                    for existing_key, value in config_section.model_dump().items():
                        if search_key.lower() in existing_key.lower() or search_key.lower() in str(value).lower():
                            matching_config[existing_key] = value

        print(printable_config(matching_config))
        raise SystemExit(not matching_config)
    elif get or no_args:
        if config_options:
            config_options = [get_real_name(key) for key in config_options]
            matching_config = {key: settings.FLAT_CONFIG[key] for key in config_options if key in settings.FLAT_CONFIG}
            failed_config = [key for key in config_options if key not in settings.FLAT_CONFIG]
            if failed_config:
                stderr()
                stderr('[X] These options failed to get', color='red')
                stderr('    {}'.format('\n    '.join(failed_config)))
                raise SystemExit(1)
        else:
            matching_config = settings.FLAT_CONFIG

        print(printable_config(matching_config))
        raise SystemExit(not matching_config)
    elif set:
        new_config = {}
        failed_options = []
        for line in config_options:
            if line.startswith('#') or not line.strip():
                continue
            if '=' not in line:
                stderr('[X] Config KEY=VALUE must have an = sign in it', color='red')
                stderr(f'    {line}')
                raise SystemExit(2)

            raw_key, val = line.split('=', 1)
            raw_key = raw_key.upper().strip()
            key = get_real_name(raw_key)
            if key != raw_key:
                stderr(f'[i] Note: The config option {raw_key} has been renamed to {key}, please use the new name going forward.', color='lightyellow')

            if key in settings.FLAT_CONFIG:
                new_config[key] = val.strip()
            else:
                failed_options.append(line)

        if new_config:
            before = settings.FLAT_CONFIG
            matching_config = write_config_file(new_config)
            after = {**load_all_config(), **abx.archivebox.reads.get_FLAT_CONFIG()}
            print(printable_config(matching_config))

            side_effect_changes = {}
            for key in after:
                if key in settings.FLAT_CONFIG and (str(before[key]) != str(after[key])) and (key not in matching_config):
                    side_effect_changes[key] = after[key]

            if side_effect_changes:
                stderr()
                stderr('[i] Note: This change also affected these other options that depended on it:', color='lightyellow')
                print('    {}'.format(printable_config(side_effect_changes, prefix='    ')))

        if failed_options:
            stderr()
            stderr('[X] These options failed to set (check for typos):', color='red')
            stderr('    {}'.format('\n    '.join(failed_options)))
            raise SystemExit(1)
    elif reset:
        stderr('[X] This command is not implemented yet.', color='red')
        stderr('    Please manually remove the relevant lines from your config file:')
        raise SystemExit(2)
    else:
        stderr('[X] You must pass either --get or --set, or no arguments to get the whole config.', color='red')
        stderr('        archivebox config')
        stderr('        archivebox config --get SOME_KEY')
        stderr('        archivebox config --set SOME_KEY=SOME_VALUE')
        raise SystemExit(2)


@enforce_types
def schedule(add: bool=False,
             show: bool=False,
             clear: bool=False,
             foreground: bool=False,
             run_all: bool=False,
             quiet: bool=False,
             every: Optional[str]=None,
             tag: str='',
             depth: int=0,
             overwrite: bool=False,
             update: bool=not ARCHIVING_CONFIG.ONLY_NEW,
             import_path: Optional[str]=None,
             out_dir: Path=DATA_DIR):
    """Set ArchiveBox to regularly import URLs at specific times using cron"""

    check_data_folder()
    from archivebox.plugins_pkg.pip.binaries import ARCHIVEBOX_BINARY
    from archivebox.config.permissions import USER

    Path(CONSTANTS.LOGS_DIR).mkdir(exist_ok=True)

    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)

    if clear:
        print(cron.remove_all(comment=CRON_COMMENT))
        cron.write()
        raise SystemExit(0)

    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if every or add:
        every = every or 'day'
        quoted = lambda s: f'"{s}"' if (s and ' ' in str(s)) else str(s)
        cmd = [
            'cd',
            quoted(out_dir),
            '&&',
            quoted(ARCHIVEBOX_BINARY.load().abspath),
            *([
                'add',
                *(['--overwrite'] if overwrite else []),
                *(['--update'] if update else []),
                *([f'--tag={tag}'] if tag else []),
                f'--depth={depth}',
                f'"{import_path}"',
            ] if import_path else ['update']),
            '>>',
            quoted(Path(CONSTANTS.LOGS_DIR) / 'schedule.log'),
            '2>&1',
        ]
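        # the assembled crontab entry ends up looking roughly like this (paths are illustrative):
        #   cd "/data" && /usr/local/bin/archivebox add --depth=1 "https://example.com/feed.xml" >> /data/logs/schedule.log 2>&1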
        new_job = cron.new(command=' '.join(cmd), comment=CRON_COMMENT)

        if every in ('minute', 'hour', 'day', 'month', 'year'):
            set_every = getattr(new_job.every(), every)
            set_every()
        elif CronSlices.is_valid(every):
            new_job.setall(every)
        else:
            stderr('{red}[X] Got invalid timeperiod for cron task.{reset}'.format(**SHELL_CONFIG.ANSI))
            stderr('    It must be one of minute/hour/day/month/year')
            stderr('    or a quoted cron-format schedule like:')
            stderr('        archivebox schedule --every=day --depth=1 https://example.com/some/rss/feed.xml')
            stderr('        archivebox schedule --every="0/5 * * * *" --depth=1 https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        cron = dedupe_cron_jobs(cron)
        cron.write()

        total_runs = sum(j.frequency_per_year() for j in cron)
        existing_jobs = list(cron.find_comment(CRON_COMMENT))

        print()
        print('{green}[√] Scheduled new ArchiveBox cron job for user: {} ({} jobs are active).{reset}'.format(USER, len(existing_jobs), **SHELL_CONFIG.ANSI))
        print('\n'.join(f'  > {cmd}' if str(cmd) == str(new_job) else f'    {cmd}' for cmd in existing_jobs))
        if total_runs > 60 and not quiet:
            stderr()
            stderr('{lightyellow}[!] With the current cron config, ArchiveBox is estimated to run >{} times per year.{reset}'.format(total_runs, **SHELL_CONFIG.ANSI))
            stderr('    Congrats on being an enthusiastic internet archiver! 👌')
            stderr()
            stderr('    Make sure you have enough storage space available to hold all the data.')
            stderr('    Using a compressed/deduped filesystem like ZFS is recommended if you plan on archiving a lot.')
            stderr('')
    elif show:
        if existing_jobs:
            print('\n'.join(str(cmd) for cmd in existing_jobs))
        else:
            stderr('{red}[X] There are no ArchiveBox cron jobs scheduled for your user ({}).{reset}'.format(USER, **SHELL_CONFIG.ANSI))
            stderr('    To schedule a new job, run:')
            stderr('        archivebox schedule --every=[timeperiod] --depth=1 https://example.com/some/rss/feed.xml')
        raise SystemExit(0)

    cron = CronTab(user=True)
    cron = dedupe_cron_jobs(cron)
    existing_jobs = list(cron.find_comment(CRON_COMMENT))

    if foreground or run_all:
        if not existing_jobs:
            stderr('{red}[X] You must schedule some jobs first before running in foreground mode.{reset}'.format(**SHELL_CONFIG.ANSI))
            stderr('    archivebox schedule --every=hour --depth=1 https://example.com/some/rss/feed.xml')
            raise SystemExit(1)

        print('{green}[*] Running {} ArchiveBox jobs in foreground task scheduler...{reset}'.format(len(existing_jobs), **SHELL_CONFIG.ANSI))
        if run_all:
            try:
                for job in existing_jobs:
                    sys.stdout.write(f'  > {job.command.split("/archivebox ")[0].split(" && ")[0]}\n')
                    sys.stdout.write(f'    > {job.command.split("/archivebox ")[-1].split(" >> ")[0]}')
                    sys.stdout.flush()
                    job.run()
                    sys.stdout.write(f'\r    √ {job.command.split("/archivebox ")[-1]}\n')
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**SHELL_CONFIG.ANSI))
                raise SystemExit(1)

        if foreground:
            try:
                for job in existing_jobs:
                    print(f'  > {job.command.split("/archivebox ")[-1].split(" >> ")[0]}')
                for result in cron.run_scheduler():
                    print(result)
            except KeyboardInterrupt:
                print('\n{green}[√] Stopped.{reset}'.format(**SHELL_CONFIG.ANSI))
                raise SystemExit(1)

    # if CAN_UPGRADE:
    #     hint(f"There's a new version of ArchiveBox available! Your current version is {VERSION}. You can upgrade to {VERSIONS_AVAILABLE['recommended_version']['tag_name']} ({VERSIONS_AVAILABLE['recommended_version']['html_url']}). For more on how to upgrade: https://github.com/ArchiveBox/ArchiveBox/wiki/Upgrading-or-Merging-Archives\n")


@enforce_types
def server(runserver_args: Optional[List[str]]=None,
           reload: bool=False,
           debug: bool=False,
           init: bool=False,
           quick_init: bool=False,
           createsuperuser: bool=False,
           daemonize: bool=False,
           out_dir: Path=DATA_DIR) -> None:
    """Run the ArchiveBox HTTP server"""

    from rich import print

    runserver_args = runserver_args or []

    if init:
        run_subcommand('init', stdin=None, pwd=out_dir)
        print()
    elif quick_init:
        run_subcommand('init', subcommand_args=['--quick'], stdin=None, pwd=out_dir)
        print()

    if createsuperuser:
        run_subcommand('manage', subcommand_args=['createsuperuser'], pwd=out_dir)
        print()

    check_data_folder()

    from django.core.management import call_command
    from django.contrib.auth.models import User

    host = '127.0.0.1'
    port = '8000'

    try:
        # accept an optional bind address passed as a positional runserver arg:
        # '8000' (port only), '0.0.0.0' (host only), or '0.0.0.0:8000' (host:port)
        host_and_port = [arg for arg in runserver_args if arg.replace('.', '').replace(':', '').isdigit()][0]
        if ':' in host_and_port:
            host, port = host_and_port.split(':')
        elif '.' in host_and_port:
            host = host_and_port
        else:
            port = host_and_port
    except IndexError:
        pass

    if not User.objects.filter(is_superuser=True).exclude(username='system').exists():
        print()
        # print('[yellow][!] No admin accounts exist, you must create one to be able to log in to the Admin UI![/yellow]')
        print(f'[violet]Hint:[/violet] To create an [bold]admin username & password[/bold] for the [deep_sky_blue3][underline][link=http://{host}:{port}/admin]Admin UI[/link][/underline][/deep_sky_blue3], run:')
        print('        [green]archivebox manage createsuperuser[/green]')
        print()

    print('[green][+] Starting ArchiveBox webserver...[/green]')
    print(f'    [blink][green]>[/green][/blink] Starting ArchiveBox webserver on [deep_sky_blue4][link=http://{host}:{port}]http://{host}:{port}[/link][/deep_sky_blue4]')
    print(f'    [green]>[/green] Log in to ArchiveBox Admin UI on [deep_sky_blue3][link=http://{host}:{port}/admin]http://{host}:{port}/admin[/link][/deep_sky_blue3]')
    print('    > Writing ArchiveBox error log to ./logs/errors.log')

    if SHELL_CONFIG.DEBUG:
        if not reload:
            runserver_args.append('--noreload')  # '--insecure'
        # in DEBUG mode, run Django's built-in runserver directly in the foreground
        call_command("runserver", *runserver_args)
    else:
        from queues.supervisor_util import start_server_workers

        print()
        start_server_workers(host=host, port=port, daemonize=False)
        print("\n[i][green][🟩] ArchiveBox server shut down gracefully.[/green][/i]")


@enforce_types
def manage(args: Optional[List[str]]=None, out_dir: Path=DATA_DIR) -> None:
    """Run an ArchiveBox Django management command"""

    check_data_folder()

    from django.core.management import execute_from_command_line

    if (args and "createsuperuser" in args) and (IN_DOCKER and not SHELL_CONFIG.IS_TTY):
        stderr('[!] Warning: you need to pass -it to use interactive commands in docker', color='lightyellow')
        stderr('    docker run -it archivebox manage {}'.format(' '.join(args or ['...'])), color='lightyellow')
        stderr('')

    execute_from_command_line(['manage.py', *(args or ['help'])])


@enforce_types
def shell(out_dir: Path=DATA_DIR) -> None:
    """Enter an interactive ArchiveBox Django shell"""

    check_data_folder()

    from django.core.management import call_command
    call_command("shell_plus")