config.py 56 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196
  1. """
  2. ArchiveBox config definitions (including defaults and dynamic config options).
  3. Config Usage Example:
  4. archivebox config --set MEDIA_TIMEOUT=600
  5. env MEDIA_TIMEOUT=600 USE_COLOR=False ... archivebox [subcommand] ...
  6. Config Precedence Order:
  7. 1. cli args (--update-all / --index-only / etc.)
  8. 2. shell environment vars (env USE_COLOR=False archivebox add '...')
  9. 3. config file (echo "SAVE_FAVICON=False" >> ArchiveBox.conf)
  10. 4. defaults (defined below in Python)
  11. Documentation:
  12. https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration
  13. """
  14. __package__ = 'archivebox'
  15. import os
  16. import io
  17. import re
  18. import sys
  19. import json
  20. import getpass
  21. import platform
  22. import shutil
  23. import sqlite3
  24. import django
  25. from hashlib import md5
  26. from pathlib import Path
  27. from datetime import datetime, timezone
  28. from typing import Optional, Type, Tuple, Dict, Union, List
  29. from subprocess import run, PIPE, DEVNULL
  30. from configparser import ConfigParser
  31. from collections import defaultdict
  32. from .config_stubs import (
  33. SimpleConfigValueDict,
  34. ConfigValue,
  35. ConfigDict,
  36. ConfigDefaultValue,
  37. ConfigDefaultDict,
  38. )
# Best-effort detection of the current username (used for display / ownership checks).
SYSTEM_USER = getpass.getuser() or os.getlogin()
try:
    # prefer the passwd-database name for the effective UID when available,
    # since getuser()/getlogin() can be wrong under sudo or in containers
    import pwd
    SYSTEM_USER = pwd.getpwuid(os.geteuid()).pw_name or SYSTEM_USER
except ModuleNotFoundError:
    # pwd is only needed for some linux systems, doesn't exist on windows
    pass
  46. ############################### Config Schema ##################################
# Master config schema: maps section name -> {KEY: {'type': ..., 'default': ..., 'aliases': (...,)}}.
# 'default' may be a plain value or a lambda taking the partially-resolved config dict `c`
# (so later keys can derive their defaults from earlier ones).
CONFIG_SCHEMA: Dict[str, ConfigDefaultDict] = {
    'SHELL_CONFIG': {
        'IS_TTY': {'type': bool, 'default': lambda _: sys.stdout.isatty()},
        'USE_COLOR': {'type': bool, 'default': lambda c: c['IS_TTY']},
        'SHOW_PROGRESS': {'type': bool, 'default': lambda c: (c['IS_TTY'] and platform.system() != 'Darwin')},  # progress bars are buggy on mac, disable for now
        'IN_DOCKER': {'type': bool, 'default': False},
        # TODO: 'SHOW_HINTS': {'type': bool, 'default': True},
    },

    'GENERAL_CONFIG': {
        'OUTPUT_DIR': {'type': str, 'default': None},
        'CONFIG_FILE': {'type': str, 'default': None},
        'ONLY_NEW': {'type': bool, 'default': True},
        'TIMEOUT': {'type': int, 'default': 60},
        'MEDIA_TIMEOUT': {'type': int, 'default': 3600},
        'OUTPUT_PERMISSIONS': {'type': str, 'default': '644'},
        'RESTRICT_FILE_NAMES': {'type': str, 'default': 'windows'},
        'URL_BLACKLIST': {'type': str, 'default': r'\.(css|js|otf|ttf|woff|woff2|gstatic\.com|googleapis\.com/css)(\?.*)?$'},  # to avoid downloading code assets as their own pages
        'URL_WHITELIST': {'type': str, 'default': None},
        'ENFORCE_ATOMIC_WRITES': {'type': bool, 'default': True},
        'TAG_SEPARATOR_PATTERN': {'type': str, 'default': r'[,]'},
    },

    'SERVER_CONFIG': {
        'SECRET_KEY': {'type': str, 'default': None},
        # inside docker we must bind 0.0.0.0 so the port is reachable from the host
        'BIND_ADDR': {'type': str, 'default': lambda c: ['127.0.0.1:8000', '0.0.0.0:8000'][c['IN_DOCKER']]},
        'ALLOWED_HOSTS': {'type': str, 'default': '*'},
        'DEBUG': {'type': bool, 'default': False},
        'PUBLIC_INDEX': {'type': bool, 'default': True},
        'PUBLIC_SNAPSHOTS': {'type': bool, 'default': True},
        'PUBLIC_ADD_VIEW': {'type': bool, 'default': False},
        'FOOTER_INFO': {'type': str, 'default': 'Content is hosted for personal archiving purposes only. Contact server owner for any takedown requests.'},
        'SNAPSHOTS_PER_PAGE': {'type': int, 'default': 40},
        'CUSTOM_TEMPLATES_DIR': {'type': str, 'default': None},
        'TIME_ZONE': {'type': str, 'default': 'UTC'},
    },

    # per-extractor on/off switches (old FETCH_* names kept as aliases for back-compat)
    'ARCHIVE_METHOD_TOGGLES': {
        'SAVE_TITLE': {'type': bool, 'default': True, 'aliases': ('FETCH_TITLE',)},
        'SAVE_FAVICON': {'type': bool, 'default': True, 'aliases': ('FETCH_FAVICON',)},
        'SAVE_WGET': {'type': bool, 'default': True, 'aliases': ('FETCH_WGET',)},
        'SAVE_WGET_REQUISITES': {'type': bool, 'default': True, 'aliases': ('FETCH_WGET_REQUISITES',)},
        'SAVE_SINGLEFILE': {'type': bool, 'default': True, 'aliases': ('FETCH_SINGLEFILE',)},
        'SAVE_READABILITY': {'type': bool, 'default': True, 'aliases': ('FETCH_READABILITY',)},
        'SAVE_MERCURY': {'type': bool, 'default': True, 'aliases': ('FETCH_MERCURY',)},
        'SAVE_PDF': {'type': bool, 'default': True, 'aliases': ('FETCH_PDF',)},
        'SAVE_SCREENSHOT': {'type': bool, 'default': True, 'aliases': ('FETCH_SCREENSHOT',)},
        'SAVE_DOM': {'type': bool, 'default': True, 'aliases': ('FETCH_DOM',)},
        'SAVE_HEADERS': {'type': bool, 'default': True, 'aliases': ('FETCH_HEADERS',)},
        'SAVE_WARC': {'type': bool, 'default': True, 'aliases': ('FETCH_WARC',)},
        'SAVE_GIT': {'type': bool, 'default': True, 'aliases': ('FETCH_GIT',)},
        'SAVE_MEDIA': {'type': bool, 'default': True, 'aliases': ('FETCH_MEDIA',)},
        'SAVE_ARCHIVE_DOT_ORG': {'type': bool, 'default': True, 'aliases': ('SUBMIT_ARCHIVE_DOT_ORG',)},
    },

    'ARCHIVE_METHOD_OPTIONS': {
        'RESOLUTION': {'type': str, 'default': '1440,2000', 'aliases': ('SCREENSHOT_RESOLUTION',)},
        'GIT_DOMAINS': {'type': str, 'default': 'github.com,bitbucket.org,gitlab.com,gist.github.com'},
        'CHECK_SSL_VALIDITY': {'type': bool, 'default': True},
        'MEDIA_MAX_SIZE': {'type': str, 'default': '750m'},
        # {VERSION}/{CURL_VERSION}/{WGET_VERSION} placeholders are filled in later
        # by DYNAMIC_CONFIG_SCHEMA via .format(**c)
        'CURL_USER_AGENT': {'type': str, 'default': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/605.1.15 ArchiveBox/{VERSION} (+https://github.com/ArchiveBox/ArchiveBox/) curl/{CURL_VERSION}'},
        'WGET_USER_AGENT': {'type': str, 'default': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/605.1.15 ArchiveBox/{VERSION} (+https://github.com/ArchiveBox/ArchiveBox/) wget/{WGET_VERSION}'},
        'CHROME_USER_AGENT': {'type': str, 'default': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/605.1.15 ArchiveBox/{VERSION} (+https://github.com/ArchiveBox/ArchiveBox/)'},
        'COOKIES_FILE': {'type': str, 'default': None},
        'CHROME_USER_DATA_DIR': {'type': str, 'default': None},
        'CHROME_HEADLESS': {'type': bool, 'default': True},
        # the chrome sandbox does not work inside containers, so disable it in docker
        'CHROME_SANDBOX': {'type': bool, 'default': lambda c: not c['IN_DOCKER']},
        'YOUTUBEDL_ARGS': {'type': list, 'default': lambda c: [
            '--write-description',
            '--write-info-json',
            '--write-annotations',
            '--write-thumbnail',
            '--no-call-home',
            '--write-sub',
            '--all-subs',
            '--write-auto-sub',
            '--convert-subs=srt',
            '--yes-playlist',
            '--continue',
            '--ignore-errors',
            '--no-abort-on-error',
            '--geo-bypass',
            '--add-metadata',
            '--max-filesize={}'.format(c['MEDIA_MAX_SIZE']),
        ]},
        'WGET_ARGS': {'type': list, 'default': ['--no-verbose',
                                                '--adjust-extension',
                                                '--convert-links',
                                                '--force-directories',
                                                '--backup-converted',
                                                '--span-hosts',
                                                '--no-parent',
                                                '-e', 'robots=off',
                                                ]},
        'CURL_ARGS': {'type': list, 'default': ['--silent',
                                                '--location',
                                                '--compressed'
                                                ]},
        'GIT_ARGS': {'type': list, 'default': ['--recursive']},
    },

    'SEARCH_BACKEND_CONFIG' : {
        'USE_INDEXING_BACKEND': {'type': bool, 'default': True},
        'USE_SEARCHING_BACKEND': {'type': bool, 'default': True},
        'SEARCH_BACKEND_ENGINE': {'type': str, 'default': 'ripgrep'},
        'SEARCH_BACKEND_HOST_NAME': {'type': str, 'default': 'localhost'},
        'SEARCH_BACKEND_PORT': {'type': int, 'default': 1491},
        'SEARCH_BACKEND_PASSWORD': {'type': str, 'default': 'SecretPassword'},
        # SONIC
        'SONIC_COLLECTION': {'type': str, 'default': 'archivebox'},
        'SONIC_BUCKET': {'type': str, 'default': 'snapshots'},
        'SEARCH_BACKEND_TIMEOUT': {'type': int, 'default': 90},
    },

    'DEPENDENCY_CONFIG': {
        'USE_CURL': {'type': bool, 'default': True},
        'USE_WGET': {'type': bool, 'default': True},
        'USE_SINGLEFILE': {'type': bool, 'default': True},
        'USE_READABILITY': {'type': bool, 'default': True},
        'USE_MERCURY': {'type': bool, 'default': True},
        'USE_GIT': {'type': bool, 'default': True},
        'USE_CHROME': {'type': bool, 'default': True},
        'USE_NODE': {'type': bool, 'default': True},
        'USE_YOUTUBEDL': {'type': bool, 'default': True},
        'USE_RIPGREP': {'type': bool, 'default': True},
        'CURL_BINARY': {'type': str, 'default': 'curl'},
        'GIT_BINARY': {'type': str, 'default': 'git'},
        'WGET_BINARY': {'type': str, 'default': 'wget'},
        # node-based extractors are looked up in ./node_modules/.bin first (see bin_path)
        'SINGLEFILE_BINARY': {'type': str, 'default': lambda c: bin_path('single-file')},
        'READABILITY_BINARY': {'type': str, 'default': lambda c: bin_path('readability-extractor')},
        'MERCURY_BINARY': {'type': str, 'default': lambda c: bin_path('mercury-parser')},
        'YOUTUBEDL_BINARY': {'type': str, 'default': 'youtube-dl'},
        'NODE_BINARY': {'type': str, 'default': 'node'},
        'RIPGREP_BINARY': {'type': str, 'default': 'rg'},
        'CHROME_BINARY': {'type': str, 'default': None},
        'POCKET_CONSUMER_KEY': {'type': str, 'default': None},
        'POCKET_ACCESS_TOKENS': {'type': dict, 'default': {}},
    },
}
########################## Backwards-Compatibility #############################
# for backwards compatibility with old config files, check old/deprecated names for each key
# CONFIG_ALIASES: flat mapping of deprecated alias name -> current canonical key name
CONFIG_ALIASES = {
    alias: key
    for section in CONFIG_SCHEMA.values()
    for key, default in section.items()
    for alias in default.get('aliases', ())
}
# USER_CONFIG: the set of all canonical (non-alias) config key names
USER_CONFIG = {key for section in CONFIG_SCHEMA.values() for key in section.keys()}
  189. def get_real_name(key: str) -> str:
  190. """get the current canonical name for a given deprecated config key"""
  191. return CONFIG_ALIASES.get(key.upper().strip(), key.upper().strip())
################################ Constants #####################################

# well-known names for files/dirs inside an ArchiveBox collection folder
PACKAGE_DIR_NAME = 'archivebox'
TEMPLATES_DIR_NAME = 'templates'
ARCHIVE_DIR_NAME = 'archive'
SOURCES_DIR_NAME = 'sources'
LOGS_DIR_NAME = 'logs'
SQL_INDEX_FILENAME = 'index.sqlite3'
JSON_INDEX_FILENAME = 'index.json'
HTML_INDEX_FILENAME = 'index.html'
ROBOTS_TXT_FILENAME = 'robots.txt'
FAVICON_FILENAME = 'favicon.ico'
CONFIG_FILENAME = 'ArchiveBox.conf'

# ANSI escape codes used for CLI output when color is enabled
DEFAULT_CLI_COLORS = {
    'reset': '\033[00;00m',
    'lightblue': '\033[01;30m',   # NOTE(review): same code as 'black' below — possibly unintended, confirm
    'lightyellow': '\033[01;33m',
    'lightred': '\033[01;35m',    # NOTE(review): 35 is magenta in standard ANSI — confirm intended
    'red': '\033[01;31m',
    'green': '\033[01;32m',
    'blue': '\033[01;34m',
    'white': '\033[01;37m',
    'black': '\033[01;30m',
}
# no-color fallback: same keys as DEFAULT_CLI_COLORS but every escape code is empty
ANSI = {k: '' for k in DEFAULT_CLI_COLORS.keys()}

# maps two-digit ANSI color codes to a pair of RGB tuples
# (presumably [bright, dim] variants for rendering colored output elsewhere — confirm at call sites)
COLOR_DICT = defaultdict(lambda: [(0, 0, 0), (0, 0, 0)], {
    '00': [(0, 0, 0), (0, 0, 0)],
    '30': [(0, 0, 0), (0, 0, 0)],
    '31': [(255, 0, 0), (128, 0, 0)],
    '32': [(0, 200, 0), (0, 128, 0)],
    '33': [(255, 255, 0), (128, 128, 0)],
    '34': [(0, 0, 255), (0, 0, 128)],
    '35': [(255, 0, 255), (128, 0, 128)],
    '36': [(0, 255, 255), (0, 128, 128)],
    '37': [(255, 255, 255), (255, 255, 255)],
})
# file extensions (lowercase, no leading dot) treated as static downloads rather than pages
STATICFILE_EXTENSIONS = {
    # 99.999% of the time, URLs ending in these extensions are static files
    # that can be downloaded as-is, not html pages that need to be rendered
    'gif', 'jpeg', 'jpg', 'png', 'tif', 'tiff', 'wbmp', 'ico', 'jng', 'bmp',
    'svg', 'svgz', 'webp', 'ps', 'eps', 'ai',
    'mp3', 'mp4', 'm4a', 'mpeg', 'mpg', 'mkv', 'mov', 'webm', 'm4v',
    'flv', 'wmv', 'avi', 'ogg', 'ts', 'm3u8',
    'pdf', 'txt', 'rtf', 'rtfd', 'doc', 'docx', 'ppt', 'pptx', 'xls', 'xlsx',
    'atom', 'rss', 'css', 'js', 'json',
    'dmg', 'iso', 'img',
    'rar', 'war', 'hqx', 'zip', 'gz', 'bz2', '7z',

    # Less common extensions to consider adding later
    # jar, swf, bin, com, exe, dll, deb
    # ear, hqx, eot, wmlc, kml, kmz, cco, jardiff, jnlp, run, msi, msp, msm,
    # pl pm, prc pdb, rar, rpm, sea, sit, tcl tk, der, pem, crt, xpi, xspf,
    # ra, mng, asx, asf, 3gpp, 3gp, mid, midi, kar, jad, wml, htc, mml

    # These are always treated as pages, not as static files, never add them:
    # html, htm, shtml, xhtml, xml, aspx, php, cgi
}
# When initializing archivebox in a new directory, we check to make sure the dir is
# actually empty so that we dont clobber someone's home directory or desktop by accident.
# These files are exceptions to the is_empty check when we're trying to init a new dir,
# as they could be from a previous archivebox version, system artifacts, dependencies, etc.
ALLOWED_IN_OUTPUT_DIR = {
    # common system / tooling artifacts
    '.gitignore',
    'lost+found',
    '.DS_Store',
    '.venv',
    'venv',
    'virtualenv',
    '.virtualenv',
    'node_modules',
    'package.json',
    'package-lock.json',
    'yarn.lock',
    'static',
    'sonic',
    # files/dirs created by a previous ArchiveBox install in this folder
    ARCHIVE_DIR_NAME,
    SOURCES_DIR_NAME,
    LOGS_DIR_NAME,
    SQL_INDEX_FILENAME,
    f'{SQL_INDEX_FILENAME}-wal',
    f'{SQL_INDEX_FILENAME}-shm',
    JSON_INDEX_FILENAME,
    HTML_INDEX_FILENAME,
    ROBOTS_TXT_FILENAME,
    FAVICON_FILENAME,
    CONFIG_FILENAME,
    f'{CONFIG_FILENAME}.bak',
    'static_index.json',
}
############################## Derived Config ##################################

# Second-pass schema applied after CONFIG_SCHEMA is resolved: every entry derives
# its value from the already-loaded config dict `c` (paths, versions, and the
# effective USE_*/SAVE_* flags after cross-checking dependencies against toggles).
DYNAMIC_CONFIG_SCHEMA: ConfigDefaultDict = {
    # stored as a zero-arg callable so the terminal width is re-read at call time
    'TERM_WIDTH': {'default': lambda c: lambda: shutil.get_terminal_size((100, 10)).columns},
    'USER': {'default': lambda c: SYSTEM_USER},
    'ANSI': {'default': lambda c: DEFAULT_CLI_COLORS if c['USE_COLOR'] else {k: '' for k in DEFAULT_CLI_COLORS.keys()}},

    # filesystem layout
    'PACKAGE_DIR': {'default': lambda c: Path(__file__).resolve().parent},
    'TEMPLATES_DIR': {'default': lambda c: c['PACKAGE_DIR'] / TEMPLATES_DIR_NAME},
    'CUSTOM_TEMPLATES_DIR': {'default': lambda c: c['CUSTOM_TEMPLATES_DIR'] and Path(c['CUSTOM_TEMPLATES_DIR'])},
    'OUTPUT_DIR': {'default': lambda c: Path(c['OUTPUT_DIR']).resolve() if c['OUTPUT_DIR'] else Path(os.curdir).resolve()},
    'ARCHIVE_DIR': {'default': lambda c: c['OUTPUT_DIR'] / ARCHIVE_DIR_NAME},
    'SOURCES_DIR': {'default': lambda c: c['OUTPUT_DIR'] / SOURCES_DIR_NAME},
    'LOGS_DIR': {'default': lambda c: c['OUTPUT_DIR'] / LOGS_DIR_NAME},
    'CONFIG_FILE': {'default': lambda c: Path(c['CONFIG_FILE']).resolve() if c['CONFIG_FILE'] else c['OUTPUT_DIR'] / CONFIG_FILENAME},
    'COOKIES_FILE': {'default': lambda c: c['COOKIES_FILE'] and Path(c['COOKIES_FILE']).resolve()},
    'CHROME_USER_DATA_DIR': {'default': lambda c: find_chrome_data_dir() if c['CHROME_USER_DATA_DIR'] is None else (Path(c['CHROME_USER_DATA_DIR']).resolve() if c['CHROME_USER_DATA_DIR'] else None)},  # None means unset, so we autodetect it with find_chrome_data_dir(), but emptystring '' means user manually set it to '', and we should store it as None

    # compiled regexes / permissions derived from raw string options
    'URL_BLACKLIST_PTN': {'default': lambda c: c['URL_BLACKLIST'] and re.compile(c['URL_BLACKLIST'] or '', re.IGNORECASE | re.UNICODE | re.MULTILINE)},
    'URL_WHITELIST_PTN': {'default': lambda c: c['URL_WHITELIST'] and re.compile(c['URL_WHITELIST'] or '', re.IGNORECASE | re.UNICODE | re.MULTILINE)},
    # dirs need the execute bit wherever files have read/write (644 -> 755)
    'DIR_OUTPUT_PERMISSIONS': {'default': lambda c: c['OUTPUT_PERMISSIONS'].replace('6', '7').replace('4', '5')},

    # runtime environment info
    'ARCHIVEBOX_BINARY': {'default': lambda c: sys.argv[0] or bin_path('archivebox')},
    'VERSION': {'default': lambda c: json.loads((Path(c['PACKAGE_DIR']) / 'package.json').read_text(encoding='utf-8').strip())['version']},
    'PYTHON_BINARY': {'default': lambda c: sys.executable},
    'PYTHON_ENCODING': {'default': lambda c: sys.stdout.encoding.upper()},
    'PYTHON_VERSION': {'default': lambda c: '{}.{}.{}'.format(*sys.version_info[:3])},
    'DJANGO_BINARY': {'default': lambda c: django.__file__.replace('__init__.py', 'bin/django-admin.py')},
    'DJANGO_VERSION': {'default': lambda c: '{}.{}.{} {} ({})'.format(*django.VERSION)},

    # effective dependency flags: a tool is only "used" if enabled AND some extractor needs it
    'USE_CURL': {'default': lambda c: c['USE_CURL'] and (c['SAVE_FAVICON'] or c['SAVE_TITLE'] or c['SAVE_ARCHIVE_DOT_ORG'])},
    'CURL_VERSION': {'default': lambda c: bin_version(c['CURL_BINARY']) if c['USE_CURL'] else None},
    'CURL_USER_AGENT': {'default': lambda c: c['CURL_USER_AGENT'].format(**c)},
    'CURL_ARGS': {'default': lambda c: c['CURL_ARGS'] or []},
    'SAVE_FAVICON': {'default': lambda c: c['USE_CURL'] and c['SAVE_FAVICON']},
    'SAVE_ARCHIVE_DOT_ORG': {'default': lambda c: c['USE_CURL'] and c['SAVE_ARCHIVE_DOT_ORG']},
    'USE_WGET': {'default': lambda c: c['USE_WGET'] and (c['SAVE_WGET'] or c['SAVE_WARC'])},
    'WGET_VERSION': {'default': lambda c: bin_version(c['WGET_BINARY']) if c['USE_WGET'] else None},
    'WGET_AUTO_COMPRESSION': {'default': lambda c: wget_supports_compression(c) if c['USE_WGET'] else False},
    'WGET_USER_AGENT': {'default': lambda c: c['WGET_USER_AGENT'].format(**c)},
    'SAVE_WGET': {'default': lambda c: c['USE_WGET'] and c['SAVE_WGET']},
    'SAVE_WARC': {'default': lambda c: c['USE_WGET'] and c['SAVE_WARC']},
    'WGET_ARGS': {'default': lambda c: c['WGET_ARGS'] or []},
    'RIPGREP_VERSION': {'default': lambda c: bin_version(c['RIPGREP_BINARY']) if c['USE_RIPGREP'] else None},
    'USE_SINGLEFILE': {'default': lambda c: c['USE_SINGLEFILE'] and c['SAVE_SINGLEFILE']},
    'SINGLEFILE_VERSION': {'default': lambda c: bin_version(c['SINGLEFILE_BINARY']) if c['USE_SINGLEFILE'] else None},
    'USE_READABILITY': {'default': lambda c: c['USE_READABILITY'] and c['SAVE_READABILITY']},
    'READABILITY_VERSION': {'default': lambda c: bin_version(c['READABILITY_BINARY']) if c['USE_READABILITY'] else None},
    'USE_MERCURY': {'default': lambda c: c['USE_MERCURY'] and c['SAVE_MERCURY']},
    'MERCURY_VERSION': {'default': lambda c: '1.0.0' if shutil.which(str(bin_path(c['MERCURY_BINARY']))) else None},  # mercury is unversioned
    'USE_GIT': {'default': lambda c: c['USE_GIT'] and c['SAVE_GIT']},
    'GIT_VERSION': {'default': lambda c: bin_version(c['GIT_BINARY']) if c['USE_GIT'] else None},
    'SAVE_GIT': {'default': lambda c: c['USE_GIT'] and c['SAVE_GIT']},
    'USE_YOUTUBEDL': {'default': lambda c: c['USE_YOUTUBEDL'] and c['SAVE_MEDIA']},
    'YOUTUBEDL_VERSION': {'default': lambda c: bin_version(c['YOUTUBEDL_BINARY']) if c['USE_YOUTUBEDL'] else None},
    'SAVE_MEDIA': {'default': lambda c: c['USE_YOUTUBEDL'] and c['SAVE_MEDIA']},
    'YOUTUBEDL_ARGS': {'default': lambda c: c['YOUTUBEDL_ARGS'] or []},
    'CHROME_BINARY': {'default': lambda c: c['CHROME_BINARY'] or find_chrome_binary()},
    'USE_CHROME': {'default': lambda c: c['USE_CHROME'] and c['CHROME_BINARY'] and (c['SAVE_PDF'] or c['SAVE_SCREENSHOT'] or c['SAVE_DOM'] or c['SAVE_SINGLEFILE'])},
    'CHROME_VERSION': {'default': lambda c: bin_version(c['CHROME_BINARY']) if c['USE_CHROME'] else None},
    'SAVE_PDF': {'default': lambda c: c['USE_CHROME'] and c['SAVE_PDF']},
    'SAVE_SCREENSHOT': {'default': lambda c: c['USE_CHROME'] and c['SAVE_SCREENSHOT']},
    'SAVE_DOM': {'default': lambda c: c['USE_CHROME'] and c['SAVE_DOM']},
    'SAVE_SINGLEFILE': {'default': lambda c: c['USE_CHROME'] and c['SAVE_SINGLEFILE'] and c['USE_NODE']},
    'SAVE_READABILITY': {'default': lambda c: c['USE_READABILITY'] and c['USE_NODE']},
    'SAVE_MERCURY': {'default': lambda c: c['USE_MERCURY'] and c['USE_NODE']},
    'USE_NODE': {'default': lambda c: c['USE_NODE'] and (c['SAVE_READABILITY'] or c['SAVE_SINGLEFILE'] or c['SAVE_MERCURY'])},
    'NODE_VERSION': {'default': lambda c: bin_version(c['NODE_BINARY']) if c['USE_NODE'] else None},

    # summary blobs built by helper functions (defined elsewhere in this module)
    'DEPENDENCIES': {'default': lambda c: get_dependency_info(c)},
    'CODE_LOCATIONS': {'default': lambda c: get_code_locations(c)},
    'EXTERNAL_LOCATIONS': {'default': lambda c: get_external_locations(c)},
    'DATA_LOCATIONS': {'default': lambda c: get_data_locations(c)},
    'CHROME_OPTIONS': {'default': lambda c: get_chrome_info(c)},
}
  347. ################################### Helpers ####################################
  348. def load_config_val(key: str,
  349. default: ConfigDefaultValue=None,
  350. type: Optional[Type]=None,
  351. aliases: Optional[Tuple[str, ...]]=None,
  352. config: Optional[ConfigDict]=None,
  353. env_vars: Optional[os._Environ]=None,
  354. config_file_vars: Optional[Dict[str, str]]=None) -> ConfigValue:
  355. """parse bool, int, and str key=value pairs from env"""
  356. config_keys_to_check = (key, *(aliases or ()))
  357. for key in config_keys_to_check:
  358. if env_vars:
  359. val = env_vars.get(key)
  360. if val:
  361. break
  362. if config_file_vars:
  363. val = config_file_vars.get(key)
  364. if val:
  365. break
  366. if type is None or val is None:
  367. if callable(default):
  368. assert isinstance(config, dict)
  369. return default(config)
  370. return default
  371. elif type is bool:
  372. if val.lower() in ('true', 'yes', '1'):
  373. return True
  374. elif val.lower() in ('false', 'no', '0'):
  375. return False
  376. else:
  377. raise ValueError(f'Invalid configuration option {key}={val} (expected a boolean: True/False)')
  378. elif type is str:
  379. if val.lower() in ('true', 'false', 'yes', 'no', '1', '0'):
  380. raise ValueError(f'Invalid configuration option {key}={val} (expected a string)')
  381. return val.strip()
  382. elif type is int:
  383. if not val.isdigit():
  384. raise ValueError(f'Invalid configuration option {key}={val} (expected an integer)')
  385. return int(val)
  386. elif type is list or type is dict:
  387. return json.loads(val)
  388. raise Exception('Config values can only be str, bool, int or json')
  389. def load_config_file(out_dir: str=None) -> Optional[Dict[str, str]]:
  390. """load the ini-formatted config file from OUTPUT_DIR/Archivebox.conf"""
  391. out_dir = out_dir or Path(os.getenv('OUTPUT_DIR', '.')).resolve()
  392. config_path = Path(out_dir) / CONFIG_FILENAME
  393. if config_path.exists():
  394. config_file = ConfigParser()
  395. config_file.optionxform = str
  396. config_file.read(config_path)
  397. # flatten into one namespace
  398. config_file_vars = {
  399. key.upper(): val
  400. for section, options in config_file.items()
  401. for key, val in options.items()
  402. }
  403. # print('[i] Loaded config file', os.path.abspath(config_path))
  404. # print(config_file_vars)
  405. return config_file_vars
  406. return None
def write_config_file(config: Dict[str, str], out_dir: str=None) -> ConfigDict:
    """write the given key=value pairs into OUTPUT_DIR/ArchiveBox.conf (INI format),
    validate the result by re-parsing it (rolling back on failure), and return the
    re-parsed values for the keys that were set"""

    from .system import atomic_write

    # header prepended when the config file is created for the first time
    CONFIG_HEADER = (
    """# This is the config file for your ArchiveBox collection.
#
# You can add options here manually in INI format, or automatically by running:
# archivebox config --set KEY=VALUE
#
# If you modify this file manually, make sure to update your archive after by running:
# archivebox init
#
# A list of all possible config with documentation and examples can be found here:
# https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration
""")

    out_dir = out_dir or Path(os.getenv('OUTPUT_DIR', '.')).resolve()
    config_path = Path(out_dir) / CONFIG_FILENAME

    if not config_path.exists():
        atomic_write(config_path, CONFIG_HEADER)

    config_file = ConfigParser()
    config_file.optionxform = str   # preserve key case instead of lowercasing
    config_file.read(config_path)

    # snapshot the current file so we can roll back if validation fails below
    with open(config_path, 'r', encoding='utf-8') as old:
        atomic_write(f'{config_path}.bak', old.read())

    # map a config key back to its section name in CONFIG_SCHEMA
    find_section = lambda key: [name for name, opts in CONFIG_SCHEMA.items() if key in opts][0]

    # Set up sections in empty config file
    for key, val in config.items():
        section = find_section(key)
        if section in config_file:
            existing_config = dict(config_file[section])
        else:
            existing_config = {}
        config_file[section] = {**existing_config, key: val}

    # always make sure there's a SECRET_KEY defined for Django
    existing_secret_key = None
    if 'SERVER_CONFIG' in config_file and 'SECRET_KEY' in config_file['SERVER_CONFIG']:
        existing_secret_key = config_file['SERVER_CONFIG']['SECRET_KEY']

    if (not existing_secret_key) or ('not a valid secret' in existing_secret_key):
        from django.utils.crypto import get_random_string
        chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
        random_secret_key = get_random_string(50, chars)
        if 'SERVER_CONFIG' in config_file:
            config_file['SERVER_CONFIG']['SECRET_KEY'] = random_secret_key
        else:
            config_file['SERVER_CONFIG'] = {'SECRET_KEY': random_secret_key}

    with open(config_path, 'w+', encoding='utf-8') as new:
        config_file.write(new)

    try:
        # validate the config by attempting to re-parse it
        CONFIG = load_all_config()
    except BaseException:                                                       # lgtm [py/catch-base-exception]
        # something went horribly wrong, revert to the previous version
        with open(f'{config_path}.bak', 'r', encoding='utf-8') as old:
            atomic_write(config_path, old.read())
        raise

    # validation succeeded, the backup is no longer needed
    if Path(f'{config_path}.bak').exists():
        os.remove(f'{config_path}.bak')

    return {
        key.upper(): CONFIG.get(key.upper())
        for key in config.keys()
    }
  468. def load_config(defaults: ConfigDefaultDict,
  469. config: Optional[ConfigDict]=None,
  470. out_dir: Optional[str]=None,
  471. env_vars: Optional[os._Environ]=None,
  472. config_file_vars: Optional[Dict[str, str]]=None) -> ConfigDict:
  473. env_vars = env_vars or os.environ
  474. config_file_vars = config_file_vars or load_config_file(out_dir=out_dir)
  475. extended_config: ConfigDict = config.copy() if config else {}
  476. for key, default in defaults.items():
  477. try:
  478. extended_config[key] = load_config_val(
  479. key,
  480. default=default['default'],
  481. type=default.get('type'),
  482. aliases=default.get('aliases'),
  483. config=extended_config,
  484. env_vars=env_vars,
  485. config_file_vars=config_file_vars,
  486. )
  487. except KeyboardInterrupt:
  488. raise SystemExit(0)
  489. except Exception as e:
  490. stderr()
  491. stderr(f'[X] Error while loading configuration value: {key}', color='red', config=extended_config)
  492. stderr(' {}: {}'.format(e.__class__.__name__, e))
  493. stderr()
  494. stderr(' Check your config for mistakes and try again (your archive data is unaffected).')
  495. stderr()
  496. stderr(' For config documentation and examples see:')
  497. stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration')
  498. stderr()
  499. # raise
  500. raise SystemExit(2)
  501. return extended_config
  502. # def write_config(config: ConfigDict):
  503. # with open(os.path.join(config['OUTPUT_DIR'], CONFIG_FILENAME), 'w+') as f:
  504. # Logging Helpers
  505. def stdout(*args, color: Optional[str]=None, prefix: str='', config: Optional[ConfigDict]=None) -> None:
  506. ansi = DEFAULT_CLI_COLORS if (config or {}).get('USE_COLOR') else ANSI
  507. if color:
  508. strs = [ansi[color], ' '.join(str(a) for a in args), ansi['reset'], '\n']
  509. else:
  510. strs = [' '.join(str(a) for a in args), '\n']
  511. sys.stdout.write(prefix + ''.join(strs))
  512. def stderr(*args, color: Optional[str]=None, prefix: str='', config: Optional[ConfigDict]=None) -> None:
  513. ansi = DEFAULT_CLI_COLORS if (config or {}).get('USE_COLOR') else ANSI
  514. if color:
  515. strs = [ansi[color], ' '.join(str(a) for a in args), ansi['reset'], '\n']
  516. else:
  517. strs = [' '.join(str(a) for a in args), '\n']
  518. sys.stderr.write(prefix + ''.join(strs))
  519. def hint(text: Union[Tuple[str, ...], List[str], str], prefix=' ', config: Optional[ConfigDict]=None) -> None:
  520. ansi = DEFAULT_CLI_COLORS if (config or {}).get('USE_COLOR') else ANSI
  521. if isinstance(text, str):
  522. stderr('{}{lightred}Hint:{reset} {}'.format(prefix, text, **ansi))
  523. else:
  524. stderr('{}{lightred}Hint:{reset} {}'.format(prefix, text[0], **ansi))
  525. for line in text[1:]:
  526. stderr('{} {}'.format(prefix, line))
  527. # Dependency Metadata Helpers
  528. def bin_version(binary: Optional[str]) -> Optional[str]:
  529. """check the presence and return valid version line of a specified binary"""
  530. abspath = bin_path(binary)
  531. if not binary or not abspath:
  532. return None
  533. try:
  534. version_str = run([abspath, "--version"], stdout=PIPE, env={'LANG': 'C'}).stdout.strip().decode()
  535. # take first 3 columns of first line of version info
  536. return ' '.join(version_str.split('\n')[0].strip().split()[:3])
  537. except OSError:
  538. pass
  539. # stderr(f'[X] Unable to find working version of dependency: {binary}', color='red')
  540. # stderr(' Make sure it\'s installed, then confirm it\'s working by running:')
  541. # stderr(f' {binary} --version')
  542. # stderr()
  543. # stderr(' If you don\'t want to install it, you can disable it via config. See here for more info:')
  544. # stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Install')
  545. return None
  546. def bin_path(binary: Optional[str]) -> Optional[str]:
  547. if binary is None:
  548. return None
  549. node_modules_bin = Path('.') / 'node_modules' / '.bin' / binary
  550. if node_modules_bin.exists():
  551. return str(node_modules_bin.resolve())
  552. return shutil.which(str(Path(binary).expanduser())) or shutil.which(str(binary)) or binary
  553. def bin_hash(binary: Optional[str]) -> Optional[str]:
  554. if binary is None:
  555. return None
  556. abs_path = bin_path(binary)
  557. if abs_path is None or not Path(abs_path).exists():
  558. return None
  559. file_hash = md5()
  560. with io.open(abs_path, mode='rb') as f:
  561. for chunk in iter(lambda: f.read(io.DEFAULT_BUFFER_SIZE), b''):
  562. file_hash.update(chunk)
  563. return f'md5:{file_hash.hexdigest()}'
  564. def find_chrome_binary() -> Optional[str]:
  565. """find any installed chrome binaries in the default locations"""
  566. # Precedence: Chromium, Chrome, Beta, Canary, Unstable, Dev
  567. # make sure data dir finding precedence order always matches binary finding order
  568. default_executable_paths = (
  569. 'chromium-browser',
  570. 'chromium',
  571. '/Applications/Chromium.app/Contents/MacOS/Chromium',
  572. 'chrome',
  573. 'google-chrome',
  574. '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome',
  575. 'google-chrome-stable',
  576. 'google-chrome-beta',
  577. 'google-chrome-canary',
  578. '/Applications/Google Chrome Canary.app/Contents/MacOS/Google Chrome Canary',
  579. 'google-chrome-unstable',
  580. 'google-chrome-dev',
  581. )
  582. for name in default_executable_paths:
  583. full_path_exists = shutil.which(name)
  584. if full_path_exists:
  585. return name
  586. return None
  587. def find_chrome_data_dir() -> Optional[str]:
  588. """find any installed chrome user data directories in the default locations"""
  589. # Precedence: Chromium, Chrome, Beta, Canary, Unstable, Dev
  590. # make sure data dir finding precedence order always matches binary finding order
  591. default_profile_paths = (
  592. '~/.config/chromium',
  593. '~/Library/Application Support/Chromium',
  594. '~/AppData/Local/Chromium/User Data',
  595. '~/.config/chrome',
  596. '~/.config/google-chrome',
  597. '~/Library/Application Support/Google/Chrome',
  598. '~/AppData/Local/Google/Chrome/User Data',
  599. '~/.config/google-chrome-stable',
  600. '~/.config/google-chrome-beta',
  601. '~/Library/Application Support/Google/Chrome Canary',
  602. '~/AppData/Local/Google/Chrome SxS/User Data',
  603. '~/.config/google-chrome-unstable',
  604. '~/.config/google-chrome-dev',
  605. )
  606. for path in default_profile_paths:
  607. full_path = Path(path).resolve()
  608. if full_path.exists():
  609. return full_path
  610. return None
  611. def wget_supports_compression(config):
  612. try:
  613. cmd = [
  614. config['WGET_BINARY'],
  615. "--compression=auto",
  616. "--help",
  617. ]
  618. return not run(cmd, stdout=DEVNULL, stderr=DEVNULL).returncode
  619. except (FileNotFoundError, OSError):
  620. return False
  621. def get_code_locations(config: ConfigDict) -> SimpleConfigValueDict:
  622. return {
  623. 'PACKAGE_DIR': {
  624. 'path': (config['PACKAGE_DIR']).resolve(),
  625. 'enabled': True,
  626. 'is_valid': (config['PACKAGE_DIR'] / '__main__.py').exists(),
  627. },
  628. 'TEMPLATES_DIR': {
  629. 'path': (config['TEMPLATES_DIR']).resolve(),
  630. 'enabled': True,
  631. 'is_valid': (config['TEMPLATES_DIR'] / 'static').exists(),
  632. },
  633. 'CUSTOM_TEMPLATES_DIR': {
  634. 'path': config['CUSTOM_TEMPLATES_DIR'] and Path(config['CUSTOM_TEMPLATES_DIR']).resolve(),
  635. 'enabled': bool(config['CUSTOM_TEMPLATES_DIR']),
  636. 'is_valid': config['CUSTOM_TEMPLATES_DIR'] and Path(config['CUSTOM_TEMPLATES_DIR']).exists(),
  637. },
  638. # 'NODE_MODULES_DIR': {
  639. # 'path': ,
  640. # 'enabled': ,
  641. # 'is_valid': (...).exists(),
  642. # },
  643. }
  644. def get_external_locations(config: ConfigDict) -> ConfigValue:
  645. abspath = lambda path: None if path is None else Path(path).resolve()
  646. return {
  647. 'CHROME_USER_DATA_DIR': {
  648. 'path': abspath(config['CHROME_USER_DATA_DIR']),
  649. 'enabled': config['USE_CHROME'] and config['CHROME_USER_DATA_DIR'],
  650. 'is_valid': False if config['CHROME_USER_DATA_DIR'] is None else (Path(config['CHROME_USER_DATA_DIR']) / 'Default').exists(),
  651. },
  652. 'COOKIES_FILE': {
  653. 'path': abspath(config['COOKIES_FILE']),
  654. 'enabled': config['USE_WGET'] and config['COOKIES_FILE'],
  655. 'is_valid': False if config['COOKIES_FILE'] is None else Path(config['COOKIES_FILE']).exists(),
  656. },
  657. }
  658. def get_data_locations(config: ConfigDict) -> ConfigValue:
  659. return {
  660. 'OUTPUT_DIR': {
  661. 'path': config['OUTPUT_DIR'].resolve(),
  662. 'enabled': True,
  663. 'is_valid': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).exists(),
  664. },
  665. 'SOURCES_DIR': {
  666. 'path': config['SOURCES_DIR'].resolve(),
  667. 'enabled': True,
  668. 'is_valid': config['SOURCES_DIR'].exists(),
  669. },
  670. 'LOGS_DIR': {
  671. 'path': config['LOGS_DIR'].resolve(),
  672. 'enabled': True,
  673. 'is_valid': config['LOGS_DIR'].exists(),
  674. },
  675. 'ARCHIVE_DIR': {
  676. 'path': config['ARCHIVE_DIR'].resolve(),
  677. 'enabled': True,
  678. 'is_valid': config['ARCHIVE_DIR'].exists(),
  679. },
  680. 'CONFIG_FILE': {
  681. 'path': config['CONFIG_FILE'].resolve(),
  682. 'enabled': True,
  683. 'is_valid': config['CONFIG_FILE'].exists(),
  684. },
  685. 'SQL_INDEX': {
  686. 'path': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).resolve(),
  687. 'enabled': True,
  688. 'is_valid': (config['OUTPUT_DIR'] / SQL_INDEX_FILENAME).exists(),
  689. },
  690. }
  691. def get_dependency_info(config: ConfigDict) -> ConfigValue:
  692. return {
  693. 'ARCHIVEBOX_BINARY': {
  694. 'path': bin_path(config['ARCHIVEBOX_BINARY']),
  695. 'version': config['VERSION'],
  696. 'hash': bin_hash(config['ARCHIVEBOX_BINARY']),
  697. 'enabled': True,
  698. 'is_valid': True,
  699. },
  700. 'PYTHON_BINARY': {
  701. 'path': bin_path(config['PYTHON_BINARY']),
  702. 'version': config['PYTHON_VERSION'],
  703. 'hash': bin_hash(config['PYTHON_BINARY']),
  704. 'enabled': True,
  705. 'is_valid': bool(config['PYTHON_VERSION']),
  706. },
  707. 'DJANGO_BINARY': {
  708. 'path': bin_path(config['DJANGO_BINARY']),
  709. 'version': config['DJANGO_VERSION'],
  710. 'hash': bin_hash(config['DJANGO_BINARY']),
  711. 'enabled': True,
  712. 'is_valid': bool(config['DJANGO_VERSION']),
  713. },
  714. 'CURL_BINARY': {
  715. 'path': bin_path(config['CURL_BINARY']),
  716. 'version': config['CURL_VERSION'],
  717. 'hash': bin_hash(config['CURL_BINARY']),
  718. 'enabled': config['USE_CURL'],
  719. 'is_valid': bool(config['CURL_VERSION']),
  720. },
  721. 'WGET_BINARY': {
  722. 'path': bin_path(config['WGET_BINARY']),
  723. 'version': config['WGET_VERSION'],
  724. 'hash': bin_hash(config['WGET_BINARY']),
  725. 'enabled': config['USE_WGET'],
  726. 'is_valid': bool(config['WGET_VERSION']),
  727. },
  728. 'NODE_BINARY': {
  729. 'path': bin_path(config['NODE_BINARY']),
  730. 'version': config['NODE_VERSION'],
  731. 'hash': bin_hash(config['NODE_BINARY']),
  732. 'enabled': config['USE_NODE'],
  733. 'is_valid': bool(config['NODE_VERSION']),
  734. },
  735. 'SINGLEFILE_BINARY': {
  736. 'path': bin_path(config['SINGLEFILE_BINARY']),
  737. 'version': config['SINGLEFILE_VERSION'],
  738. 'hash': bin_hash(config['SINGLEFILE_BINARY']),
  739. 'enabled': config['USE_SINGLEFILE'],
  740. 'is_valid': bool(config['SINGLEFILE_VERSION']),
  741. },
  742. 'READABILITY_BINARY': {
  743. 'path': bin_path(config['READABILITY_BINARY']),
  744. 'version': config['READABILITY_VERSION'],
  745. 'hash': bin_hash(config['READABILITY_BINARY']),
  746. 'enabled': config['USE_READABILITY'],
  747. 'is_valid': bool(config['READABILITY_VERSION']),
  748. },
  749. 'MERCURY_BINARY': {
  750. 'path': bin_path(config['MERCURY_BINARY']),
  751. 'version': config['MERCURY_VERSION'],
  752. 'hash': bin_hash(config['MERCURY_BINARY']),
  753. 'enabled': config['USE_MERCURY'],
  754. 'is_valid': bool(config['MERCURY_VERSION']),
  755. },
  756. 'GIT_BINARY': {
  757. 'path': bin_path(config['GIT_BINARY']),
  758. 'version': config['GIT_VERSION'],
  759. 'hash': bin_hash(config['GIT_BINARY']),
  760. 'enabled': config['USE_GIT'],
  761. 'is_valid': bool(config['GIT_VERSION']),
  762. },
  763. 'YOUTUBEDL_BINARY': {
  764. 'path': bin_path(config['YOUTUBEDL_BINARY']),
  765. 'version': config['YOUTUBEDL_VERSION'],
  766. 'hash': bin_hash(config['YOUTUBEDL_BINARY']),
  767. 'enabled': config['USE_YOUTUBEDL'],
  768. 'is_valid': bool(config['YOUTUBEDL_VERSION']),
  769. },
  770. 'CHROME_BINARY': {
  771. 'path': bin_path(config['CHROME_BINARY']),
  772. 'version': config['CHROME_VERSION'],
  773. 'hash': bin_hash(config['CHROME_BINARY']),
  774. 'enabled': config['USE_CHROME'],
  775. 'is_valid': bool(config['CHROME_VERSION']),
  776. },
  777. 'RIPGREP_BINARY': {
  778. 'path': bin_path(config['RIPGREP_BINARY']),
  779. 'version': config['RIPGREP_VERSION'],
  780. 'hash': bin_hash(config['RIPGREP_BINARY']),
  781. 'enabled': config['USE_RIPGREP'],
  782. 'is_valid': bool(config['RIPGREP_VERSION']),
  783. },
  784. # TODO: add an entry for the sonic search backend?
  785. # 'SONIC_BINARY': {
  786. # 'path': bin_path(config['SONIC_BINARY']),
  787. # 'version': config['SONIC_VERSION'],
  788. # 'hash': bin_hash(config['SONIC_BINARY']),
  789. # 'enabled': config['USE_SONIC'],
  790. # 'is_valid': bool(config['SONIC_VERSION']),
  791. # },
  792. }
  793. def get_chrome_info(config: ConfigDict) -> ConfigValue:
  794. return {
  795. 'TIMEOUT': config['TIMEOUT'],
  796. 'RESOLUTION': config['RESOLUTION'],
  797. 'CHECK_SSL_VALIDITY': config['CHECK_SSL_VALIDITY'],
  798. 'CHROME_BINARY': bin_path(config['CHROME_BINARY']),
  799. 'CHROME_HEADLESS': config['CHROME_HEADLESS'],
  800. 'CHROME_SANDBOX': config['CHROME_SANDBOX'],
  801. 'CHROME_USER_AGENT': config['CHROME_USER_AGENT'],
  802. 'CHROME_USER_DATA_DIR': config['CHROME_USER_DATA_DIR'],
  803. }
  804. # ******************************************************************************
  805. # ******************************************************************************
  806. # ******************************** Load Config *********************************
  807. # ******* (compile the defaults, configs, and metadata all into CONFIG) ********
  808. # ******************************************************************************
  809. # ******************************************************************************
  810. def load_all_config():
  811. CONFIG: ConfigDict = {}
  812. for section_name, section_config in CONFIG_SCHEMA.items():
  813. CONFIG = load_config(section_config, CONFIG)
  814. return load_config(DYNAMIC_CONFIG_SCHEMA, CONFIG)
# add all final config values in CONFIG to globals in this file
CONFIG = load_all_config()
globals().update(CONFIG)
# this lets us do: from .config import DEBUG, MEDIA_TIMEOUT, ...


# ******************************************************************************
# ******************************************************************************
# ******************************************************************************
# ******************************************************************************
# ******************************************************************************


########################### System Environment Setup ###########################


# Set timezone to UTC and umask to OUTPUT_PERMISSIONS
os.environ["TZ"] = 'UTC'
# umask is the complement of the desired dir permission bits (e.g. 0o755 -> umask 0o022)
os.umask(0o777 - int(DIR_OUTPUT_PERMISSIONS, base=8)) # noqa: F821

# add ./node_modules/.bin to $PATH so we can use node scripts in extractors
# NOTE(review): this appends to sys.path (Python's *import* path), not to
# os.environ['PATH'] as the comment above claims -- confirm which was intended.
NODE_BIN_PATH = str((Path(CONFIG["OUTPUT_DIR"]).absolute() / 'node_modules' / '.bin'))
sys.path.append(NODE_BIN_PATH)

# disable stderr "you really shouldnt disable ssl" warnings with library config
if not CONFIG['CHECK_SSL_VALIDITY']:
    import urllib3
    import requests
    requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
  837. ########################### Config Validity Checkers ###########################
  838. def check_system_config(config: ConfigDict=CONFIG) -> None:
  839. ### Check system environment
  840. if config['USER'] == 'root':
  841. stderr('[!] ArchiveBox should never be run as root!', color='red')
  842. stderr(' For more information, see the security overview documentation:')
  843. stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Security-Overview#do-not-run-as-root')
  844. raise SystemExit(2)
  845. ### Check Python environment
  846. if sys.version_info[:3] < (3, 6, 0):
  847. stderr(f'[X] Python version is not new enough: {config["PYTHON_VERSION"]} (>3.6 is required)', color='red')
  848. stderr(' See https://github.com/ArchiveBox/ArchiveBox/wiki/Troubleshooting#python for help upgrading your Python installation.')
  849. raise SystemExit(2)
  850. if int(CONFIG['DJANGO_VERSION'].split('.')[0]) < 3:
  851. stderr(f'[X] Django version is not new enough: {config["DJANGO_VERSION"]} (>3.0 is required)', color='red')
  852. stderr(' Upgrade django using pip or your system package manager: pip3 install --upgrade django')
  853. raise SystemExit(2)
  854. if config['PYTHON_ENCODING'] not in ('UTF-8', 'UTF8'):
  855. stderr(f'[X] Your system is running python3 scripts with a bad locale setting: {config["PYTHON_ENCODING"]} (it should be UTF-8).', color='red')
  856. stderr(' To fix it, add the line "export PYTHONIOENCODING=UTF-8" to your ~/.bashrc file (without quotes)')
  857. stderr(' Or if you\'re using ubuntu/debian, run "dpkg-reconfigure locales"')
  858. stderr('')
  859. stderr(' Confirm that it\'s fixed by opening a new shell and running:')
  860. stderr(' python3 -c "import sys; print(sys.stdout.encoding)" # should output UTF-8')
  861. raise SystemExit(2)
  862. # stderr('[i] Using Chrome binary: {}'.format(shutil.which(CHROME_BINARY) or CHROME_BINARY))
  863. # stderr('[i] Using Chrome data dir: {}'.format(os.path.abspath(CHROME_USER_DATA_DIR)))
  864. if config['CHROME_USER_DATA_DIR'] is not None:
  865. if not (Path(config['CHROME_USER_DATA_DIR']) / 'Default').exists():
  866. stderr('[X] Could not find profile "Default" in CHROME_USER_DATA_DIR.', color='red')
  867. stderr(f' {config["CHROME_USER_DATA_DIR"]}')
  868. stderr(' Make sure you set it to a Chrome user data directory containing a Default profile folder.')
  869. stderr(' For more info see:')
  870. stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#CHROME_USER_DATA_DIR')
  871. if '/Default' in str(config['CHROME_USER_DATA_DIR']):
  872. stderr()
  873. stderr(' Try removing /Default from the end e.g.:')
  874. stderr(' CHROME_USER_DATA_DIR="{}"'.format(config['CHROME_USER_DATA_DIR'].split('/Default')[0]))
  875. raise SystemExit(2)
  876. def check_dependencies(config: ConfigDict=CONFIG, show_help: bool=True) -> None:
  877. invalid_dependencies = [
  878. (name, info) for name, info in config['DEPENDENCIES'].items()
  879. if info['enabled'] and not info['is_valid']
  880. ]
  881. if invalid_dependencies and show_help:
  882. stderr(f'[!] Warning: Missing {len(invalid_dependencies)} recommended dependencies', color='lightyellow')
  883. for dependency, info in invalid_dependencies:
  884. stderr(
  885. ' ! {}: {} ({})'.format(
  886. dependency,
  887. info['path'] or 'unable to find binary',
  888. info['version'] or 'unable to detect version',
  889. )
  890. )
  891. if dependency in ('YOUTUBEDL_BINARY', 'CHROME_BINARY', 'SINGLEFILE_BINARY', 'READABILITY_BINARY', 'MERCURY_BINARY'):
  892. hint(('To install all packages automatically run: archivebox setup',
  893. f'or to disable it and silence this warning: archivebox config --set SAVE_{dependency.rsplit("_", 1)[0]}=False',
  894. ''), prefix=' ')
  895. stderr('')
  896. if config['TIMEOUT'] < 5:
  897. stderr(f'[!] Warning: TIMEOUT is set too low! (currently set to TIMEOUT={config["TIMEOUT"]} seconds)', color='red')
  898. stderr(' You must allow *at least* 5 seconds for indexing and archive methods to run succesfully.')
  899. stderr(' (Setting it to somewhere between 30 and 3000 seconds is recommended)')
  900. stderr()
  901. stderr(' If you want to make ArchiveBox run faster, disable specific archive methods instead:')
  902. stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#archive-method-toggles')
  903. stderr()
  904. elif config['USE_CHROME'] and config['TIMEOUT'] < 15:
  905. stderr(f'[!] Warning: TIMEOUT is set too low! (currently set to TIMEOUT={config["TIMEOUT"]} seconds)', color='red')
  906. stderr(' Chrome will fail to archive all sites if set to less than ~15 seconds.')
  907. stderr(' (Setting it to somewhere between 30 and 300 seconds is recommended)')
  908. stderr()
  909. stderr(' If you want to make ArchiveBox run faster, disable specific archive methods instead:')
  910. stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#archive-method-toggles')
  911. stderr()
  912. if config['USE_YOUTUBEDL'] and config['MEDIA_TIMEOUT'] < 20:
  913. stderr(f'[!] Warning: MEDIA_TIMEOUT is set too low! (currently set to MEDIA_TIMEOUT={config["MEDIA_TIMEOUT"]} seconds)', color='red')
  914. stderr(' Youtube-dl will fail to archive all media if set to less than ~20 seconds.')
  915. stderr(' (Setting it somewhere over 60 seconds is recommended)')
  916. stderr()
  917. stderr(' If you want to disable media archiving entirely, set SAVE_MEDIA=False instead:')
  918. stderr(' https://github.com/ArchiveBox/ArchiveBox/wiki/Configuration#save_media')
  919. stderr()
  920. def check_data_folder(out_dir: Union[str, Path, None]=None, config: ConfigDict=CONFIG) -> None:
  921. output_dir = out_dir or config['OUTPUT_DIR']
  922. assert isinstance(output_dir, (str, Path))
  923. archive_dir_exists = (Path(output_dir) / ARCHIVE_DIR_NAME).exists()
  924. if not archive_dir_exists:
  925. stderr('[X] No archivebox index found in the current directory.', color='red')
  926. stderr(f' {output_dir}', color='lightyellow')
  927. stderr()
  928. stderr(' {lightred}Hint{reset}: Are you running archivebox in the right folder?'.format(**config['ANSI']))
  929. stderr(' cd path/to/your/archive/folder')
  930. stderr(' archivebox [command]')
  931. stderr()
  932. stderr(' {lightred}Hint{reset}: To create a new archive collection or import existing data in this folder, run:'.format(**config['ANSI']))
  933. stderr(' archivebox init')
  934. raise SystemExit(2)
  935. def check_migrations(out_dir: Union[str, Path, None]=None, config: ConfigDict=CONFIG):
  936. output_dir = out_dir or config['OUTPUT_DIR']
  937. from .index.sql import list_migrations
  938. pending_migrations = [name for status, name in list_migrations() if not status]
  939. if pending_migrations:
  940. stderr('[X] This collection was created with an older version of ArchiveBox and must be upgraded first.', color='lightyellow')
  941. stderr(f' {output_dir}')
  942. stderr()
  943. stderr(f' To upgrade it to the latest version and apply the {len(pending_migrations)} pending migrations, run:')
  944. stderr(' archivebox init')
  945. raise SystemExit(3)
  946. (Path(output_dir) / SOURCES_DIR_NAME).mkdir(exist_ok=True)
  947. (Path(output_dir) / LOGS_DIR_NAME).mkdir(exist_ok=True)
def setup_django(out_dir: Optional[Path]=None, check_db=False, config: ConfigDict=CONFIG, in_memory_db=False) -> None:
    """Configure and boot Django for the collection in out_dir (or config['OUTPUT_DIR']).

    Validates the system config first, points Django at archivebox/core/settings.py,
    optionally uses an in-memory sqlite3 DB (migrated immediately), logs the current
    invocation to the Django error log, and -- when check_db is True -- enables WAL
    mode, ensures the cache table exists, closes stale connections, and asserts the
    index DB file is present. Raises SystemExit(2) on KeyboardInterrupt.
    """
    check_system_config()
    output_dir = out_dir or Path(config['OUTPUT_DIR'])
    assert isinstance(output_dir, Path) and isinstance(config['PACKAGE_DIR'], Path)
    try:
        from django.core.management import call_command

        # make archivebox/core importable and tell settings.py where the data dir is
        sys.path.append(str(config['PACKAGE_DIR']))
        os.environ.setdefault('OUTPUT_DIR', str(output_dir))
        assert (config['PACKAGE_DIR'] / 'core' / 'settings.py').exists(), 'settings.py was not found at archivebox/core/settings.py'
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'core.settings')

        # Check to make sure JSON extension is available in our Sqlite3 instance
        try:
            cursor = sqlite3.connect(':memory:').cursor()
            cursor.execute('SELECT JSON(\'{"a": "b"}\')')
        except sqlite3.OperationalError as exc:
            # non-fatal: warn and continue; Django will fail later if JSON1 is truly required
            stderr(f'[X] Your SQLite3 version is missing the required JSON1 extension: {exc}', color='red')
            hint([
                'Upgrade your Python version or install the extension manually:',
                'https://code.djangoproject.com/wiki/JSON1Extension'
            ])

        if in_memory_db:
            # some commands (e.g. oneshot) dont store a long-lived sqlite3 db file on disk.
            # in those cases we create a temporary in-memory db and run the migrations
            # immediately to get a usable in-memory-database at startup
            os.environ.setdefault("ARCHIVEBOX_DATABASE_NAME", ":memory:")
            django.setup()
            call_command("migrate", interactive=False, verbosity=0)
        else:
            # Otherwise use default sqlite3 file-based database and initialize django
            # without running migrations automatically (user runs them manually by calling init)
            django.setup()

        from django.conf import settings

        # log startup message to the error log
        with open(settings.ERROR_LOG, "a", encoding='utf-8') as f:
            command = ' '.join(sys.argv)
            ts = datetime.now(timezone.utc).strftime('%Y-%m-%d__%H:%M:%S')
            f.write(f"\n> {command}; ts={ts} version={config['VERSION']} docker={config['IN_DOCKER']} is_tty={config['IS_TTY']}\n")

        if check_db:
            # Enable WAL mode in sqlite3
            from django.db import connection
            with connection.cursor() as cursor:
                # NOTE(review): cursor.execute() returns a cursor, not the mode
                # string, so current_mode != 'wal' is always true and WAL gets
                # (re-)enabled on every startup -- confirm whether a fetchone()
                # was intended here
                current_mode = cursor.execute("PRAGMA journal_mode")
                if current_mode != 'wal':
                    cursor.execute("PRAGMA journal_mode=wal;")

            # Create cache table in DB if needed
            try:
                from django.core.cache import cache
                cache.get('test', None)
            except django.db.utils.OperationalError:
                call_command("createcachetable", verbosity=0)

            # if archivebox gets imported multiple times, we have to close
            # the sqlite3 whenever we init from scratch to avoid multiple threads
            # sharing the same connection by accident
            from django.db import connections
            for conn in connections.all():
                conn.close_if_unusable_or_obsolete()

            sql_index_path = Path(output_dir) / SQL_INDEX_FILENAME
            assert sql_index_path.exists(), (
                f'No database file {SQL_INDEX_FILENAME} found in: {config["OUTPUT_DIR"]} (Are you in an ArchiveBox collection directory?)')
    except KeyboardInterrupt:
        raise SystemExit(2)