logging_util.py

__package__ = 'archivebox'

import re
import os
import sys
import time
import argparse

from multiprocessing import Process
from datetime import datetime
from dataclasses import dataclass
from typing import Optional, List, Dict, Union, IO, TYPE_CHECKING

if TYPE_CHECKING:
    from .index.schema import Link, ArchiveResult

from .index.json import MAIN_INDEX_HEADER
from .util import enforce_types
from .config import (
    ConfigDict,
    PYTHON_ENCODING,
    ANSI,
    IS_TTY,
    TERM_WIDTH,
    OUTPUT_DIR,
    SOURCES_DIR_NAME,
    HTML_INDEX_FILENAME,
    stderr,
)
@dataclass
class RuntimeStats:
    """mutable stats counter for logging archiving timing info to CLI output"""

    skipped: int = 0
    succeeded: int = 0
    failed: int = 0

    parse_start_ts: Optional[datetime] = None
    parse_end_ts: Optional[datetime] = None

    index_start_ts: Optional[datetime] = None
    index_end_ts: Optional[datetime] = None

    archiving_start_ts: Optional[datetime] = None
    archiving_end_ts: Optional[datetime] = None

# globals are bad, mmkay
_LAST_RUN_STATS = RuntimeStats()
class SmartFormatter(argparse.HelpFormatter):
    """Patched formatter that prints newlines in argparse help strings"""

    def _split_lines(self, text, width):
        if '\n' in text:
            return text.splitlines()
        return argparse.HelpFormatter._split_lines(self, text, width)
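# A minimal usage sketch (hypothetical parser, not part of this module):
#   parser = argparse.ArgumentParser(formatter_class=SmartFormatter)
#   parser.add_argument('--depth', help='Crawl depth:\n  0: just the URLs\n  1: URLs + one hop out')
#   # argparse normally re-wraps help text to the terminal width;
#   # SmartFormatter preserves the explicit \n breaks instead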
def reject_stdin(caller: str, stdin: Optional[IO]=sys.stdin) -> None:
    """Tell the user they passed stdin to a command that doesn't accept it"""

    if stdin and not stdin.isatty():
        stdin_raw_text = stdin.read().strip()
        if stdin_raw_text:
            stderr(f'[X] The "{caller}" command does not accept stdin.', color='red')
            stderr(f'    Run "archivebox {caller} --help" to see usage and examples.')
            stderr()
            raise SystemExit(1)
def accept_stdin(stdin: Optional[IO]=sys.stdin) -> Optional[str]:
    """accept any standard input and return it as a string or None"""

    if not stdin:
        return None

    if not stdin.isatty():
        stdin_str = stdin.read().strip()
        return stdin_str or None

    return None
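# Usage sketch: commands that consume piped input call accept_stdin(), and
# commands that must not be piped into call reject_stdin() first, e.g.:
#   urls = accept_stdin()      # returns the piped text, or None when run on a TTY
#   reject_stdin('init')       # exits with an error if input was piped to 'archivebox init'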
class TimedProgress:
    """Show a progress bar and measure elapsed time until .end() is called"""

    def __init__(self, seconds, prefix=''):
        from .config import SHOW_PROGRESS
        self.SHOW_PROGRESS = SHOW_PROGRESS
        if self.SHOW_PROGRESS:
            self.p = Process(target=progress_bar, args=(seconds, prefix))
            self.p.start()

        self.stats = {'start_ts': datetime.now(), 'end_ts': None}

    def end(self):
        """immediately end progress, clear the progressbar line, and save end_ts"""

        end_ts = datetime.now()
        self.stats['end_ts'] = end_ts
        if self.SHOW_PROGRESS:
            # terminate the progress bar subprocess if we haven't already
            try:
                self.p.terminate()
                self.p.join()
                self.p.close()

                # clear the whole terminal line
                try:
                    sys.stdout.write('\r{}{}\r'.format((' ' * TERM_WIDTH()), ANSI['reset']))
                except (IOError, BrokenPipeError):
                    # ignore when the parent proc has stopped listening to our stdout
                    pass
            except ValueError:
                pass
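# Usage sketch (assumed pattern, mirroring how the extractors call this):
#   timer = TimedProgress(60, prefix='      ')
#   try:
#       run_extractor()          # hypothetical long-running work
#   finally:
#       timer.end()              # always stop the bar and record end_ts
#   elapsed = timer.stats['end_ts'] - timer.stats['start_ts']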
@enforce_types
def progress_bar(seconds: int, prefix: str='') -> None:
    """show timer in the form of progress bar, with percentage and seconds remaining"""

    chunk = '█' if PYTHON_ENCODING == 'UTF-8' else '#'
    last_width = TERM_WIDTH()
    # number of progress chunks to show (aka max bar width),
    # clamped to at least 1 so very narrow terminals don't divide by zero
    chunks = max(last_width - len(prefix) - 20, 1)
    try:
        for s in range(seconds * chunks):
            max_width = TERM_WIDTH()
            if max_width < last_width:
                # when the terminal size is shrunk, we have to write a newline,
                # otherwise the progress bar will keep wrapping incorrectly
                sys.stdout.write('\r\n')
                sys.stdout.flush()
            chunks = max(max_width - len(prefix) - 20, 1)
            progress = s / chunks / seconds * 100
            bar_width = round(progress / (100 / chunks))
            last_width = max_width

            # ████████████████████           0.9% (1/60sec)
            sys.stdout.write('\r{0}{1}{2}{3} {4}% ({5}/{6}sec)'.format(
                prefix,
                ANSI['green'],
                (chunk * bar_width).ljust(chunks),
                ANSI['reset'],
                round(progress, 1),
                round(s / chunks),
                seconds,
            ))
            sys.stdout.flush()
            time.sleep(1 / chunks)

        # ██████████████████████████████████ 100.0% (60/60sec)
        sys.stdout.write('\r{0}{1}{2}{3} {4}% ({5}/{6}sec)\n'.format(
            prefix,
            ANSI['red'],
            chunk * chunks,
            ANSI['reset'],
            100.0,
            seconds,
            seconds,
        ))
        sys.stdout.flush()
    except (KeyboardInterrupt, BrokenPipeError):
        print()
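# Timing note: each of the seconds * chunks iterations sleeps 1/chunks seconds,
# so a full bar takes roughly `seconds` of wall-clock time regardless of width.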
def log_cli_command(subcommand: str, subcommand_args: List[str], stdin: Optional[str], pwd: str):
    from .config import VERSION, ANSI
    cmd = ' '.join(('archivebox', subcommand, *subcommand_args))
    stderr('{black}[i] [{now}] ArchiveBox v{VERSION}: {cmd}{reset}'.format(
        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        VERSION=VERSION,
        cmd=cmd,
        **ANSI,
    ))
    stderr('{black}    > {pwd}{reset}'.format(pwd=pwd, **ANSI))
    stderr()
### Parsing Stage

def log_importing_started(urls: Union[str, List[str]], depth: int, index_only: bool):
    _LAST_RUN_STATS.parse_start_ts = datetime.now()
    print('{green}[+] [{}] Adding {} links to index (crawl depth={}){}...{reset}'.format(
        _LAST_RUN_STATS.parse_start_ts.strftime('%Y-%m-%d %H:%M:%S'),
        len(urls) if isinstance(urls, list) else len(urls.split('\n')),
        depth,
        ' (index only)' if index_only else '',
        **ANSI,
    ))

def log_source_saved(source_file: str):
    print('    > Saved verbatim input to {}/{}'.format(SOURCES_DIR_NAME, source_file.rsplit('/', 1)[-1]))

def log_parsing_finished(num_parsed: int, parser_name: str):
    _LAST_RUN_STATS.parse_end_ts = datetime.now()
    print('    > Parsed {} URLs from input ({})'.format(num_parsed, parser_name))

def log_deduping_finished(num_new_links: int):
    print('    > Found {} new URLs not already in index'.format(num_new_links))

def log_crawl_started(new_links):
    print()
    print('{green}[*] Starting crawl of {} sites 1 hop out from starting point{reset}'.format(len(new_links), **ANSI))
### Indexing Stage

def log_indexing_process_started(num_links: int):
    start_ts = datetime.now()
    _LAST_RUN_STATS.index_start_ts = start_ts
    print()
    print('{black}[*] [{}] Writing {} links to main index...{reset}'.format(
        start_ts.strftime('%Y-%m-%d %H:%M:%S'),
        num_links,
        **ANSI,
    ))

def log_indexing_process_finished():
    end_ts = datetime.now()
    _LAST_RUN_STATS.index_end_ts = end_ts

def log_indexing_started(out_path: str):
    if IS_TTY:
        sys.stdout.write(f'    > {out_path}')

def log_indexing_finished(out_path: str):
    print(f'\r    √ {out_path}')
### Archiving Stage

def log_archiving_started(num_links: int, resume: Optional[float]=None):
    start_ts = datetime.now()
    _LAST_RUN_STATS.archiving_start_ts = start_ts
    print()
    if resume:
        print('{green}[▶] [{}] Resuming archive updating for {} pages starting from {}...{reset}'.format(
            start_ts.strftime('%Y-%m-%d %H:%M:%S'),
            num_links,
            resume,
            **ANSI,
        ))
    else:
        print('{green}[▶] [{}] Collecting content for {} Snapshots in archive...{reset}'.format(
            start_ts.strftime('%Y-%m-%d %H:%M:%S'),
            num_links,
            **ANSI,
        ))

def log_archiving_paused(num_links: int, idx: int, timestamp: str):
    end_ts = datetime.now()
    _LAST_RUN_STATS.archiving_end_ts = end_ts
    print()
    print('\n{lightyellow}[X] [{now}] Downloading paused on link {timestamp} ({idx}/{total}){reset}'.format(
        **ANSI,
        now=end_ts.strftime('%Y-%m-%d %H:%M:%S'),
        idx=idx+1,
        timestamp=timestamp,
        total=num_links,
    ))
    print()
    print('    {lightred}Hint:{reset} To view your archive index, open:'.format(**ANSI))
    print('        {}/{}'.format(OUTPUT_DIR, HTML_INDEX_FILENAME))
    print('    Continue archiving where you left off by running:')
    print('        archivebox update --resume={}'.format(timestamp))

def log_archiving_finished(num_links: int):
    end_ts = datetime.now()
    _LAST_RUN_STATS.archiving_end_ts = end_ts
    assert _LAST_RUN_STATS.archiving_start_ts is not None
    seconds = end_ts.timestamp() - _LAST_RUN_STATS.archiving_start_ts.timestamp()
    if seconds > 60:
        duration = '{0:.2f} min'.format(seconds / 60)
    else:
        duration = '{0:.2f} sec'.format(seconds)

    print()
    print('{}[√] [{}] Update of {} pages complete ({}){}'.format(
        ANSI['green'],
        end_ts.strftime('%Y-%m-%d %H:%M:%S'),
        num_links,
        duration,
        ANSI['reset'],
    ))
    print('    - {} links skipped'.format(_LAST_RUN_STATS.skipped))
    print('    - {} links updated'.format(_LAST_RUN_STATS.succeeded + _LAST_RUN_STATS.failed))
    print('    - {} links had errors'.format(_LAST_RUN_STATS.failed))
    print()
    print('    {lightred}Hint:{reset} To view your archive index, open:'.format(**ANSI))
    print('        {}/{}'.format(OUTPUT_DIR, HTML_INDEX_FILENAME))
    print('    Or run the built-in webserver:')
    print('        archivebox server')
def log_link_archiving_started(link: "Link", link_dir: str, is_new: bool):
    # [*] [2019-03-22 13:46:45] "Log Structured Merge Trees - ben stopford"
    #     http://www.benstopford.com/2015/02/14/log-structured-merge-trees/
    #     > output/archive/1478739709
    print('\n[{symbol_color}{symbol}{reset}] [{symbol_color}{now}{reset}] "{title}"'.format(
        symbol_color=ANSI['green' if is_new else 'black'],
        symbol='+' if is_new else '√',
        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        title=link.title or link.base_url,
        **ANSI,
    ))
    print('    {blue}{url}{reset}'.format(url=link.url, **ANSI))
    print('    {} {}'.format(
        '>' if is_new else '√',
        pretty_path(link_dir),
    ))

def log_link_archiving_finished(link: "Link", link_dir: str, is_new: bool, stats: dict):
    total = sum(stats.values())

    if stats['failed'] > 0:
        _LAST_RUN_STATS.failed += 1
    elif stats['skipped'] == total:
        _LAST_RUN_STATS.skipped += 1
    else:
        _LAST_RUN_STATS.succeeded += 1
def log_archive_method_started(method: str):
    print('      > {}'.format(method))

def log_archive_method_finished(result: "ArchiveResult"):
    """print the diagnostics for a failed archive method, quoting any argument
    containing whitespace so the printed command can be copy-pasted and re-run directly
    """

    # Prettify CMD string and make it safe to copy-paste by quoting arguments
    quoted_cmd = ' '.join(
        '"{}"'.format(arg) if ' ' in arg else arg
        for arg in result.cmd
    )

    if result.status == 'failed':
        # Prettify error output hints string and limit to five lines
        hints = getattr(result.output, 'hints', None) or ()
        if hints:
            hints = hints if isinstance(hints, (list, tuple)) else hints.split('\n')
            hints = (
                '    {}{}{}'.format(ANSI['lightyellow'], line.strip(), ANSI['reset'])
                for line in hints[:5] if line.strip()
            )

        # Collect and prefix output lines with indentation
        output_lines = [
            '{lightyellow}Extractor failed:{reset}'.format(**ANSI),
            '    {reset}{} {red}{}{reset}'.format(
                result.output.__class__.__name__.replace('ArchiveError', ''),
                result.output,
                **ANSI,
            ),
            *hints,
            '{}Run to see full output:{}'.format(ANSI['lightred'], ANSI['reset']),
            *(['    cd {};'.format(result.pwd)] if result.pwd else []),
            '    {}'.format(quoted_cmd),
        ]
        print('\n'.join(
            '        {}'.format(line)
            for line in output_lines
            if line
        ))
        print()
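# Example of the quoting behavior (hypothetical cmd, for illustration only):
#   result.cmd = ['wget', '--warc-file', 'my archive']  ->  wget --warc-file "my archive"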
def log_list_started(filter_patterns: Optional[List[str]], filter_type: str):
    print('{green}[*] Finding links in the archive index matching these {} patterns:{reset}'.format(
        filter_type,
        **ANSI,
    ))
    print('    {}'.format(' '.join(filter_patterns or ())))

def log_list_finished(links):
    from .index.csv import links_to_csv
    print()
    print('---------------------------------------------------------------------------------------------------')
    print(links_to_csv(links, cols=['timestamp', 'is_archived', 'num_outputs', 'url'], header=True, ljust=16, separator=' | '))
    print('---------------------------------------------------------------------------------------------------')
    print()
def log_removal_started(links: List["Link"], yes: bool, delete: bool):
    print('{lightyellow}[i] Found {} matching URLs to remove.{reset}'.format(len(links), **ANSI))
    if delete:
        file_counts = [link.num_outputs for link in links if os.path.exists(link.link_dir)]
        print(
            f'    {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n'
            f'    ({len(file_counts)} data folders with {sum(file_counts)} archived files will be deleted!)'
        )
    else:
        print(
            '    Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
            '    (Pass --delete if you also want to permanently delete the data folders)'
        )

    if not yes:
        print()
        print('{lightyellow}[?] Do you want to proceed with removing these {} links?{reset}'.format(len(links), **ANSI))
        try:
            assert input('    y/[n]: ').lower() == 'y'
        except (KeyboardInterrupt, EOFError, AssertionError):
            raise SystemExit(0)
def log_removal_finished(all_links: int, to_keep: int):
    if all_links == 0:
        print()
        print('{red}[X] No matching links found.{reset}'.format(**ANSI))
    else:
        num_removed = all_links - to_keep
        print()
        print('{red}[√] Removed {} out of {} links from the archive index.{reset}'.format(
            num_removed,
            all_links,
            **ANSI,
        ))
        print('    Index now contains {} links.'.format(to_keep))
def log_shell_welcome_msg():
    from .cli import list_subcommands
    print('{green}# ArchiveBox Imports{reset}'.format(**ANSI))
    print('{green}from archivebox.core.models import Snapshot, User{reset}'.format(**ANSI))
    print('{green}from archivebox import *\n    {}{reset}'.format("\n    ".join(list_subcommands().keys()), **ANSI))
    print()
    print('[i] Welcome to the ArchiveBox Shell!')
    print('    https://github.com/pirate/ArchiveBox/wiki/Usage#Shell-Usage')
    print()
    print('    {lightred}Hint:{reset} Example use:'.format(**ANSI))
    print('        print(Snapshot.objects.filter(is_archived=True).count())')
    print('        Snapshot.objects.get(url="https://example.com").as_json()')
    print('        add("https://example.com/some/new/url")')
### Helpers

@enforce_types
def pretty_path(path: str) -> str:
    """convert paths like .../ArchiveBox/archivebox/../output/abc into ./output/abc"""
    pwd = os.path.abspath('.')
    # parent = os.path.abspath(os.path.join(pwd, os.path.pardir))
    return path.replace(pwd + '/', './')
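# Example (assuming the process cwd is /home/user/ArchiveBox):
#   pretty_path('/home/user/ArchiveBox/output/abc')  ->  './output/abc'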
@enforce_types
def printable_filesize(num_bytes: Union[int, float]) -> str:
    for count in ['Bytes', 'KB', 'MB', 'GB']:
        if -1024.0 < num_bytes < 1024.0:
            return '%3.1f %s' % (num_bytes, count)
        num_bytes /= 1024.0
    return '%3.1f %s' % (num_bytes, 'TB')
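# Example outputs (powers of 1024, not 1000):
#   printable_filesize(1024)     ->  '1.0 KB'
#   printable_filesize(5242880)  ->  '5.0 MB'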
@enforce_types
def printable_folders(folders: Dict[str, Optional["Link"]],
                      json: bool=False,
                      html: bool=False,
                      csv: Optional[str]=None,
                      index: bool=False) -> str:
    links = folders.values()
    if json:
        from .index.json import to_json
        if index:
            output = {
                **MAIN_INDEX_HEADER,
                'num_links': len(links),
                'updated': datetime.now(),
                'last_run_cmd': sys.argv,
                'links': links,
            }
        else:
            output = links
        return to_json(output, indent=4, sort_keys=True)
    elif html:
        from .index.html import main_index_template
        if index:
            output = main_index_template(links, True)
        else:
            from .index.html import MINIMAL_INDEX_TEMPLATE
            output = main_index_template(links, True, MINIMAL_INDEX_TEMPLATE)
        return output
    elif csv:
        from .index.csv import links_to_csv
        return links_to_csv(folders.values(), cols=csv.split(','), header=True)

    return '\n'.join(
        f'{folder} {link and link.url} "{link and link.title}"'
        for folder, link in folders.items()
    )
@enforce_types
def printable_config(config: ConfigDict, prefix: str='') -> str:
    return f'\n{prefix}'.join(
        f'{key}={val}'
        for key, val in config.items()
        if not (isinstance(val, dict) or callable(val))
    )
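# Example (hypothetical config values, for illustration):
#   printable_config({'USE_COLOR': True, 'TIMEOUT': 60})  ->  'USE_COLOR=True\nTIMEOUT=60'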
@enforce_types
def printable_folder_status(name: str, folder: Dict) -> str:
    if folder['enabled']:
        if folder['is_valid']:
            # default num_files to '?' so it's always bound, even if folder['path'] is unset
            color, symbol, note, num_files = 'green', '√', 'valid', '?'
        else:
            color, symbol, note, num_files = 'red', 'X', 'invalid', '?'
    else:
        color, symbol, note, num_files = 'lightyellow', '-', 'disabled', '-'

    if folder['path']:
        if os.path.exists(folder['path']):
            num_files = (
                f'{len(os.listdir(folder["path"]))} files'
                if os.path.isdir(folder['path']) else
                printable_filesize(os.path.getsize(folder['path']))
            )
        else:
            num_files = 'missing'

        if ' ' in folder['path']:
            folder['path'] = f'"{folder["path"]}"'

    return ' '.join((
        ANSI[color],
        symbol,
        ANSI['reset'],
        name.ljust(22),
        (folder["path"] or '').ljust(76),
        num_files.ljust(14),
        ANSI[color],
        note,
        ANSI['reset'],
    ))
@enforce_types
def printable_dependency_version(name: str, dependency: Dict) -> str:
    version = None
    if dependency['enabled']:
        if dependency['is_valid']:
            color, symbol, note, version = 'green', '√', 'valid', ''

            parsed_version_num = re.search(r'[\d\.]+', dependency['version'])
            if parsed_version_num:
                version = f'v{parsed_version_num[0]}'

        if not version:
            color, symbol, note, version = 'red', 'X', 'invalid', '?'
    else:
        color, symbol, note, version = 'lightyellow', '-', 'disabled', '-'

    if ' ' in (dependency["path"] or ''):
        dependency["path"] = f'"{dependency["path"]}"'

    return ' '.join((
        ANSI[color],
        symbol,
        ANSI['reset'],
        name.ljust(22),
        (dependency["path"] or '').ljust(76),
        version.ljust(14),
        ANSI[color],
        note,
        ANSI['reset'],
    ))