logs.py

import os
import sys

from datetime import datetime
from dataclasses import dataclass
from typing import Optional, List

from .schema import Link, ArchiveResult
from .config import ANSI, OUTPUT_DIR, IS_TTY


@dataclass
class RuntimeStats:
    """mutable stats counter for logging archiving timing info to CLI output"""

    skipped: int = 0
    succeeded: int = 0
    failed: int = 0

    parse_start_ts: Optional[datetime] = None
    parse_end_ts: Optional[datetime] = None

    index_start_ts: Optional[datetime] = None
    index_end_ts: Optional[datetime] = None

    archiving_start_ts: Optional[datetime] = None
    archiving_end_ts: Optional[datetime] = None


# globals are bad, mmkay
_LAST_RUN_STATS = RuntimeStats()
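# _LAST_RUN_STATS is shared module state: the log_* helpers below mutate its
# counters and timestamps as a run progresses, and log_archiving_finished()
# reads them back to print the end-of-run summary.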


def pretty_path(path: str) -> str:
    """convert paths like .../ArchiveBox/archivebox/../output/abc into output/abc"""
    pwd = os.path.abspath('.')
    # parent = os.path.abspath(os.path.join(pwd, os.path.pardir))
    return path.replace(pwd + '/', './')
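
# Illustrative example (hypothetical paths): if the current working directory is
# /home/user/ArchiveBox, then pretty_path('/home/user/ArchiveBox/output/archive/123')
# returns './output/archive/123'; paths outside the cwd are returned unchanged.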


### Parsing Stage

def log_parsing_started(source_file: str):
    start_ts = datetime.now()
    _LAST_RUN_STATS.parse_start_ts = start_ts
    print('\n{green}[*] [{}] Parsing new links from output/sources/{}...{reset}'.format(
        start_ts.strftime('%Y-%m-%d %H:%M:%S'),
        source_file.rsplit('/', 1)[-1],
        **ANSI,
    ))

def log_parsing_finished(num_parsed: int, num_new_links: int, parser_name: str):
    end_ts = datetime.now()
    _LAST_RUN_STATS.parse_end_ts = end_ts
    print('    > Parsed {} links as {} ({} new links added)'.format(num_parsed, parser_name, num_new_links))


### Indexing Stage

def log_indexing_process_started(num_links: int):
    start_ts = datetime.now()
    _LAST_RUN_STATS.index_start_ts = start_ts
    print()
    print('{green}[*] [{}] Writing {} links to main index...{reset}'.format(
        start_ts.strftime('%Y-%m-%d %H:%M:%S'),
        num_links,
        **ANSI,
    ))

def log_indexing_process_finished():
    end_ts = datetime.now()
    _LAST_RUN_STATS.index_end_ts = end_ts

def log_indexing_started(out_path: str):
    if IS_TTY:
        sys.stdout.write(f'    > {out_path}')

def log_indexing_finished(out_path: str):
    print(f'\r    √ {out_path}')


### Archiving Stage

def log_archiving_started(num_links: int, resume: Optional[float]):
    start_ts = datetime.now()
    _LAST_RUN_STATS.archiving_start_ts = start_ts
    print()
    if resume:
        print('{green}[▶] [{}] Resuming archive updating for {} pages starting from {}...{reset}'.format(
            start_ts.strftime('%Y-%m-%d %H:%M:%S'),
            num_links,
            resume,
            **ANSI,
        ))
    else:
        print('{green}[▶] [{}] Updating content for {} pages in archive...{reset}'.format(
            start_ts.strftime('%Y-%m-%d %H:%M:%S'),
            num_links,
            **ANSI,
        ))

def log_archiving_paused(num_links: int, idx: int, timestamp: str):
    end_ts = datetime.now()
    _LAST_RUN_STATS.archiving_end_ts = end_ts
    print()
    print('\n{lightyellow}[X] [{now}] Downloading paused on link {timestamp} ({idx}/{total}){reset}'.format(
        **ANSI,
        now=end_ts.strftime('%Y-%m-%d %H:%M:%S'),
        idx=idx+1,
        timestamp=timestamp,
        total=num_links,
    ))
    print('    To view your archive, open:')
    print('        {}/index.html'.format(OUTPUT_DIR))
    print('    Continue archiving where you left off by running:')
    print('        archivebox update --resume={}'.format(timestamp))

def log_archiving_finished(num_links: int):
    end_ts = datetime.now()
    _LAST_RUN_STATS.archiving_end_ts = end_ts
    assert _LAST_RUN_STATS.archiving_start_ts is not None
    seconds = end_ts.timestamp() - _LAST_RUN_STATS.archiving_start_ts.timestamp()
    if seconds > 60:
        duration = '{0:.2f} min'.format(seconds / 60)
    else:
        duration = '{0:.2f} sec'.format(seconds)

    print()
    print('{}[√] [{}] Update of {} pages complete ({}){}'.format(
        ANSI['green'],
        end_ts.strftime('%Y-%m-%d %H:%M:%S'),
        num_links,
        duration,
        ANSI['reset'],
    ))
    print('    - {} links skipped'.format(_LAST_RUN_STATS.skipped))
    print('    - {} links updated'.format(_LAST_RUN_STATS.succeeded))
    print('    - {} links had errors'.format(_LAST_RUN_STATS.failed))
    print('    To view your archive, open:')
    print('        {}/index.html'.format(OUTPUT_DIR))


def log_link_archiving_started(link: Link, link_dir: str, is_new: bool):
    # [*] [2019-03-22 13:46:45] "Log Structured Merge Trees - ben stopford"
    #     http://www.benstopford.com/2015/02/14/log-structured-merge-trees/
    #     > output/archive/1478739709

    print('\n[{symbol_color}{symbol}{reset}] [{symbol_color}{now}{reset}] "{title}"'.format(
        symbol_color=ANSI['green' if is_new else 'black'],
        symbol='+' if is_new else '√',
        now=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        title=link.title or link.base_url,
        **ANSI,
    ))
    print('    {blue}{url}{reset}'.format(url=link.url, **ANSI))
    print('    {} {}'.format(
        '>' if is_new else '√',
        pretty_path(link_dir),
    ))

def log_link_archiving_finished(link: Link, link_dir: str, is_new: bool, stats: dict):
    total = sum(stats.values())

    if stats['failed'] > 0:
        _LAST_RUN_STATS.failed += 1
    elif stats['skipped'] == total:
        _LAST_RUN_STATS.skipped += 1
    else:
        _LAST_RUN_STATS.succeeded += 1
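
# Illustrative example (hypothetical counts): stats={'succeeded': 0, 'failed': 0, 'skipped': 3}
# counts the whole link as skipped, because every method was skipped; any non-zero
# 'failed' count marks the link as failed, and everything else counts as succeeded.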


def log_archive_method_started(method: str):
    print('      > {}'.format(method))

def log_archive_method_finished(result: ArchiveResult):
    """quote the argument with whitespace in a command so the user can
       copy-paste the outputted string directly to run the cmd
    """

    # Prettify CMD string and make it safe to copy-paste by quoting arguments
    quoted_cmd = ' '.join(
        '"{}"'.format(arg) if ' ' in arg else arg
        for arg in result.cmd
    )

    if result.status == 'failed':
        # Prettify error output hints string and limit to five lines
        hints = getattr(result.output, 'hints', None) or ()
        if hints:
            hints = hints if isinstance(hints, (list, tuple)) else hints.split('\n')
            hints = (
                '    {}{}{}'.format(ANSI['lightyellow'], line.strip(), ANSI['reset'])
                for line in hints[:5] if line.strip()
            )

        # Collect and prefix output lines with indentation
        output_lines = [
            '{lightred}Failed:{reset}'.format(**ANSI),
            '    {reset}{} {red}{}{reset}'.format(
                result.output.__class__.__name__.replace('ArchiveError', ''),
                result.output,
                **ANSI,
            ),
            *hints,
            '{}Run to see full output:{}'.format(ANSI['lightred'], ANSI['reset']),
            *(['    cd {};'.format(result.pwd)] if result.pwd else []),
            '    {}'.format(quoted_cmd),
        ]
        print('\n'.join(
            '        {}'.format(line)
            for line in output_lines
            if line
        ))
        print()
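
# Illustrative example of the quoting above (hypothetical command): a result.cmd of
# ['wget', '-p', 'https://example.com/some page'] is rendered as
# wget -p "https://example.com/some page", so the printed line can be copy-pasted and re-run.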


def log_list_started(filter_patterns: List[str], filter_type: str):
    print('{green}[*] Finding links in the archive index matching these {} patterns:{reset}'.format(
        filter_type,
        **ANSI,
    ))
    print('    {}'.format(' '.join(filter_patterns)))

def log_list_finished(links):
    from .util import to_csv
    print()
    print('---------------------------------------------------------------------------------------------------')
    print(to_csv(links, csv_cols=['timestamp', 'is_archived', 'num_outputs', 'url'], header=True, ljust=16, separator=' | '))
    print('---------------------------------------------------------------------------------------------------')
    print()

def log_removal_started(links: List[Link], yes: bool, delete: bool):
    print('{lightyellow}[i] Found {} matching URLs to remove.{reset}'.format(len(links), **ANSI))
    if delete:
        file_counts = [link.num_outputs for link in links if os.path.exists(link.link_dir)]
        print(
            f'    {len(links)} Links will be de-listed from the main index, and their archived content folders will be deleted from disk.\n'
            f'    ({len(file_counts)} data folders with {sum(file_counts)} archived files will be deleted!)'
        )
    else:
        print(
            '    Matching links will be de-listed from the main index, but their archived content folders will remain in place on disk.\n'
            '    (Pass --delete if you also want to permanently delete the data folders)'
        )

    if not yes:
        print()
        print('{lightyellow}[?] Do you want to proceed with removing these {} links?{reset}'.format(len(links), **ANSI))
        try:
            assert input('    y/[n]: ').lower() == 'y'
        except (KeyboardInterrupt, EOFError, AssertionError):
            raise SystemExit(0)

def log_removal_finished(all_links: int, to_keep: int):
    if all_links == 0:
        print()
        print('{red}[X] No matching links found.{reset}'.format(**ANSI))
    else:
        num_removed = all_links - to_keep
        print()
        print('{red}[√] Removed {} out of {} links from the archive index.{reset}'.format(
            num_removed,
            all_links,
            **ANSI,
        ))
        print('    Index now contains {} links.'.format(to_keep))