# favicon.py (1.7 KB)
  1. __package__ = 'archivebox.extractors'
  2. import os
  3. from typing import Optional
  4. from ..index.schema import Link, ArchiveResult, ArchiveOutput
  5. from ..system import chmod_file, run
  6. from ..util import enforce_types, domain
  7. from ..config import (
  8. TIMEOUT,
  9. SAVE_FAVICON,
  10. CURL_BINARY,
  11. CURL_VERSION,
  12. CHECK_SSL_VALIDITY,
  13. CURL_USER_AGENT,
  14. )
  15. from ..cli.logging import TimedProgress
  16. @enforce_types
  17. def should_save_favicon(link: Link, out_dir: Optional[str]=None) -> bool:
  18. out_dir = out_dir or link.link_dir
  19. if os.path.exists(os.path.join(out_dir, 'favicon.ico')):
  20. return False
  21. return SAVE_FAVICON
  22. @enforce_types
  23. def save_favicon(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEOUT) -> ArchiveResult:
  24. """download site favicon from google's favicon api"""
  25. out_dir = out_dir or link.link_dir
  26. output: ArchiveOutput = 'favicon.ico'
  27. cmd = [
  28. CURL_BINARY,
  29. '--silent',
  30. '--max-time', str(timeout),
  31. '--location',
  32. '--output', str(output),
  33. *(['--user-agent', '{}'.format(CURL_USER_AGENT)] if CURL_USER_AGENT else []),
  34. *([] if CHECK_SSL_VALIDITY else ['--insecure']),
  35. 'https://www.google.com/s2/favicons?domain={}'.format(domain(link.url)),
  36. ]
  37. status = 'pending'
  38. timer = TimedProgress(timeout, prefix=' ')
  39. try:
  40. run(cmd, cwd=out_dir, timeout=timeout)
  41. chmod_file(output, cwd=out_dir)
  42. status = 'succeeded'
  43. except Exception as err:
  44. status = 'failed'
  45. output = err
  46. finally:
  47. timer.end()
  48. return ArchiveResult(
  49. cmd=cmd,
  50. pwd=out_dir,
  51. cmd_version=CURL_VERSION,
  52. output=output,
  53. status=status,
  54. **timer.stats,
  55. )