headers.py

__package__ = 'archivebox.extractors'

from pathlib import Path
from typing import Optional

from ..index.schema import Link, ArchiveResult, ArchiveOutput
from ..system import atomic_write
from ..util import (
    enforce_types,
    get_headers,
    dedupe,
)
from ..config import (
    TIMEOUT,
    CURL_BINARY,
    CURL_ARGS,
    CURL_EXTRA_ARGS,
    CURL_USER_AGENT,
    CURL_VERSION,
    CHECK_SSL_VALIDITY,
    SAVE_HEADERS,
)
from ..logging_util import TimedProgress
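

# everything this extractor produces is written to headers.json inside the snapshot folder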
def get_output_path():
    return 'headers.json'
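

# skip the extractor if headers.json already exists for this snapshot (unless overwrite is set),
# otherwise defer to the SAVE_HEADERS config flag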
@enforce_types
def should_save_headers(link: Link, out_dir: Optional[str]=None, overwrite: Optional[bool]=False) -> bool:
    out_dir = out_dir or Path(link.link_dir)
    if not overwrite and (out_dir / get_output_path()).exists():
        return False

    return SAVE_HEADERS


@enforce_types
def save_headers(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEOUT) -> ArchiveResult:
    """Download site headers"""

    out_dir = Path(out_dir or link.link_dir)
    output_folder = out_dir.absolute()
    output: ArchiveOutput = get_output_path()

    status = 'succeeded'
    timer = TimedProgress(timeout, prefix=' ')
    # later options take precedence
    options = [
        *CURL_ARGS,
        *CURL_EXTRA_ARGS,
        '--head',
        '--max-time', str(timeout),
        *(['--user-agent', '{}'.format(CURL_USER_AGENT)] if CURL_USER_AGENT else []),
        *([] if CHECK_SSL_VALIDITY else ['--insecure']),
    ]
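    # the equivalent curl invocation is only recorded in the ArchiveResult below;
    # the headers themselves are fetched via get_headers() rather than by running this command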
    cmd = [
        CURL_BINARY,
        *dedupe(options),
        link.url,
    ]
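    # fetch the response headers and write them to headers.json atomically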
    try:
        json_headers = get_headers(link.url, timeout=timeout)
        output_folder.mkdir(exist_ok=True)
        atomic_write(str(output_folder / get_output_path()), json_headers)
    except (Exception, OSError) as err:
        status = 'failed'
        output = err
    finally:
        timer.end()
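
    # report the command, curl version, output path (or the error), status, and timing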
    return ArchiveResult(
        cmd=cmd,
        pwd=str(out_dir),
        cmd_version=CURL_VERSION,
        output=output,
        status=status,
        **timer.stats,
    )