# archivebox/extractors/headers.py
  1. __package__ = 'archivebox.extractors'
  2. from pathlib import Path
  3. from typing import Optional
  4. from ..index.schema import Link, ArchiveResult, ArchiveOutput
  5. from ..system import atomic_write
  6. from ..util import (
  7. enforce_types,
  8. get_headers,
  9. )
  10. from ..config import (
  11. TIMEOUT,
  12. CURL_BINARY,
  13. CURL_ARGS,
  14. CURL_USER_AGENT,
  15. CURL_VERSION,
  16. CHECK_SSL_VALIDITY,
  17. SAVE_HEADERS
  18. )
  19. from ..logging_util import TimedProgress
  20. @enforce_types
  21. def should_save_headers(link: Link, out_dir: Optional[str]=None, overwrite: Optional[bool]=False) -> bool:
  22. out_dir = out_dir or Path(link.link_dir)
  23. if not overwrite and (out_dir / 'headers.json').exists():
  24. return False
  25. return SAVE_HEADERS
  26. @enforce_types
  27. def save_headers(link: Link, out_dir: Optional[str]=None, timeout: int=TIMEOUT) -> ArchiveResult:
  28. """Download site headers"""
  29. out_dir = Path(out_dir or link.link_dir)
  30. output_folder = out_dir.absolute()
  31. output: ArchiveOutput = 'headers.json'
  32. status = 'succeeded'
  33. timer = TimedProgress(timeout, prefix=' ')
  34. cmd = [
  35. CURL_BINARY,
  36. *CURL_ARGS,
  37. '--head',
  38. '--max-time', str(timeout),
  39. *(['--user-agent', '{}'.format(CURL_USER_AGENT)] if CURL_USER_AGENT else []),
  40. *([] if CHECK_SSL_VALIDITY else ['--insecure']),
  41. link.url,
  42. ]
  43. try:
  44. json_headers = get_headers(link.url, timeout=timeout)
  45. output_folder.mkdir(exist_ok=True)
  46. atomic_write(str(output_folder / "headers.json"), json_headers)
  47. except (Exception, OSError) as err:
  48. status = 'failed'
  49. output = err
  50. finally:
  51. timer.end()
  52. return ArchiveResult(
  53. cmd=cmd,
  54. pwd=str(out_dir),
  55. cmd_version=CURL_VERSION,
  56. output=output,
  57. status=status,
  58. **timer.stats,
  59. )