system.py

__package__ = 'archivebox'

import os
import shutil
import json as pyjson

from typing import Optional, Union, Set, Tuple

from crontab import CronTab
from atomicwrites import atomic_write as awrite

from subprocess import (
    Popen,
    PIPE,
    DEVNULL,
    CompletedProcess,
    TimeoutExpired,
    CalledProcessError,
)

from .util import enforce_types, ExtendedEncoder
from .config import OUTPUT_PERMISSIONS

def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs):
    """Patched version of subprocess.run that fixes blocking IO making timeout= ineffective"""

    if input is not None:
        if 'stdin' in kwargs:
            raise ValueError('stdin and input arguments may not both be used.')
        kwargs['stdin'] = PIPE

    if capture_output:
        if ('stdout' in kwargs) or ('stderr' in kwargs):
            raise ValueError('stdout and stderr arguments may not be used '
                             'with capture_output.')
        kwargs['stdout'] = PIPE
        kwargs['stderr'] = PIPE

    with Popen(*popenargs, **kwargs) as process:
        try:
            stdout, stderr = process.communicate(input, timeout=timeout)
        except TimeoutExpired:
            process.kill()
            # give the killed process a brief window to flush any remaining output
            try:
                stdout, stderr = process.communicate(input, timeout=2)
            except Exception:
                pass
            raise TimeoutExpired(popenargs[0][0], timeout)
        except BaseException:
            process.kill()
            # We don't call process.wait() as .__exit__ does that for us.
            raise
        retcode = process.poll()
        if check and retcode:
            raise CalledProcessError(retcode, process.args,
                                     output=stdout, stderr=stderr)

    return CompletedProcess(process.args, retcode, stdout, stderr)
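
# Usage sketch (illustrative call, not invoked anywhere in this module):
#   result = run(['echo', 'hello'], capture_output=True, timeout=5)
#   if result.returncode == 0:
#       print(result.stdout.decode())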

def atomic_write(path: str, contents: Union[dict, str, bytes], overwrite: bool=True) -> None:
    """Safe atomic write to filesystem by writing to temp file + atomic rename"""
    # open the temp file in binary mode when writing raw bytes, text mode otherwise
    mode = 'wb+' if isinstance(contents, bytes) else 'w'
    with awrite(path, mode=mode, overwrite=overwrite) as f:
        if isinstance(contents, dict):
            pyjson.dump(contents, f, indent=4, sort_keys=True, cls=ExtendedEncoder)
        else:
            f.write(contents)
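
# Usage sketch (hypothetical paths and values, for illustration only):
#   atomic_write('index.json', {'url': 'https://example.com'})    # dicts are dumped as JSON
#   atomic_write('headers.txt', 'Content-Type: text/html')        # str/bytes are written as-is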

@enforce_types
def chmod_file(path: str, cwd: str='.', permissions: str=OUTPUT_PERMISSIONS, timeout: int=30) -> None:
    """chmod -R <permissions> <cwd>/<path>"""

    if not os.path.exists(os.path.join(cwd, path)):
        raise Exception('Failed to chmod: {} does not exist (did the previous step fail?)'.format(path))

    chmod_result = run(['chmod', '-R', permissions, path], cwd=cwd, stdout=DEVNULL, stderr=PIPE, timeout=timeout)
    if chmod_result.returncode == 1:
        print(' ', chmod_result.stderr.decode())
        raise Exception('Failed to chmod {}/{}'.format(cwd, path))
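
# Usage sketch (hypothetical snapshot dir, for illustration only):
#   chmod_file('archive/1556862619', cwd='/path/to/output', permissions='755')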

@enforce_types
def copy_and_overwrite(from_path: str, to_path: str):
    """copy a given file or directory to a given path, overwriting the destination"""
    if os.path.isdir(from_path):
        shutil.rmtree(to_path, ignore_errors=True)
        shutil.copytree(from_path, to_path)
    else:
        with open(from_path, 'rb') as src:
            contents = src.read()
        atomic_write(to_path, contents)
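
# Usage sketch (hypothetical paths, for illustration only):
#   copy_and_overwrite('archive/1556862619', 'backup/1556862619')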

@enforce_types
def get_dir_size(path: str, recursive: bool=True, pattern: Optional[str]=None) -> Tuple[int, int, int]:
    """get the total disk size of a given directory, optionally summing up
       recursively and limiting to entries matching a given substring pattern
    """
    num_bytes, num_dirs, num_files = 0, 0, 0
    for entry in os.scandir(path):
        if (pattern is not None) and (pattern not in entry.path):
            continue
        if entry.is_dir(follow_symlinks=False):
            if not recursive:
                continue
            num_dirs += 1
            bytes_inside, dirs_inside, files_inside = get_dir_size(entry.path)
            num_bytes += bytes_inside
            num_dirs += dirs_inside
            num_files += files_inside
        else:
            num_bytes += entry.stat(follow_symlinks=False).st_size
            num_files += 1
    return num_bytes, num_dirs, num_files
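
# Usage sketch (hypothetical archive dir, for illustration only):
#   size, dirs, files = get_dir_size('archive/1556862619')
#   print('{} bytes across {} files in {} dirs'.format(size, files, dirs))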

CRON_COMMENT = 'archivebox_schedule'


@enforce_types
def dedupe_cron_jobs(cron: CronTab) -> CronTab:
    """remove duplicate crontab entries, keeping one job per unique (schedule, command) pair"""
    deduped: Set[Tuple[str, str]] = set()

    # collect the unique (schedule, command) pairs, then clear out all existing jobs
    for job in list(cron):
        unique_tuple = (str(job.slices), job.command)
        if unique_tuple not in deduped:
            deduped.add(unique_tuple)
        cron.remove(job)

    # re-add one enabled job per unique pair, tagged with the archivebox comment
    for schedule, command in deduped:
        job = cron.new(command=command, comment=CRON_COMMENT)
        job.setall(schedule)
        job.enable()

    return cron
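
# Usage sketch (CronTab comes from the python-crontab package; illustrative only):
#   cron = CronTab(user=True)
#   cron = dedupe_cron_jobs(cron)
#   cron.write()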