system.py

__package__ = 'archivebox'

import os
import shutil

from json import dump
from pathlib import Path
from typing import Optional, Union, Set, Tuple
from subprocess import run as subprocess_run

from crontab import CronTab
from atomicwrites import atomic_write as lib_atomic_write

from .util import enforce_types, ExtendedEncoder
from .config import OUTPUT_PERMISSIONS


def run(*args, input=None, capture_output=True, text=False, **kwargs):
    """Patched version of subprocess.run that avoids blocking IO making timeout= ineffective"""

    if input is not None:
        if 'stdin' in kwargs:
            raise ValueError('stdin and input arguments may not both be used.')

    if capture_output:
        if ('stdout' in kwargs) or ('stderr' in kwargs):
            raise ValueError('stdout and stderr arguments may not be used '
                             'with capture_output.')

    return subprocess_run(*args, input=input, capture_output=capture_output, text=text, **kwargs)
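
# Example usage (illustrative sketch only, not part of the original module):
#
#     result = run(['echo', 'hello'], timeout=5, text=True)
#     assert result.stdout == 'hello\n'
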

@enforce_types
def atomic_write(path: Union[Path, str], contents: Union[dict, str, bytes], overwrite: bool=True) -> None:
    """Safe atomic write to filesystem by writing to temp file + atomic rename"""

    mode = 'wb+' if isinstance(contents, bytes) else 'w'

    # print('\n> Atomic Write:', mode, path, len(contents), f'overwrite={overwrite}')
    try:
        with lib_atomic_write(path, mode=mode, overwrite=overwrite) as f:
            if isinstance(contents, dict):
                dump(contents, f, indent=4, sort_keys=True, cls=ExtendedEncoder)
            elif isinstance(contents, (bytes, str)):
                f.write(contents)
    except OSError as e:
        print(f"[X] OSError: Failed to write {path} with fcntl.F_FULLFSYNC. ({e})")
        print("    For data integrity, ArchiveBox requires a filesystem that supports atomic writes.")
        print("    Some filesystems and network drives don't implement FSYNC, and require workarounds.")

    os.chmod(path, int(OUTPUT_PERMISSIONS, base=8))
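
# Example usage (illustrative sketch only, not part of the original module;
# the file names and dict contents below are hypothetical):
#
#     atomic_write('index.json', {'version': '0.6.2', 'links': []})   # dicts are dumped as JSON
#     atomic_write('robots.txt', 'User-agent: *\nDisallow:\n')        # str/bytes are written as-is
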

@enforce_types
def chmod_file(path: str, cwd: str='.', permissions: str=OUTPUT_PERMISSIONS) -> None:
    """chmod -R <permissions> <cwd>/<path>"""

    root = Path(cwd) / path
    if not root.exists():
        raise Exception('Failed to chmod: {} does not exist (did the previous step fail?)'.format(path))

    if not root.is_dir():
        # apply the permissions that were passed in, not the global default
        os.chmod(root, int(permissions, base=8))
    else:
        # recurse under the resolved root (cwd/path), not the bare path
        for subpath in root.glob('**/*'):
            os.chmod(subpath, int(permissions, base=8))
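
# Example usage (illustrative sketch only, not part of the original module;
# the paths and permission string below are hypothetical):
#
#     chmod_file('index.html', cwd='archive/1612345678', permissions='755')
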

@enforce_types
def copy_and_overwrite(from_path: Union[str, Path], to_path: Union[str, Path]):
    """copy a given file or directory to a given path, overwriting the destination"""

    if Path(from_path).is_dir():
        shutil.rmtree(to_path, ignore_errors=True)
        shutil.copytree(from_path, to_path)
    else:
        with open(from_path, 'rb') as src:
            contents = src.read()
        atomic_write(to_path, contents)
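
# Example usage (illustrative sketch only, not part of the original module;
# the paths below are hypothetical):
#
#     copy_and_overwrite('archive/1612345678', 'archive/1612345678.bak')  # whole directory
#     copy_and_overwrite('static/robots.txt', 'output/robots.txt')        # single file
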

@enforce_types
def get_dir_size(path: Union[str, Path], recursive: bool=True, pattern: Optional[str]=None) -> Tuple[int, int, int]:
    """get the total disk size of a given directory, optionally summing up
       recursively and limiting to a given filter list
    """
    num_bytes, num_dirs, num_files = 0, 0, 0
    for entry in os.scandir(path):
        if (pattern is not None) and (pattern not in entry.path):
            continue
        if entry.is_dir(follow_symlinks=False):
            if not recursive:
                continue
            num_dirs += 1
            bytes_inside, dirs_inside, files_inside = get_dir_size(entry.path)
            num_bytes += bytes_inside
            num_dirs += dirs_inside
            num_files += files_inside
        else:
            num_bytes += entry.stat(follow_symlinks=False).st_size
            num_files += 1
    return num_bytes, num_dirs, num_files
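
# Example usage (illustrative sketch only, not part of the original module;
# the 'archive' directory name is hypothetical). Note that pattern only filters
# entries at the top level of path; recursive calls do not pass it down.
#
#     num_bytes, num_dirs, num_files = get_dir_size('archive')
#     num_bytes, _, _ = get_dir_size('archive', recursive=False, pattern='index.')
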

CRON_COMMENT = 'archivebox_schedule'


@enforce_types
def dedupe_cron_jobs(cron: CronTab) -> CronTab:
    deduped: Set[Tuple[str, str]] = set()

    for job in list(cron):
        unique_tuple = (str(job.slices), job.command)
        if unique_tuple not in deduped:
            deduped.add(unique_tuple)
        cron.remove(job)

    for schedule, command in deduped:
        job = cron.new(command=command, comment=CRON_COMMENT)
        job.setall(schedule)
        job.enable()

    return cron
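
# Example usage (illustrative sketch only, not part of the original module):
#
#     cron = CronTab(user=True)        # load the current user's crontab
#     cron = dedupe_cron_jobs(cron)    # drop duplicate schedule/command pairs
#     cron.write()                     # persist the cleaned-up crontab
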

class suppress_output(object):
    '''
    A context manager for doing a "deep suppression" of stdout and stderr in
    Python, i.e. it will suppress all print output, even if the print originates
    in a compiled C/Fortran sub-function.

    This will not suppress raised exceptions, since exceptions are printed
    to stderr just before a script exits, and after the context manager has
    exited (at least, I think that is why it lets exceptions through).

        with suppress_output():
            rogue_function()
    '''
    def __init__(self, stdout=True, stderr=True):
        # Open a pair of null files
        # Save the actual stdout (1) and stderr (2) file descriptors.
        self.stdout, self.stderr = stdout, stderr
        if stdout:
            self.null_stdout = os.open(os.devnull, os.O_RDWR)
            self.real_stdout = os.dup(1)
        if stderr:
            self.null_stderr = os.open(os.devnull, os.O_RDWR)
            self.real_stderr = os.dup(2)

    def __enter__(self):
        # Redirect the stdout (1) and stderr (2) file descriptors to the null files.
        if self.stdout:
            os.dup2(self.null_stdout, 1)
        if self.stderr:
            os.dup2(self.null_stderr, 2)

    def __exit__(self, *_):
        # Re-assign the real stdout/stderr back to (1) and (2)
        if self.stdout:
            os.dup2(self.real_stdout, 1)
            os.close(self.null_stdout)
        if self.stderr:
            os.dup2(self.real_stderr, 2)
            os.close(self.null_stderr)
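
# Example usage (illustrative sketch only, not part of the original module;
# 'noisy_call' is a hypothetical stand-in for any chatty code):
#
#     with suppress_output():
#         noisy_call()                     # all stdout/stderr goes to /dev/null
#
#     with suppress_output(stdout=False):  # silence only stderr, keep stdout visible
#         noisy_call()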