# test_init.py — integration tests for `archivebox init` and `archivebox add`
  1. # archivebox init
  2. # archivebox add
  3. import os
  4. import subprocess
  5. from pathlib import Path
  6. import json
  7. import sqlite3
  8. from archivebox.config import OUTPUT_PERMISSIONS
  9. from .fixtures import *
  10. def test_init(tmp_path, process):
  11. assert "Initializing a new ArchiveBox collection in this folder..." in process.stdout.decode("utf-8")
  12. def test_update(tmp_path, process):
  13. os.chdir(tmp_path)
  14. update_process = subprocess.run(['archivebox', 'init'], capture_output=True)
  15. assert "Updating existing ArchiveBox collection in this folder" in update_process.stdout.decode("utf-8")
  16. def test_add_link(tmp_path, process, disable_extractors_dict):
  17. disable_extractors_dict.update({"USE_WGET": "true"})
  18. os.chdir(tmp_path)
  19. add_process = subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'],
  20. capture_output=True, env=disable_extractors_dict)
  21. archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
  22. assert "index.json" in [x.name for x in archived_item_path.iterdir()]
  23. with open(archived_item_path / "index.json", "r") as f:
  24. output_json = json.load(f)
  25. assert "Example Domain" == output_json['history']['title'][0]['output']
  26. with open(tmp_path / "index.html", "r") as f:
  27. output_html = f.read()
  28. assert "Example Domain" in output_html
  29. def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
  30. disable_extractors_dict.update({"USE_WGET": "true"})
  31. os.chdir(tmp_path)
  32. stdin_process = subprocess.Popen(["archivebox", "add"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
  33. env=disable_extractors_dict)
  34. stdin_process.communicate(input="http://127.0.0.1:8080/static/example.com.html".encode())
  35. archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
  36. assert "index.json" in [x.name for x in archived_item_path.iterdir()]
  37. with open(archived_item_path / "index.json", "r") as f:
  38. output_json = json.load(f)
  39. assert "Example Domain" == output_json['history']['title'][0]['output']
  40. def test_correct_permissions_output_folder(tmp_path, process):
  41. index_files = ['index.json', 'index.html', 'index.sqlite3', 'archive']
  42. for file in index_files:
  43. file_path = tmp_path / file
  44. assert oct(file_path.stat().st_mode)[-3:] == OUTPUT_PERMISSIONS
  45. def test_correct_permissions_add_command_results(tmp_path, process, disable_extractors_dict):
  46. os.chdir(tmp_path)
  47. add_process = subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
  48. env=disable_extractors_dict)
  49. archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
  50. for path in archived_item_path.iterdir():
  51. assert oct(path.stat().st_mode)[-3:] == OUTPUT_PERMISSIONS
  52. def test_collision_urls_different_timestamps(tmp_path, process, disable_extractors_dict):
  53. os.chdir(tmp_path)
  54. subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
  55. env=disable_extractors_dict)
  56. subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/iana.org.html'], capture_output=True,
  57. env=disable_extractors_dict)
  58. archive_folders = [x.name for x in (tmp_path / "archive").iterdir()]
  59. first_archive = tmp_path / "archive" / str(min([float(folder) for folder in archive_folders]))
  60. json_index = str(first_archive / "index.json")
  61. with open(json_index, "r") as f:
  62. link_details = json.loads(f.read())
  63. link_details["url"] = "http://127.0.0.1:8080/static/iana.org.html"
  64. with open(json_index, "w") as f:
  65. json.dump(link_details, f)
  66. init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
  67. # 1 from duplicated url, 1 from corrupted index
  68. assert "Skipped adding 2 invalid link data directories" in init_process.stdout.decode("utf-8")
  69. assert init_process.returncode == 0
  70. def test_collision_timestamps_different_urls(tmp_path, process, disable_extractors_dict):
  71. os.chdir(tmp_path)
  72. subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
  73. env=disable_extractors_dict)
  74. subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/iana.org.html'], capture_output=True,
  75. env=disable_extractors_dict)
  76. archive_folders = [x.name for x in (tmp_path / "archive").iterdir()]
  77. first_archive = tmp_path / "archive" / str(min([float(folder) for folder in archive_folders]))
  78. archive_folders.remove(first_archive.name)
  79. json_index = str(first_archive / "index.json")
  80. with open(json_index, "r") as f:
  81. link_details = json.loads(f.read())
  82. link_details["timestamp"] = archive_folders[0]
  83. with open(json_index, "w") as f:
  84. json.dump(link_details, f)
  85. init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
  86. assert "Skipped adding 1 invalid link data directories" in init_process.stdout.decode("utf-8")
  87. assert init_process.returncode == 0
  88. def test_orphaned_folders(tmp_path, process, disable_extractors_dict):
  89. os.chdir(tmp_path)
  90. subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
  91. env=disable_extractors_dict)
  92. conn = sqlite3.connect("index.sqlite3")
  93. c = conn.cursor()
  94. c.execute("DELETE from core_snapshot")
  95. conn.commit()
  96. conn.close()
  97. init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
  98. assert "Added 1 orphaned links from existing JSON index" in init_process.stdout.decode("utf-8")
  99. assert init_process.returncode == 0
  100. def test_unrecognized_folders(tmp_path, process, disable_extractors_dict):
  101. os.chdir(tmp_path)
  102. subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
  103. env=disable_extractors_dict)
  104. (tmp_path / "archive" / "some_random_folder").mkdir()
  105. init_process = subprocess.run(['archivebox', 'init'], capture_output=True, env=disable_extractors_dict)
  106. assert "Skipped adding 1 invalid link data directories" in init_process.stdout.decode("utf-8")
  107. assert init_process.returncode == 0