# test_add.py — integration tests for the `archivebox add` CLI command
# (depth crawling, --overwrite, --extract, and index updates).
  1. import subprocess
  2. import json
  3. import sqlite3
  4. from .fixtures import *
  5. def test_depth_flag_is_accepted(process, disable_extractors_dict):
  6. arg_process = subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--depth=0"],
  7. capture_output=True, env=disable_extractors_dict)
  8. assert 'unrecognized arguments: --depth' not in arg_process.stderr.decode("utf-8")
  9. def test_depth_flag_fails_if_it_is_not_0_or_1(process, disable_extractors_dict):
  10. arg_process = subprocess.run(
  11. ["archivebox", "add", "--depth=5", "http://127.0.0.1:8080/static/example.com.html"],
  12. capture_output=True,
  13. env=disable_extractors_dict,
  14. )
  15. assert 'invalid choice' in arg_process.stderr.decode("utf-8")
  16. arg_process = subprocess.run(
  17. ["archivebox", "add", "--depth=-1", "http://127.0.0.1:8080/static/example.com.html"],
  18. capture_output=True,
  19. env=disable_extractors_dict,
  20. )
  21. assert 'invalid choice' in arg_process.stderr.decode("utf-8")
  22. def test_depth_flag_0_crawls_only_the_arg_page(tmp_path, process, disable_extractors_dict):
  23. arg_process = subprocess.run(
  24. ["archivebox", "add", "--depth=0", "http://127.0.0.1:8080/static/example.com.html"],
  25. capture_output=True,
  26. env=disable_extractors_dict,
  27. )
  28. archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
  29. with open(archived_item_path / "index.json", "r", encoding='utf-8') as f:
  30. output_json = json.load(f)
  31. assert output_json["base_url"] == "127.0.0.1:8080/static/example.com.html"
  32. def test_depth_flag_1_crawls_the_page_AND_links(tmp_path, process, disable_extractors_dict):
  33. arg_process = subprocess.run(
  34. ["archivebox", "add", "--depth=1", "http://127.0.0.1:8080/static/example.com.html"],
  35. capture_output=True,
  36. env=disable_extractors_dict,
  37. )
  38. conn = sqlite3.connect("index.sqlite3")
  39. c = conn.cursor()
  40. urls = c.execute("SELECT url from core_snapshot").fetchall()
  41. conn.commit()
  42. conn.close()
  43. urls = list(map(lambda x: x[0], urls))
  44. assert "http://127.0.0.1:8080/static/example.com.html" in urls
  45. assert "http://127.0.0.1:8080/static/iana.org.html" in urls
  46. def test_overwrite_flag_is_accepted(process, disable_extractors_dict):
  47. subprocess.run(
  48. ["archivebox", "add", "--depth=0", "http://127.0.0.1:8080/static/example.com.html"],
  49. capture_output=True,
  50. env=disable_extractors_dict,
  51. )
  52. arg_process = subprocess.run(
  53. ["archivebox", "add", "--overwrite", "http://127.0.0.1:8080/static/example.com.html"],
  54. capture_output=True,
  55. env=disable_extractors_dict,
  56. )
  57. assert 'unrecognized arguments: --overwrite' not in arg_process.stderr.decode("utf-8")
  58. assert 'favicon' in arg_process.stdout.decode('utf-8'), 'archive methods probably didnt run, did overwrite work?'
  59. def test_add_updates_history_json_index(tmp_path, process, disable_extractors_dict):
  60. subprocess.run(
  61. ["archivebox", "add", "--depth=0", "http://127.0.0.1:8080/static/example.com.html"],
  62. capture_output=True,
  63. env=disable_extractors_dict,
  64. )
  65. archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
  66. with open(archived_item_path / "index.json", "r", encoding="utf-8") as f:
  67. output_json = json.load(f)
  68. assert output_json["history"] != {}
  69. def test_extract_input_uses_only_passed_extractors(tmp_path, process):
  70. subprocess.run(["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--extract", "wget"],
  71. capture_output=True)
  72. archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
  73. assert (archived_item_path / "warc").exists()
  74. assert not (archived_item_path / "singlefile.html").exists()