
feat: Remove index.json and index.html generation from the regular process

Cristian 5 роки тому
батько
коміт
ae1484b8bf
5 змінених файлів з 25 додано та 30 видалено
  1. + 1 - 10 archivebox/index/__init__.py
  2. + 0 - 6 archivebox/main.py
  3. + 15 - 7 tests/test_add.py
  4. + 6 - 2 tests/test_init.py
  5. + 3 - 5 tests/test_title.py

+ 1 - 10
archivebox/index/__init__.py

@@ -225,7 +225,7 @@ def timed_index_update(out_path: Path):
 
 @enforce_types
 def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR, finished: bool=False) -> None:
-    """create index.html file for a given list of links"""
+    """Writes links to sqlite3 file for a given list of links"""
 
     log_indexing_process_started(len(links))
 
@@ -234,8 +234,6 @@ def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR, finished: bool
             write_sql_main_index(links, out_dir=out_dir)
             os.chmod(out_dir / SQL_INDEX_FILENAME, int(OUTPUT_PERMISSIONS, base=8)) # set here because we don't write it with atomic writes
 
-        if finished:
-            write_static_index(links, out_dir=out_dir)
     except (KeyboardInterrupt, SystemExit):
         stderr('[!] Warning: Still writing index to disk...', color='lightyellow')
     stderr('    Run archivebox init to fix any inconsistencies from an ungraceful exit.')
@@ -246,13 +244,6 @@ def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR, finished: bool
 
     log_indexing_process_finished()
 
-@enforce_types
-def write_static_index(links: List[Link], out_dir: Path=OUTPUT_DIR) -> None:
-    with timed_index_update(out_dir / JSON_INDEX_FILENAME):
-        write_json_main_index(links)
-    with timed_index_update(out_dir / HTML_INDEX_FILENAME):
-        write_html_main_index(links, out_dir=out_dir, finished=True)
-
 @enforce_types
 def get_empty_snapshot_queryset(out_dir: Path=OUTPUT_DIR):
     setup_django(out_dir, check_db=True)
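
With write_static_index gone from the regular flow, write_main_index now only maintains the sqlite3 database. A minimal sketch (not part of this commit) of reading that index back, assuming the index.sqlite3 filename and core_snapshot table that the updated tests below query directly:

import sqlite3
from pathlib import Path

def snapshot_urls(out_dir: Path) -> list:
    """Return every archived URL stored in the sqlite3 main index."""
    conn = sqlite3.connect(str(out_dir / "index.sqlite3"))
    try:
        # core_snapshot is the table the tests in this commit query directly
        return [row[0] for row in conn.execute("SELECT url FROM core_snapshot")]
    finally:
        conn.close()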

+ 0 - 6
archivebox/main.py

@@ -31,7 +31,6 @@ from .index import (
     parse_links_from_source,
     dedupe_links,
     write_main_index,
-    write_static_index,
     snapshot_filter,
     get_indexed_folders,
     get_archived_folders,
@@ -561,10 +560,7 @@ def add(urls: Union[str, List[str]],
         archive_links(imported_links, overwrite=True, out_dir=out_dir)
     elif new_links:
         archive_links(new_links, overwrite=False, out_dir=out_dir)
-    else:
-        return all_links
     
-    write_static_index([link.as_link_with_details() for link in all_links], out_dir=out_dir)
     return all_links
 
 @enforce_types
@@ -641,7 +637,6 @@ def remove(filter_str: Optional[str]=None,
 
     remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
     all_snapshots = load_main_index(out_dir=out_dir)
-    write_static_index([link.as_link_with_details() for link in all_snapshots], out_dir=out_dir)
     log_removal_finished(all_snapshots.count(), to_remove)
     
     return all_snapshots
@@ -698,7 +693,6 @@ def update(resume: Optional[float]=None,
 
     # Step 4: Re-write links index with updated titles, icons, and resources
     all_links = load_main_index(out_dir=out_dir)
-    write_static_index([link.as_link_with_details() for link in all_links], out_dir=out_dir)
     return all_links
 
 @enforce_types
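
Since add, remove, and update no longer regenerate index.json and index.html, a static export now has to be requested explicitly. A hedged sketch of doing that with `archivebox list`, mirroring the subprocess pattern the updated tests use (the --html/--with-headers combination is assumed to behave like the --json one exercised in test_init.py):

import subprocess
from pathlib import Path

def export_static_index(out_dir: Path) -> None:
    """Regenerate index.json and index.html on demand via `archivebox list`."""
    for flag, filename in (("--json", "index.json"), ("--html", "index.html")):
        result = subprocess.run(
            ["archivebox", "list", flag, "--with-headers"],
            capture_output=True,
            cwd=out_dir,  # run inside the archive output directory
        )
        (out_dir / filename).write_bytes(result.stdout)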

+ 15 - 7
tests/test_add.py

@@ -1,5 +1,6 @@
 import subprocess
 import json
+import sqlite3
 
 from .fixtures import *
 
@@ -43,11 +44,16 @@ def test_depth_flag_1_crawls_the_page_AND_links(tmp_path, process, disable_extra
         capture_output=True,
         env=disable_extractors_dict,
     )
-    
-    with open(tmp_path / "index.json", "r") as f:
-        archive_file = f.read()
-    assert "http://127.0.0.1:8080/static/example.com.html" in archive_file
-    assert "http://127.0.0.1:8080/static/iana.org.html" in archive_file
+
+    conn = sqlite3.connect("index.sqlite3")
+    c = conn.cursor()
+    urls = c.execute("SELECT url from core_snapshot").fetchall()
+    conn.commit()
+    conn.close()
+
+    urls = list(map(lambda x: x[0], urls))
+    assert "http://127.0.0.1:8080/static/example.com.html" in urls 
+    assert "http://127.0.0.1:8080/static/iana.org.html" in urls
 
 
 def test_overwrite_flag_is_accepted(process, disable_extractors_dict):
@@ -71,6 +77,8 @@ def test_add_updates_history_json_index(tmp_path, process, disable_extractors_di
         env=disable_extractors_dict,
     )
 
-    with open(tmp_path / "index.json", "r") as f:
+    archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
+
+    with open(archived_item_path / "index.json", "r") as f:
         output_json = json.load(f)
-    assert output_json["links"][0]["history"] != {}
+    assert output_json["history"] != {}
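
The history assertion now reads the per-snapshot index.json under archive/<timestamp>/ instead of the removed top-level one. A small hypothetical helper (not in the repo) for that lookup, assuming a single snapshot as in the test:

import json
from pathlib import Path

def load_snapshot_details(tmp_path: Path) -> dict:
    """Load the per-snapshot index.json of the first archived item."""
    archived_item_path = next(tmp_path.glob("archive/*"))  # first snapshot dir
    with open(archived_item_path / "index.json") as f:
        return json.load(f)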

+ 6 - 2
tests/test_init.py

@@ -32,10 +32,11 @@ def test_add_link(tmp_path, process, disable_extractors_dict):
         output_json = json.load(f)
     assert "Example Domain" == output_json['history']['title'][0]['output']
 
-    with open(tmp_path / "index.html", "r") as f:
+    with open(archived_item_path / "index.html", "r") as f:
         output_html = f.read()
     assert "Example Domain" in output_html
 
+
 def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
     disable_extractors_dict.update({"USE_WGET": "true"})
     os.chdir(tmp_path)
@@ -51,7 +52,7 @@ def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
     assert "Example Domain" == output_json['history']['title'][0]['output']
 
 def test_correct_permissions_output_folder(tmp_path, process):
-    index_files = ['index.json', 'index.html', 'index.sqlite3', 'archive']
+    index_files = ['index.sqlite3', 'archive']
     for file in index_files:
         file_path = tmp_path / file
         assert oct(file_path.stat().st_mode)[-3:] == OUTPUT_PERMISSIONS
@@ -113,6 +114,9 @@ def test_orphaned_folders(tmp_path, process, disable_extractors_dict):
     os.chdir(tmp_path)
     subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True,
                      env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--json", "--with-headers"], capture_output=True)
+    with open(tmp_path / "index.json", "wb") as f:
+        f.write(list_process.stdout)
     conn = sqlite3.connect("index.sqlite3")
     c = conn.cursor()
     c.execute("DELETE from core_snapshot")

+ 3 - 5
tests/test_title.py

@@ -6,10 +6,8 @@ def test_title_is_htmlencoded_in_index_html(tmp_path, process, disable_extractor
     Unencoded content should not be rendered as it facilitates xss injections
     and breaks the layout.
     """
-    add_process = subprocess.run(['archivebox', 'add', 'http://localhost:8080/static/title_with_html.com.html'],
+    subprocess.run(['archivebox', 'add', 'http://localhost:8080/static/title_with_html.com.html'],
                                  capture_output=True, env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--html"], capture_output=True)
 
-    with open(tmp_path / "index.html", "r") as f:
-        output_html = f.read()
-
-    assert "<textarea>" not in output_html
+    assert "<textarea>" not in list_process.stdout.decode("utf-8")