diff --git a/archivebox/index/__init__.py b/archivebox/index/__init__.py
index a496e03c..7c85c0ea 100644
--- a/archivebox/index/__init__.py
+++ b/archivebox/index/__init__.py
@@ -225,7 +225,7 @@ def timed_index_update(out_path: Path):
 
 @enforce_types
 def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR, finished: bool=False) -> None:
-    """create index.html file for a given list of links"""
+    """Writes links to sqlite3 file for a given list of links"""
 
     log_indexing_process_started(len(links))
 
@@ -234,8 +234,6 @@ def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR, finished: bool
             write_sql_main_index(links, out_dir=out_dir)
             os.chmod(out_dir / SQL_INDEX_FILENAME, int(OUTPUT_PERMISSIONS, base=8)) # set here because we don't write it with atomic writes
 
-        if finished:
-            write_static_index(links, out_dir=out_dir)
     except (KeyboardInterrupt, SystemExit):
         stderr('[!] Warning: Still writing index to disk...', color='lightyellow')
         stderr('    Run archivebox init to fix any inconsisntencies from an ungraceful exit.')
@@ -246,13 +244,6 @@ def write_main_index(links: List[Link], out_dir: Path=OUTPUT_DIR, finished: bool
 
     log_indexing_process_finished()
 
-@enforce_types
-def write_static_index(links: List[Link], out_dir: Path=OUTPUT_DIR) -> None:
-    with timed_index_update(out_dir / JSON_INDEX_FILENAME):
-        write_json_main_index(links)
-    with timed_index_update(out_dir / HTML_INDEX_FILENAME):
-        write_html_main_index(links, out_dir=out_dir, finished=True)
-
 @enforce_types
 def get_empty_snapshot_queryset(out_dir: Path=OUTPUT_DIR):
     setup_django(out_dir, check_db=True)
diff --git a/archivebox/main.py b/archivebox/main.py
index b7ce4034..918098c8 100644
--- a/archivebox/main.py
+++ b/archivebox/main.py
@@ -31,7 +31,6 @@ from .index import (
     parse_links_from_source,
     dedupe_links,
     write_main_index,
-    write_static_index,
     snapshot_filter,
     get_indexed_folders,
     get_archived_folders,
@@ -561,10 +560,7 @@ def add(urls: Union[str, List[str]],
         archive_links(imported_links, overwrite=True, out_dir=out_dir)
     elif new_links:
         archive_links(new_links, overwrite=False, out_dir=out_dir)
-    else:
-        return all_links
 
-    write_static_index([link.as_link_with_details() for link in all_links], out_dir=out_dir)
     return all_links
 
 @enforce_types
@@ -641,7 +637,6 @@ def remove(filter_str: Optional[str]=None,
 
     remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
     all_snapshots = load_main_index(out_dir=out_dir)
-    write_static_index([link.as_link_with_details() for link in all_snapshots], out_dir=out_dir)
     log_removal_finished(all_snapshots.count(), to_remove)
 
     return all_snapshots
@@ -698,7 +693,6 @@ def update(resume: Optional[float]=None,
 
     # Step 4: Re-write links index with updated titles, icons, and resources
     all_links = load_main_index(out_dir=out_dir)
-    write_static_index([link.as_link_with_details() for link in all_links], out_dir=out_dir)
     return all_links
 
 @enforce_types
diff --git a/tests/test_add.py b/tests/test_add.py
index 493864f1..5e672e8d 100644
--- a/tests/test_add.py
+++ b/tests/test_add.py
@@ -1,5 +1,6 @@
 import subprocess
 import json
+import sqlite3
 
 from .fixtures import *
 
@@ -43,11 +44,16 @@ def test_depth_flag_1_crawls_the_page_AND_links(tmp_path, process, disable_extra
         capture_output=True,
         env=disable_extractors_dict,
     )
-
-    with open(tmp_path / "index.json", "r") as f:
-        archive_file = f.read()
-    assert "http://127.0.0.1:8080/static/example.com.html" in archive_file
-    assert "http://127.0.0.1:8080/static/iana.org.html" in archive_file
+
+    conn = sqlite3.connect("index.sqlite3")
+    c = conn.cursor()
+    urls = c.execute("SELECT url from core_snapshot").fetchall()
+    conn.commit()
+    conn.close()
+
+    urls = list(map(lambda x: x[0], urls))
+    assert "http://127.0.0.1:8080/static/example.com.html" in urls
+    assert "http://127.0.0.1:8080/static/iana.org.html" in urls
 
 
 def test_overwrite_flag_is_accepted(process, disable_extractors_dict):
@@ -71,6 +77,8 @@ def test_add_updates_history_json_index(tmp_path, process, disable_extractors_di
         env=disable_extractors_dict,
     )
 
-    with open(tmp_path / "index.json", "r") as f:
+    archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
+
+    with open(archived_item_path / "index.json", "r") as f:
         output_json = json.load(f)
-    assert output_json["links"][0]["history"] != {}
\ No newline at end of file
+    assert output_json["history"] != {}
\ No newline at end of file
diff --git a/tests/test_init.py b/tests/test_init.py
index ae07e5da..86c1322d 100644
--- a/tests/test_init.py
+++ b/tests/test_init.py
@@ -32,10 +32,11 @@ def test_add_link(tmp_path, process, disable_extractors_dict):
         output_json = json.load(f)
     assert "Example Domain" == output_json['history']['title'][0]['output']
 
-    with open(tmp_path / "index.html", "r") as f:
+    with open(archived_item_path / "index.html", "r") as f:
         output_html = f.read()
     assert "Example Domain" in output_html
 
+
 def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
     disable_extractors_dict.update({"USE_WGET": "true"})
     os.chdir(tmp_path)
@@ -51,7 +52,7 @@ def test_add_link_support_stdin(tmp_path, process, disable_extractors_dict):
     assert "Example Domain" == output_json['history']['title'][0]['output']
 
 def test_correct_permissions_output_folder(tmp_path, process):
-    index_files = ['index.json', 'index.html', 'index.sqlite3', 'archive']
+    index_files = ['index.sqlite3', 'archive']
     for file in index_files:
         file_path = tmp_path / file
         assert oct(file_path.stat().st_mode)[-3:] == OUTPUT_PERMISSIONS
@@ -113,6 +114,9 @@ def test_orphaned_folders(tmp_path, process, disable_extractors_dict):
     os.chdir(tmp_path)
     subprocess.run(['archivebox', 'add', 'http://127.0.0.1:8080/static/example.com.html'], capture_output=True, env=disable_extractors_dict)
 
+    list_process = subprocess.run(["archivebox", "list", "--json", "--with-headers"], capture_output=True)
+    with open(tmp_path / "index.json", "wb") as f:
+        f.write(list_process.stdout)
     conn = sqlite3.connect("index.sqlite3")
     c = conn.cursor()
     c.execute("DELETE from core_snapshot")
diff --git a/tests/test_title.py b/tests/test_title.py
index 24b2cc28..0d7354e6 100644
--- a/tests/test_title.py
+++ b/tests/test_title.py
@@ -6,10 +6,8 @@ def test_title_is_htmlencoded_in_index_html(tmp_path, process, disable_extractor
     Unencoded content should not be rendered as it facilitates xss injections
     and breaks the layout.
     """
-    add_process = subprocess.run(['archivebox', 'add', 'http://localhost:8080/static/title_with_html.com.html'],
+    subprocess.run(['archivebox', 'add', 'http://localhost:8080/static/title_with_html.com.html'],
                                  capture_output=True, env=disable_extractors_dict)
+    list_process = subprocess.run(["archivebox", "list", "--html"], capture_output=True)
 
-    with open(tmp_path / "index.html", "r") as f:
-        output_html = f.read()
-
-    assert "