fix: Save history in main index (to mimic previous behaviour)

Authored by Cristian on 2020-08-28 11:08:03 -05:00; committed by Cristian Vargas
parent 7e9d195d13
commit 2aa8d69b72
3 changed files with 19 additions and 4 deletions


@@ -47,6 +47,10 @@ class Snapshot(models.Model):
     def as_link(self) -> Link:
         return Link.from_json(self.as_json())
 
+    def as_link_with_details(self) -> Link:
+        from ..index import load_link_details
+        return load_link_details(self.as_link())
+
     @cached_property
     def bookmarked(self):
         return parse_date(self.timestamp)
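
For context, as_link() builds the Link purely from the database row, while the new as_link_with_details() runs it through load_link_details, which (going by the commit message and the regression test below) is what pulls the per-extractor archive history back in from the snapshot's folder on disk. A quick way to observe the difference is from a Django shell inside an ArchiveBox data directory, e.g. via archivebox manage shell; only the method names below come from the diff, the rest is an illustrative assumption:

# Illustrative only: assumes at least one snapshot exists in the data directory.
from core.models import Snapshot   # import path assumed from the Django project layout

snapshot = Snapshot.objects.first()
bare = snapshot.as_link()                   # built from the DB row alone
detailed = snapshot.as_link_with_details()  # also loads the snapshot's on-disk details

print(bare.history)      # expected to be empty -- details were never loaded
print(detailed.history)  # expected to hold per-extractor results, e.g. 'favicon', 'wget'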


@@ -560,8 +560,8 @@ def add(urls: Union[str, List[str]],
         archive_links(new_links, overwrite=False, out_dir=out_dir)
     else:
         return all_links
-    write_static_index([link.as_link() for link in all_links], out_dir=out_dir)
+    write_static_index([link.as_link_with_details() for link in all_links], out_dir=out_dir)
     return all_links
 
 @enforce_types
@@ -638,7 +638,7 @@ def remove(filter_str: Optional[str]=None,
     remove_from_sql_main_index(snapshots=snapshots, out_dir=out_dir)
     all_snapshots = load_main_index(out_dir=out_dir)
-    write_static_index([link.as_link() for link in all_snapshots], out_dir=out_dir)
+    write_static_index([link.as_link_with_details() for link in all_snapshots], out_dir=out_dir)
     log_removal_finished(all_snapshots.count(), to_remove)
 
     return all_snapshots
@@ -695,7 +695,7 @@ def update(resume: Optional[float]=None,
     # Step 4: Re-write links index with updated titles, icons, and resources
     all_links = load_main_index(out_dir=out_dir)
-    write_static_index([link.as_link() for link in all_links], out_dir=out_dir)
+    write_static_index([link.as_link_with_details() for link in all_links], out_dir=out_dir)
     return all_links
 
 @enforce_types
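
All three call sites above follow the same pattern: reload the snapshots from the SQL main index, expand each one into a Link with its details attached, and hand the result to write_static_index so the generated static index (the index.json checked by the test below) keeps the archive history. A minimal sketch of that pattern; the wrapper name is made up, and it assumes main.py's existing imports of load_main_index and write_static_index are in scope:

# Hypothetical wrapper, for illustration only -- not a function that exists in main.py.
def rewrite_main_index(out_dir: str) -> None:
    snapshots = load_main_index(out_dir=out_dir)                             # Snapshot queryset from the DB
    detailed = [snapshot.as_link_with_details() for snapshot in snapshots]   # re-attach on-disk history
    write_static_index(detailed, out_dir=out_dir)                            # regenerate the static index files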


@@ -63,3 +63,14 @@ def test_overwrite_flag_is_accepted(process, disable_extractors_dict):
     )
     assert 'unrecognized arguments: --overwrite' not in arg_process.stderr.decode("utf-8")
     assert 'favicon' in arg_process.stdout.decode('utf-8'), 'archive methods probably didnt run, did overwrite work?'
+
+def test_add_updates_history_json_index(tmp_path, process, disable_extractors_dict):
+    subprocess.run(
+        ["archivebox", "add", "--depth=0", "http://127.0.0.1:8080/static/example.com.html"],
+        capture_output=True,
+        env=disable_extractors_dict,
+    )
+
+    with open(tmp_path / "index.json", "r") as f:
+        output_json = json.load(f)
+    assert output_json["links"][0]["history"] != {}
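
For a manual sanity check outside pytest, the same condition the new test asserts can be inspected directly; a minimal sketch, assuming it is run from inside an ArchiveBox data directory where at least one extractor has produced results:

# Mirrors the assertion in the test above, against a real data directory's index.json.
import json

with open("index.json") as f:
    main_index = json.load(f)

first_link = main_index["links"][0]
print(first_link["url"])
print(first_link["history"])   # should no longer be {} now that details are written to the main index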