From f52747b5168d39591451f2074873d7613d6024f1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89tienne=20Reuh=20Fildadut?=
Date: Thu, 9 Oct 2025 17:14:41 +0200
Subject: [PATCH] feat: handle item updates

---
 README.md |  2 +-
 main.py   | 25 ++++++++++++++++++-------
 2 files changed, 19 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 16fc1b4..c44714c 100644
--- a/README.md
+++ b/README.md
@@ -23,5 +23,5 @@
 - [x] Edge cases: mark as read during sync (if marked as read on server or not)
 - [x] Proper filename escaping
 - [ ] Command to force regenerate all HTML files (incl. recompute datetimes & paths)
-- [ ] Handle item updates
+- [x] Handle item updates
 - [ ] Actually think about the issues created by the duplicate warning
diff --git a/main.py b/main.py
index ef2bedf..022cb95 100644
--- a/main.py
+++ b/main.py
@@ -146,7 +146,7 @@ def synchronize_with_server(config, client_session):
     config.update_lock.touch()
 
     print("Synchronizing with server...")
-    new_items = 0
+    new_items, updated_items = 0, 0
     grabbed_item_paths = []
 
     folders = client_session.list_folders()
@@ -154,7 +154,7 @@ def synchronize_with_server(config, client_session):
         print(f" Updating folder {folder_name}")
 
         def process(item_ids):
-            nonlocal new_items, grabbed_item_paths
+            nonlocal new_items, updated_items, grabbed_item_paths
             if len(item_ids) > 0:
                 item_contents = client_session.get_stream_items_contents(item_ids=item_ids)
                 for item_content in item_contents.items:
@@ -177,13 +177,24 @@ def synchronize_with_server(config, client_session):
 
                     json_path = config.json_root / f"{ sha256(item_json["id"].encode("utf-8")).hexdigest() }.json"
                     grabbed_item_paths.append(json_path)
+
+                    write_files, updating = False, False
                     if not json_path.exists():
+                        write_files = True
+                        new_items += 1
+                    else:
+                        old_item_json = json.load(json_path.open("r"))
+                        if item_json["updated"] > old_item_json["updated"]:
+                            write_files, updating = True, True
+                            updated_items += 1
+
+                    if write_files:
                         # write JSON
                         with json_path.open("w") as f:
                             json.dump(item_json, f)
                         # write HTML
-                        generate_html_for_item(config, item_json)
-                        new_items += 1
+                        generate_html_for_item(config, item_json, regenerate=updating)
+
 
         continuation = None
         while continuation != '':
@@ -203,13 +214,13 @@ def synchronize_with_server(config, client_session):
             item_path.unlink()
             removed_items += 1
 
-    print(f"Synchronization successful ({new_items} new items, {removed_items} removed)")
+    print(f"Synchronization successful ({new_items} new items, {updated_items} updated, {removed_items} removed)")
     config.update_lock.unlink()
 
-def generate_html_for_item(config, item_json):
+def generate_html_for_item(config, item_json, regenerate=False):
     # Write HTML file for a JSON object
     html_path = config.html_root / item_json["html_path"]
-    if html_path.exists():
+    if html_path.exists() and not regenerate:
         print(f"WARNING: a file already exist for {html_path}. Either the feed has duplicate entries, or something has gone terribly wrong.")
     else:
         with html_path.open("w") as f:
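
Note: for readers who prefer plain code over a diff, the sketch below illustrates the flow this patch adds around `synchronize_with_server` and `generate_html_for_item`: compare the incoming item's `updated` field against the stored JSON copy, rewrite the JSON only when the server's revision is newer, and pass `regenerate=True` so the existing HTML file is overwritten instead of tripping the duplicate warning. It is a minimal standalone sketch, not the project's actual code: the `Config` dataclass, the `write_html` helper, and the demo items are hypothetical stand-ins, and a status string is returned in place of the patch's `new_items`/`updated_items` counters.

```python
# Illustrative sketch of the update-detection flow added by this patch.
# Config and write_html are hypothetical stand-ins; only the "compare
# 'updated' timestamps, rewrite JSON, regenerate HTML" logic mirrors main.py.
import json
from dataclasses import dataclass
from hashlib import sha256
from pathlib import Path


@dataclass
class Config:
    json_root: Path
    html_root: Path


def write_html(config: Config, item_json: dict, regenerate: bool = False) -> None:
    # Stand-in for generate_html_for_item(): skip existing files unless we
    # are deliberately regenerating an updated item.
    html_path = config.html_root / item_json["html_path"]
    if html_path.exists() and not regenerate:
        print(f"WARNING: a file already exists for {html_path}.")
        return
    html_path.write_text(item_json.get("content", ""), encoding="utf-8")


def store_item(config: Config, item_json: dict) -> str:
    # Returns "new", "updated", or "unchanged" for the given item.
    json_path = config.json_root / f"{sha256(item_json['id'].encode('utf-8')).hexdigest()}.json"

    write_files, updating = False, False
    if not json_path.exists():
        write_files = True                      # first time we see this item
    else:
        old_item_json = json.loads(json_path.read_text())
        if item_json["updated"] > old_item_json["updated"]:
            write_files, updating = True, True  # server has a newer revision

    if write_files:
        json_path.write_text(json.dumps(item_json))
        write_html(config, item_json, regenerate=updating)

    return "updated" if updating else ("new" if write_files else "unchanged")


if __name__ == "__main__":
    config = Config(json_root=Path("json"), html_root=Path("html"))
    config.json_root.mkdir(exist_ok=True)
    config.html_root.mkdir(exist_ok=True)

    item = {"id": "feed/1", "updated": 100, "html_path": "example.html", "content": "<p>v1</p>"}
    print(store_item(config, item))                                        # -> new
    print(store_item(config, item))                                        # -> unchanged
    print(store_item(config, {**item, "updated": 200, "content": "<p>v2</p>"}))  # -> updated
```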