Mirror of https://codeberg.org/Reuh/feather.git

feat: add daemon mode

Étienne Fildadut 2025-10-09 18:32:36 +02:00
parent 4cf6da5f0a
commit 4d5b9a78d9
3 changed files with 37 additions and 4 deletions

Changed file 1 of 3 (the project README):

@@ -8,7 +8,9 @@
 ### Updating with the server
-Call `feather update` to synchronize all local data with the server (read items, new items from the server, etc.).
+Call `feather sync` to synchronize all local data with the server (read items, new items from the server, etc.).
+`feather daemon`
 ### Configuration

@@ -37,7 +39,8 @@ After changing the configuration, you can call `feather regenerate` to regenerat
 ## TODO
 - [ ] Write documentation
-- [ ] Perform mark-as-read operation more often than sync (inotify, daemon, etc.)
+- [x] Perform mark-as-read operation more often than sync (inotify, daemon, etc.)
+- [ ] inotify might still be nice
 - [x] Make HTML filename configurable
 - [x] Make HTML template configurable
 - [ ] Nested categories
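With the default intervals introduced in the configuration change below (sync_down_every = 900, sync_up_every = 60), a session with the new command should look roughly like the following sketch; the startup line comes from the daemon() function added to main.py further down, the rest is illustrative:

```
$ feather daemon
Started in daemon mode; changes will be downloaded from the server every 900s and uploaded every 60s
```

The process then keeps synchronizing on those intervals until it is stopped; a KeyboardInterrupt (e.g. Ctrl+C) is caught and exits cleanly.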

Changed file 2 of 3 (configuration file):

@@ -64,3 +64,10 @@ timezone = "Etc/UTC"
 # This will be used in filenames so it's a good idea to use something sortable...
 # Can be set through the environment variable DATETIME_FORMAT.
 format = "%Y-%m-%d %H:%M"
+[daemon]
+# When running in daemon mode, feather will download changes from the server (new items, items read state) every <sync_down_every> seconds.
+sync_down_every = 900
+# When running in daemon mode, feather will upload local changes to the server (read items) every <sync_up_every> seconds.
+sync_up_every = 60
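With these defaults the daemon pushes the local read state about once a minute while only pulling new items and remote read state every 15 minutes. As a minimal sketch of what the new section looks like to a TOML parser, here it is read with tomllib (the same parser main.py imports); the file path and the fallback values are assumptions for illustration, since feather's own get_config() helper is not part of this diff:

```python
import tomllib

# Hypothetical config path; feather's real config location is not shown in this diff.
with open("config.toml", "rb") as f:
    config = tomllib.load(f)

# Fallbacks mirror the sample values above; how feather handles a missing
# [daemon] section is up to its get_config() helper, which is not shown here.
daemon_cfg = config.get("daemon", {})
sync_down_every = int(daemon_cfg.get("sync_down_every", 900))  # seconds between downloads
sync_up_every = int(daemon_cfg.get("sync_up_every", 60))       # seconds between uploads
print(f"pulling every {sync_down_every}s, pushing every {sync_up_every}s")
```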

Changed file 3 of 3: main.py (27 lines changed)

@@ -5,6 +5,7 @@ import google_reader
 import tomllib
 import sys
 import argparse
+import asyncio
 from datetime import datetime
 from zoneinfo import ZoneInfo
 from pathlib import Path
@@ -54,6 +55,8 @@ class Config:
         self.item_filename_template: Template = Template(str(get_config("html", "filename_template")), autoescape=False)
         self.max_filename_length: int = int(get_config("html", "max_filename_length"))
         self.filename_translation = str.maketrans(get_config("html", "filename_replacement"))
+        self.daemon_sync_up_every: int = int(get_config("daemon", "sync_up_every"))
+        self.daemon_sync_down_every: int = int(get_config("daemon", "sync_down_every"))
         # Computed config fields
         self.update_lock = self.json_root / "update.lock"
@@ -260,6 +263,20 @@ def synchronize_remote_changes(config, client_session):
     synchronize_with_server(config, client_session)
     remove_empty_html_directories(config)
+async def daemon_sync_up_loop(config, client_session):
+    while True:
+        synchronize_local_changes(config, client_session)
+        await asyncio.sleep(config.daemon_sync_up_every)
+async def daemon_sync_down_loop(config, client_session):
+    while True:
+        synchronize_remote_changes(config, client_session)
+        await asyncio.sleep(config.daemon_sync_down_every)
+async def daemon(config, client_session):
+    print(f"Started in daemon mode; changes will be downloaded from the server every {config.daemon_sync_down_every}s and uploaded every {config.daemon_sync_up_every}s")
+    async with asyncio.TaskGroup() as tg:
+        tg.create_task(daemon_sync_up_loop(config, client_session))
+        tg.create_task(daemon_sync_down_loop(config, client_session))
 def regenerate_files(config):
     for json_path in config.json_root.glob("*.json"):
         item_json = json.load(json_path.open("r"))
@@ -279,8 +296,8 @@ def main():
         description="file-based RSS reader"
     )
     parser.add_argument(
-        "action", choices=("sync", "sync-up", "sync-down", "regenerate"),
-        help="sync: perform a full synchronization with the server; sync-up: only synchronize local changes to the server (e.g. items read locally); sync-down: only synchronize remote change from the server (e.g. new items or items read from another device); regenerate: regenerate all HTML files from the local data"
+        "action", choices=("sync", "sync-up", "sync-down", "daemon", "regenerate"),
+        help="sync: perform a full synchronization with the server; sync-up: only synchronize local changes to the server (e.g. items read locally); sync-down: only synchronize remote change from the server (e.g. new items or items read from another device); daemon: start in daemon mode (will keep performing synchronizations periodically until process is stopped); regenerate: regenerate all HTML files from the local data"
     )
     args = parser.parse_args()
@@ -294,6 +311,12 @@ def main():
     elif args.action == "sync-down":
         client_session = ClientSession(config)
         synchronize_remote_changes(config, client_session)
+    elif args.action == "daemon":
+        client_session = ClientSession(config)
+        try:
+            asyncio.run(daemon(config, client_session))
+        except KeyboardInterrupt:
+            pass
     elif args.action == "regenerate":
         regenerate_files(config)