diff --git a/README.md b/README.md
index 20153dc..4db6727 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ After changing the configuration, you can call `feather regenerate` to regenerat
 
 ### Docker
 
-`podman run -d -v ./config.toml:/feather/config.toml -v ./data:/feather/data -v ./reader:/feather/reader --name feather feather daemon`
+`podman run -d -v ./config.toml:/feather/config.toml -v feather-data:/feather/data -v ./reader:/feather/reader --name feather feather daemon`
 
 ### Raw
 
@@ -52,3 +52,4 @@ After changing the configuration, you can call `feather regenerate` to regenerat
 - [x] Command to force regenerate all HTML files (incl. recompute datetimes & paths)
 - [x] Handle item updates
 - [ ] Actually think about the issues created by the duplicate warning
+- [ ] Set generated files creation/modification date instead of putting date in filename
diff --git a/config.default.toml b/config.default.toml
index ff3c8a3..07d22c4 100644
--- a/config.default.toml
+++ b/config.default.toml
@@ -34,7 +34,7 @@ template = '''
-
+
 
 {{ published_formatted }} {{ origin_title }}
diff --git a/main.py b/main.py
index 21012ec..82c6fc6 100755
--- a/main.py
+++ b/main.py
@@ -8,6 +8,7 @@ import tomllib
 import sys
 import argparse
 import asyncio
+import signal
 from datetime import datetime
 from zoneinfo import ZoneInfo
 from pathlib import Path
@@ -276,8 +277,12 @@ async def daemon_sync_down_loop(config, client_session):
 async def daemon(config, client_session):
     print(f"Started in daemon mode; changes will be downloaded from the server every {config.daemon_sync_down_every}s and uploaded every {config.daemon_sync_up_every}s")
     async with asyncio.TaskGroup() as tg:
-        tg.create_task(daemon_sync_up_loop(config, client_session))
-        tg.create_task(daemon_sync_down_loop(config, client_session))
+        tup = tg.create_task(daemon_sync_up_loop(config, client_session))
+        tdown = tg.create_task(daemon_sync_down_loop(config, client_session))
+        def cancel_tasks():
+            tup.cancel()
+            tdown.cancel()
+        asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, cancel_tasks)
 
 def regenerate_files(config):
     for json_path in config.json_root.glob("*.json"):
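
For reference, the shutdown pattern the main.py hunk introduces can be exercised on its own: a SIGTERM handler cancels the two tasks created inside the `asyncio.TaskGroup`, and because a `TaskGroup` does not treat externally cancelled children as errors, the `async with` block then exits cleanly. The sketch below is a minimal standalone illustration under those assumptions, not feather's actual code; the `worker` coroutine, its names, and the intervals are placeholders.

```python
import asyncio
import signal


async def worker(name: str, interval: float) -> None:
    # Stand-in for daemon_sync_up_loop / daemon_sync_down_loop:
    # loop forever until the task is cancelled.
    while True:
        print(f"{name}: syncing")
        await asyncio.sleep(interval)


async def main() -> None:
    async with asyncio.TaskGroup() as tg:
        tup = tg.create_task(worker("up", 2.0))
        tdown = tg.create_task(worker("down", 3.0))

        def cancel_tasks() -> None:
            # TaskGroup ignores children that were cancelled from outside,
            # so cancelling both lets the async with block finish normally.
            tup.cancel()
            tdown.cancel()

        # add_signal_handler is only available on Unix event loops; on SIGTERM
        # (e.g. from `podman stop`) both loops are cancelled and the daemon exits.
        asyncio.get_running_loop().add_signal_handler(signal.SIGTERM, cancel_tasks)


if __name__ == "__main__":
    asyncio.run(main())
```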