diff --git a/README.md b/README.md index 521cd8d..db09818 100644 --- a/README.md +++ b/README.md @@ -8,19 +8,19 @@ start with pictures/gif each time Directories, sorting/searching by date/title (a.k.a. using a file manager) -### Reading an item +### Reading an article -opening an item +opening an article -### Marking items as read +### Marking articles as read Delete -See read items in the trash can +See read articles in the trash can ### Updating with the server -Call `feather sync` to synchronize all local data with the server (read items, new items from the server, etc.). +Call `feather sync` to synchronize all local data with the server (read articles, new articles from the server, etc.). `feather daemon` diff --git a/src/feather/cli.py b/src/feather/cli.py index b15f9ca..ae9a75a 100644 --- a/src/feather/cli.py +++ b/src/feather/cli.py @@ -13,8 +13,9 @@ def main(): description="file-based RSS reader client" ) parser.add_argument( - "action", choices=("sync", "sync-up", "sync-down", "daemon", "regenerate", "clear-data"), - help="sync: perform a full synchronization with the server; sync-up: only synchronize local changes to the server (e.g. items read locally); sync-down: only synchronize remote change from the server (e.g. new items or items read from another device); daemon: start in daemon mode (will keep performing synchronizations periodically until process is stopped); regenerate: regenerate all HTML files from the local data; clear-data: remove all local data" + "action", + choices=("sync", "sync-up", "sync-down", "daemon", "regenerate", "clear-data"), + help="sync: perform a full synchronization with the server; sync-up: only synchronize local changes to the server (e.g. articles read locally); sync-down: only synchronize remote changes from the server (e.g. 
new articles or articles read from another device); daemon: start in daemon mode (will keep performing synchronizations periodically until process is stopped); regenerate: regenerate all HTML files from the local data; clear-data: remove all local data", ) args = parser.parse_args() diff --git a/src/feather/client.py b/src/feather/client.py index 4a804e3..30867c1 100644 --- a/src/feather/client.py +++ b/src/feather/client.py @@ -14,7 +14,7 @@ class ClientSession(ABC): config: Config @abstractmethod - def mark_as_read(self, item_ids: list[ArticleId]): + def mark_as_read(self, articles_ids: list[ArticleId]): """Mark all the given articles as read.""" pass @@ -41,8 +41,8 @@ class GReaderSession(ClientSession): self.auth_token = self.greader.login(config.server_user, config.server_password) self.csrf_token = self.greader.get_token(self.auth_token) - def mark_as_read(self, item_ids: list[ArticleId]): - self.greader.edit_tags(self.auth_token, self.csrf_token, item_ids=item_ids, add_tags=[google_reader.STREAM_READ]) + def mark_as_read(self, articles_ids: list[ArticleId]): + self.greader.edit_tags(self.auth_token, self.csrf_token, item_ids=articles_ids, add_tags=[google_reader.STREAM_READ]) def list_categories(self) -> list[Category]: categories = [tag for tag in self.greader.list_tags(self.auth_token) if tag.type == "folder"] @@ -95,8 +95,8 @@ class TTRSession(ClientSession): self.ttrss.login() self.feeds = {} - def mark_as_read(self, item_ids: list[ArticleId]): - self.ttrss.mark_read(item_ids) + def mark_as_read(self, articles_ids: list[ArticleId]): + self.ttrss.mark_read(articles_ids) def list_categories(self) -> list[Category]: self.feeds = {} diff --git a/src/feather/config.default.toml b/src/feather/config.default.toml index 55aff63..45e785b 100644 --- a/src/feather/config.default.toml +++ b/src/feather/config.default.toml @@ -16,7 +16,7 @@ user = "username" # (Required) Password/API password used to connect to the server # Can be set through the environment variable 
SERVER_PASSWORD. password = "password" -# How many items to retrieve at most from the server in a single request. Lower values will make synchronization slower, higher values might make the server complain. +# How many articles to retrieve at most from the server in a single request. Lower values will make synchronization slower, higher values might make the server complain. # If you are missing articles after a sync, it might be because this value is too high. # If you are using the Google Reader API: servers should be okay with up to 1000. # If you are using the ttrss API: servers should be okay with up to 200. @@ -123,8 +123,8 @@ timezone = "Etc/UTC" format = "%Y-%m-%d %H:%M" [daemon] -# When running in daemon mode, feather will download changes from the server (new items, items read state) every seconds. +# When running in daemon mode, feather will download changes from the server (new articles, articles read state) every `sync_down_every` seconds. sync_down_every = 900 -# When running in daemon mode, feather will upload local changes to the server (read items) every seconds. +# When running in daemon mode, feather will upload local changes to the server (read articles) every `sync_up_every` seconds. 
sync_up_every = 60 diff --git a/src/feather/data.py b/src/feather/data.py index 9e4cb2f..abb3e99 100644 --- a/src/feather/data.py +++ b/src/feather/data.py @@ -112,12 +112,12 @@ class Article(ABC): def write_json(self): stored_fields = ("id", "unread", "title", "published", "published_formatted", "updated", "updated_formatted", "author", "summary", "content", "feed_title", "feed_url", "feed_icon_url", "feed_order", "article_url", "comments_url", "language", "image_url", "html_path") - item_json = { field: getattr(self, field) for field in stored_fields } - item_json["category"] = self.category.asdict() + article_json = { field: getattr(self, field) for field in stored_fields } + article_json["category"] = self.category.asdict() if self.json_path.exists(): raise Exception with self.json_path.open("w") as f: - json.dump(item_json, f) + json.dump(article_json, f) def delete_json(self): self.json_path.unlink() @@ -155,7 +155,7 @@ class FileArticle(Article): self.config = config self.json_path = json_path - item_json = json.load(json_path.open("r")) - for field in item_json: - setattr(self, field, item_json[field]) - self.category = Category.fromdict(item_json["category"]) + article_json = json.load(json_path.open("r")) + for field in article_json: + setattr(self, field, article_json[field]) + self.category = Category.fromdict(article_json["category"]) diff --git a/src/feather/feather.py b/src/feather/feather.py index 37f8edf..ede8949 100755 --- a/src/feather/feather.py +++ b/src/feather/feather.py @@ -46,12 +46,12 @@ class FeatherApp: removed_directories.add(dirpath) def mark_deleted_as_read(self): - """Mark items that are in the JSON directory but with missing HTML file as read on the server""" + """Mark articles that are in the JSON directory but with missing HTML file as read on the server""" config = self.config client_session = self.get_client_session() if config.update_lock.exists(): - print("The previous synchronization was aborted, not marking any item as read 
in order to avoid collateral damage") + print("The previous synchronization was aborted, not marking any article as read in order to avoid collateral damage") return marked_as_read = 0 @@ -67,18 +67,18 @@ class FeatherApp: for i in range(0, len(to_mark_as_read), config.articles_per_query): client_session.mark_as_read(to_mark_as_read[i:i+config.articles_per_query]) - print(f"Marked {marked_as_read} items as read") + print(f"Marked {marked_as_read} articles as read") def synchronize_with_server(self): - """Synchronize items from the server, generating and deleting JSON and HTML files accordingly""" + """Synchronize articles from the server, generating and deleting JSON and HTML files accordingly""" config = self.config client_session = self.get_client_session() config.update_lock.touch() print("Synchronizing with server...") - new_items, updated_items = 0, 0 - grabbed_item_paths = set() + new_articles, updated_articles = 0, 0 + grabbed_article_paths = set() categories = client_session.list_categories() for category in categories: @@ -92,27 +92,27 @@ class FeatherApp: else: remaining = False - for item in articles: - json_path = item.json_path - grabbed_item_paths.add(json_path) + for article in articles: + json_path = article.json_path + grabbed_article_paths.add(json_path) if not json_path.exists(): - item.write() - new_items += 1 + article.write() + new_articles += 1 else: - old_item = FileArticle(config, json_path) - if item.updated > old_item.updated: - old_item.delete() - item.write() - updated_items += 1 + old_article = FileArticle(config, json_path) + if article.updated > old_article.updated: + old_article.delete() + article.write() + updated_articles += 1 - # Remove items that we didn't get from the server but are in the JSON directory - removed_items = 0 - for item_path in config.json_root.glob("*.json"): - if not item_path in grabbed_item_paths: - FileArticle(config, item_path).delete() - removed_items += 1 + # Remove articles that we didn't get from the 
server but are in the JSON directory + removed_articles = 0 + for article_path in config.json_root.glob("*.json"): + if article_path not in grabbed_article_paths: + FileArticle(config, article_path).delete() + removed_articles += 1 - print(f"Synchronization successful ({new_items} new items, {updated_items} updated, {removed_items} removed)") + print(f"Synchronization successful ({new_articles} new articles, {updated_articles} updated, {removed_articles} removed)") config.update_lock.unlink() def synchronize(self): @@ -122,12 +122,12 @@ class FeatherApp: self.remove_empty_html_directories() def synchronize_local_changes(self): - """Upload local changes (read items) to the server""" + """Upload local changes (read articles) to the server""" self.mark_deleted_as_read() self.remove_empty_html_directories() def synchronize_remote_changes(self): - """Download remote changes (new items, items read from another device) from the server""" + """Download remote changes (new articles, articles read from another device) from the server""" self.synchronize_with_server() self.remove_empty_html_directories()