diff --git a/.github/workflows/dependabot-approve-and-auto-merge.yml b/.github/workflows/dependabot-approve-and-auto-merge.yml index f989c8d0..139312a2 100644 --- a/.github/workflows/dependabot-approve-and-auto-merge.yml +++ b/.github/workflows/dependabot-approve-and-auto-merge.yml @@ -19,7 +19,7 @@ jobs: # will not occur. - name: Dependabot metadata id: dependabot-metadata - uses: dependabot/fetch-metadata@v2.1.0 + uses: dependabot/fetch-metadata@v2.2.0 with: github-token: "${{ secrets.GITHUB_TOKEN }}" # Here the PR gets approved. diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml index f4a045f1..aeaf12e2 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -54,7 +54,7 @@ jobs: - name: Build and push id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./ file: ./Dockerfile diff --git a/.github/workflows/latest.yml b/.github/workflows/latest.yml index d7f599bd..e6c057d1 100644 --- a/.github/workflows/latest.yml +++ b/.github/workflows/latest.yml @@ -50,7 +50,7 @@ jobs: - name: Build and push id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./ file: ./Dockerfile diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index 179a1358..8f840ac4 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -50,7 +50,7 @@ jobs: - name: Build and push id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: context: ./ file: ./Dockerfile diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7d08bc94..9b2f5ea1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - id: pretty-format-json args: [--autofix, --indent, '4', --no-sort-keys] - repo: https://github.com/hhatto/autopep8 - rev: v2.2.0 + rev: v2.3.1 hooks: - id: autopep8 - repo: https://github.com/adrienverge/yamllint.git @@ -38,7 
+38,7 @@ repos: name: isort (python) args: [--force-single-line-imports, --profile, black] - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.16.0 hooks: - id: pyupgrade args: [--py3-plus] @@ -49,7 +49,7 @@ repos: language_version: python3 args: [--line-length, '130'] - repo: https://github.com/PyCQA/flake8 - rev: 7.0.0 + rev: 7.1.0 hooks: - id: flake8 args: [--config=.flake8] diff --git a/CHANGELOG b/CHANGELOG index cbf20739..1fa7ad71 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,13 +1,16 @@ -# Requirements Updated -- qbittorrent-api==2024.5.63 -- requests==2.32.3 -- schedule==1.2.2 - # New Updates -- Add config option `cat_update_all` to categorize only uncategorized torrents (Closes [#575](https://github.com/StuffAnThings/qbit_manage/issues/575)) +- BHD config options are now deprecated due to qbm no longer needing to use the api to detect unregistered torrents (Closes #595) +- Updates mover script to add logging and default to completed torrents only with new optional argument (`--status-filter`) # Bug Fixes -- Fixes [#560](https://github.com/StuffAnThings/qbit_manage/issues/560) - +- Adds new ignore message for unregistered torrents (Closes #592) +- Allow `max_seeding_time` to be unlimited (-1) if min_seeding_time is set (Closes #596) +- Fixes checking tracker status for udp/wss (Closes #586) +- Fixes Logging header not getting logged in every run (Closes #591) +- Fixes min_seeding_time tag removal when max_seeding_time is -1 (Closes #598) +- Fixes Remove orphaned without moving to orphaned_dir (Closes #590) +- Fixes bug in printing schedule mode when run is set to True +- Modifies noHL threshold to 0.1 to address false positives -**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.1.5...v4.1.6 +Special thanks to @bakerboy448, @ineednewpajamas, @lflare, @convexshiba for their contributions! 
+**Full Changelog**: https://github.com/StuffAnThings/qbit_manage/compare/v4.1.6...v4.1.7 diff --git a/VERSION b/VERSION index 561ad334..9edf2a44 100755 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -4.1.6 +4.1.7 diff --git a/config/config.yml.sample b/config/config.yml.sample index e99eeabb..70d6ac5c 100755 --- a/config/config.yml.sample +++ b/config/config.yml.sample @@ -309,7 +309,3 @@ webhooks: tag_nohardlinks: notifiarr share_limits: notifiarr cleanup_dirs: notifiarr - -bhd: - # BHD Integration used for checking unregistered torrents - apikey: diff --git a/docs/Config-Setup.md b/docs/Config-Setup.md index 8ee0c087..708f3947 100644 --- a/docs/Config-Setup.md +++ b/docs/Config-Setup.md @@ -530,12 +530,3 @@ Payload will be sent when files are deleted/cleaned up from the various folders "size_in_bytes": int, // Total number of bytes deleted from the location } ``` - -## **bhd:** - ---- -BHD integration is used if you are on the private tracker BHD. (Used to identify any unregistered torrents from this tracker) - -| Variable | Definition | Default Values | Required | -| :------- | :---------- | :------------- | :----------------- | -| `apikey` | BHD API Key | `None` (blank) |
| diff --git a/docs/Docker-Installation.md b/docs/Docker-Installation.md index 44027216..1101301f 100644 --- a/docs/Docker-Installation.md +++ b/docs/Docker-Installation.md @@ -19,31 +19,7 @@ The official build on github is available [here](https://ghcr.io/StuffAnThings/q 2. The config file needs to drill down (if required) further to the desired root dir. * `remote_dir`: is not required and can be commented out with `#` -Below is a list of the docker environment variables -| Docker Environment Variable | Description | Default Value | -| :-------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------ | -| QBT_RUN | Run without the scheduler. Script will exit after completion. | False | -| QBT_SCHEDULE | Schedule to run every x minutes. (Default set to 1440) | 1440 | -| QBT_STARTUP_DELAY | Initial run will start after x seconds (Default set to 0) | 0 | -| QBT_CONFIG | This is used if you want to use a different name for your config.yml. `Example: tv.yml` This variable can also be used to allow the use of multiple config files for a single instance of qbit-manage. For example, listing a wildcard value `Example: QBIT_CONFIG=config_*.yml` and naming your config files accordingly `Example: config_movies.yml` and `config_tv.yml` will instruct qbit-manage to utilize each config file that matches the specified naming convention during every run. | config.yml | -| QBT_LOGFILE | This is used if you want to use a different name for your log file. 
`Example: tv.log` | activity.log | -| QBT_CROSS_SEED | Use this after running [cross-seed script](https://github.com/mmgoodnow/cross-seed) to add torrents from the cross-seed output folder to qBittorrent | False | -| QBT_RECHECK | Recheck paused torrents sorted by lowest size. Resume if Completed. | False | -| QBT_CAT_UPDATE | Use this if you would like to update your categories or move from one category to another.. | False | -| QBT_TAG_UPDATE | Use this if you would like to update your tags. (Only adds tags to untagged torrents) | False | -| QBT_REM_UNREGISTERED | Use this if you would like to remove unregistered torrents. (It will the delete data & torrent if it is not being cross-seeded, otherwise it will just remove the torrent without deleting data) | False | -| QBT_TAG_TRACKER_ERROR | Use this to tag any torrents with tracker errors, such as unregistered torrents or unreachable trackers. | False | -| QBT_REM_ORPHANED | Use this if you would like to remove orphaned files from your `root_dir` directory that are not referenced by any torrents. It will scan your `root_dir` directory and compare it with what is in qBittorrent. Any data not referenced in qBittorrent will be moved into `/data/torrents/orphaned_data` folder for you to review/delete. | False | -| QBT_TAG_NOHARDLINKS | Use this to tag any torrents that do not have any hard links associated with any of the files. This is useful for those that use Sonarr/Radarr that hard links your media files with the torrents for seeding. When files get upgraded they no longer become linked with your media therefore will be tagged with a new tag noHL. You can then safely delete/remove these torrents to free up any extra space that is not being used by your media folder. | False | -| QBT_SHARE_LIMITS | Control how torrent share limits are set depending on the priority of your grouping. This can apply a max ratio, seed time limits to your torrents or limit your torrent upload speed as well. 
Each torrent will be matched with the share limit group with the highest priority that meets the group filter criteria. Each torrent can only be matched with one share limit group. | False | -| QBT_SKIP_CLEANUP | Use this to skip emptying the Recycle Bin folder (`/root_dir/.RecycleBin`) and Orphaned directory. (`/root_dir/orphaned_data`) | False | -| QBT_SKIP_QB_VERSION_CHECK | Use this to bypass qBittorrent/libtorrent version compatibility check. You run the risk of undesirable behavior and will receive no support. | False | -| QBT_DRY_RUN | If you would like to see what is gonna happen but not actually move/delete or tag/categorize anything. | False | -| QBT_LOG_LEVEL | Change the output log level. | INFO | -| QBT_DIVIDER | Character that divides the sections (Default: '=') | = | -| QBT_WIDTH | Screen Width (Default: 100) | 100 | -| QBT_DEBUG | Enable Debug logs | False | -| QBT_TRACE | Enable Trace logs | False | +Please see [Commands](https://github.com/StuffAnThings/qbit_manage/wiki/Commands) for a list of arguments and docker environment variables. 
Here is an example of a docker compose diff --git a/docs/Home.md b/docs/Home.md index b8161a47..a0329fe2 100644 --- a/docs/Home.md +++ b/docs/Home.md @@ -41,5 +41,4 @@ This wiki should tell you everything you need to know about the script to get it * [apprise](Config-Setup#apprise) * [notifiarr](Config-Setup#notifiarr) * [webhooks](Config-Setup#webhooks) - * [bhd](Config-Setup#bhd) * [Commands](Commands) diff --git a/docs/_Sidebar.md b/docs/_Sidebar.md index a3d1d5cd..0af94854 100644 --- a/docs/_Sidebar.md +++ b/docs/_Sidebar.md @@ -22,5 +22,4 @@ - [apprise](Config-Setup#apprise) - [notifiarr](Config-Setup#notifiarr) - [webhooks](Config-Setup#webhooks) - - [bhd](Config-Setup#bhd) - [Commands](Commands) diff --git a/docs/v4-Migration-Guide.md b/docs/v4-Migration-Guide.md index 8eb4c557..a7351f14 100644 --- a/docs/v4-Migration-Guide.md +++ b/docs/v4-Migration-Guide.md @@ -38,8 +38,8 @@ nohardlinks: ```yml cat: -- movies: “/data/torrents/movies” -- tv: “/data/torrents/tv” + movies: “/data/torrents/movies” + tv: “/data/torrents/tv” tracker: Tracker-a: tag: a diff --git a/modules/bhd.py b/modules/bhd.py index 1ea46882..86988d02 100755 --- a/modules/bhd.py +++ b/modules/bhd.py @@ -35,6 +35,9 @@ def search(self, json, path="torrents/"): if response.status_code >= 400: logger.debug(f"Response: {response_json}") raise Failed(f"({response.status_code} [{response.reason}]) {response_json}") + if "rate limited" in response_json.get("status_message", ""): + logger.error(f"BHD Error: {response_json.get('status_message')}") + return {} if not response_json.get("success"): raise Failed(f"BHD Error: {response_json.get('status_message', 'Issue receiving response from BHD API.')}") return response_json diff --git a/modules/config.py b/modules/config.py index ad85a969..f5453655 100755 --- a/modules/config.py +++ b/modules/config.py @@ -77,6 +77,18 @@ def __init__(self, default_dir, args): default=False, save=True, ) + logger.separator("DOCKER ENV COMMANDS", loglevel="DEBUG") + 
logger.debug(f" --run (QBT_RUN): {args['run']}") + logger.debug(f" --schedule (QBT_SCHEDULE): {args['sch']}") + logger.debug(f" --startup-delay (QBT_STARTUP_DELAY): {args['startupDelay']}") + logger.debug(f" --config-file (QBT_CONFIG): {args['config_files']}") + logger.debug(f" --log-file (QBT_LOGFILE): {args['log_file']}") + logger.debug(f" --log-level (QBT_LOG_LEVEL): {args['log_level']}") + logger.debug(f" --divider (QBT_DIVIDER): {args['divider']}") + logger.debug(f" --width (QBT_WIDTH): {args['screen_width']}") + logger.debug(f" --debug (QBT_DEBUG): {args['debug']}") + logger.debug(f" --trace (QBT_TRACE): {args['trace']}") + logger.separator("CONFIG OVERRIDE RUN COMMANDS", space=False, border=False, loglevel="DEBUG") logger.debug(f" --cross-seed (QBT_CROSS_SEED): {self.commands['cross_seed']}") logger.debug(f" --recheck (QBT_RECHECK): {self.commands['recheck']}") logger.debug(f" --cat-update (QBT_CAT_UPDATE): {self.commands['cat_update']}") @@ -89,8 +101,35 @@ def __init__(self, default_dir, args): logger.debug(f" --skip-cleanup (QBT_SKIP_CLEANUP): {self.commands['skip_cleanup']}") logger.debug(f" --skip-qb-version-check (QBT_SKIP_QB_VERSION_CHECK): {self.commands['skip_qb_version_check']}") logger.debug(f" --dry-run (QBT_DRY_RUN): {self.commands['dry_run']}") + logger.separator(loglevel="DEBUG") + else: self.commands = args + logger.separator("DOCKER ENV COMMANDS", loglevel="DEBUG") + logger.debug(f" --run (QBT_RUN): {args['run']}") + logger.debug(f" --schedule (QBT_SCHEDULE): {args['sch']}") + logger.debug(f" --startup-delay (QBT_STARTUP_DELAY): {args['startupDelay']}") + logger.debug(f" --config-file (QBT_CONFIG): {args['config_files']}") + logger.debug(f" --log-file (QBT_LOGFILE): {args['log_file']}") + logger.debug(f" --log-level (QBT_LOG_LEVEL): {args['log_level']}") + logger.debug(f" --divider (QBT_DIVIDER): {args['divider']}") + logger.debug(f" --width (QBT_WIDTH): {args['screen_width']}") + logger.debug(f" --debug (QBT_DEBUG): {args['debug']}") + 
logger.debug(f" --trace (QBT_TRACE): {args['trace']}") + logger.separator("DOCKER ENV RUN COMMANDS", space=False, border=False, loglevel="DEBUG") + logger.debug(f" --cross-seed (QBT_CROSS_SEED): {args['cross_seed']}") + logger.debug(f" --recheck (QBT_RECHECK): {args['recheck']}") + logger.debug(f" --cat-update (QBT_CAT_UPDATE): {args['cat_update']}") + logger.debug(f" --tag-update (QBT_TAG_UPDATE): {args['tag_update']}") + logger.debug(f" --rem-unregistered (QBT_REM_UNREGISTERED): {args['rem_unregistered']}") + logger.debug(f" --tag-tracker-error (QBT_TAG_TRACKER_ERROR): {args['tag_tracker_error']}") + logger.debug(f" --rem-orphaned (QBT_REM_ORPHANED): {args['rem_orphaned']}") + logger.debug(f" --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {args['tag_nohardlinks']}") + logger.debug(f" --share-limits (QBT_SHARE_LIMITS): {args['share_limits']}") + logger.debug(f" --skip-cleanup (QBT_SKIP_CLEANUP): {args['skip_cleanup']}") + logger.debug(f" --skip-qb-version-check (QBT_SKIP_QB_VERSION_CHECK): {args['skip_qb_version_check']}") + logger.debug(f" --dry-run (QBT_DRY_RUN): {args['dry_run']}") + logger.separator(loglevel="DEBUG") if "qbt" in self.data: self.data["qbt"] = self.data.pop("qbt") @@ -293,7 +332,12 @@ def hooks(attr): self.beyond_hd = None if "bhd" in self.data: - if self.data["bhd"] is not None and self.data["bhd"].get("apikey") is not None: + logger.warning("DEPRECATED: bhd attribute is no longer valid. 
Please remove the 'bhd' attribute from your config.") + if ( + self.data["bhd"] is not None + and self.data["bhd"].get("apikey") is not None + and self.data["bhd"].get("legacy", False) + ): logger.info("Connecting to BHD API...") try: self.beyond_hd = BeyondHD( @@ -552,12 +596,14 @@ def _sort_share_limits(share_limits): self.share_limits[group]["torrents"] = [] if ( self.share_limits[group]["min_seeding_time"] > 0 + and self.share_limits[group]["max_seeding_time"] != -1 and self.share_limits[group]["min_seeding_time"] > self.share_limits[group]["max_seeding_time"] ): err = ( f"Config Error: min_seeding_time ({self.share_limits[group]['min_seeding_time']}) is greater than " f"max_seeding_time ({self.share_limits[group]['max_seeding_time']}) for the grouping '{group}'.\n" - f"min_seeding_time must be less than or equal to max_seeding_time." + f"min_seeding_time must be less than or equal to max_seeding_time or " + "max_seeding_time must be unlimited (-1)." ) self.notify(err, "Config") raise Failed(err) diff --git a/modules/core/remove_orphaned.py b/modules/core/remove_orphaned.py index 6ca64856..e4e7e5b5 100644 --- a/modules/core/remove_orphaned.py +++ b/modules/core/remove_orphaned.py @@ -66,11 +66,17 @@ def rem_orphaned(self): num_orphaned = len(orphaned_files) logger.print_line(f"{num_orphaned} Orphaned files found", self.config.loglevel) body += logger.print_line("\n".join(orphaned_files), self.config.loglevel) - body += logger.print_line( - f"{'Not moving' if self.config.dry_run else 'Moving'} {num_orphaned} Orphaned files " - f"to {self.orphaned_dir.replace(self.remote_dir, self.root_dir)}", - self.config.loglevel, - ) + if self.config.orphaned["empty_after_x_days"] == 0: + body += logger.print_line( + f"{'Not Deleting' if self.config.dry_run else 'Deleting'} {num_orphaned} Orphaned files", + self.config.loglevel, + ) + else: + body += logger.print_line( + f"{'Not moving' if self.config.dry_run else 'Moving'} {num_orphaned} Orphaned files " + f"to 
{self.orphaned_dir.replace(self.remote_dir, self.root_dir)}", + self.config.loglevel, + ) attr = { "function": "rem_orphaned", @@ -83,7 +89,7 @@ def rem_orphaned(self): self.config.send_notifications(attr) # Delete empty directories after moving orphan files if not self.config.dry_run: - orphaned_parent_path = set(self.executor.map(self.move_orphan, orphaned_files)) + orphaned_parent_path = set(self.executor.map(self.handle_orphaned_files, orphaned_files)) logger.print_line("Removing newly empty directories", self.config.loglevel) self.executor.map( lambda directory: util.remove_empty_directories(directory, self.qbt.get_category_save_paths()), @@ -93,11 +99,21 @@ def rem_orphaned(self): else: logger.print_line("No Orphaned Files found.", self.config.loglevel) - def move_orphan(self, file): + def handle_orphaned_files(self, file): src = file.replace(self.root_dir, self.remote_dir) dest = os.path.join(self.orphaned_dir, file.replace(self.root_dir, "")) - util.move_files(src, dest, True) - return os.path.dirname(file).replace(self.root_dir, self.remote_dir) + orphaned_parent_path = os.path.dirname(file).replace(self.root_dir, self.remote_dir) + + """Delete orphaned files directly if empty_after_x_days is set to 0""" + if self.config.orphaned["empty_after_x_days"] == 0: + try: + util.delete_files(src) + except Exception: + logger.error(f"Error deleting orphaned file: {file}") + util.move_files(src, dest, True) + else: # Move orphaned files to orphaned directory + util.move_files(src, dest, True) + return orphaned_parent_path def get_full_path_of_torrent_files(self, torrent): torrent_files = map(lambda dict: dict.name, torrent.files) diff --git a/modules/core/remove_unregistered.py b/modules/core/remove_unregistered.py index 49cce037..095b5871 100644 --- a/modules/core/remove_unregistered.py +++ b/modules/core/remove_unregistered.py @@ -73,18 +73,23 @@ def remove_previous_errors(self): self.config.webhooks_factory.notify(torrents_updated, notify_attr, group_by="tag") - 
def check_for_unregistered_torrents_using_bhd_api(self, tracker, msg_up, torrent_hash): + def check_for_unregistered_torrents_in_bhd(self, tracker, msg_up, torrent_hash): """ - Checks if a torrent is unregistered using the BHD API if the tracker is BHD. + Checks if a torrent is unregistered in BHD using their deletion reasons. + Legacy method uses the BHD API to check if a torrent is unregistered. """ - if ( - "tracker.beyond-hd.me" in tracker["url"] - and self.config.beyond_hd is not None - and not list_in_text(msg_up, TorrentMessages.IGNORE_MSGS) - ): - json = {"info_hash": torrent_hash} - response = self.config.beyond_hd.search(json) - if response.get("total_results") == 0: + # Some status's from BHD have a option message such as + # "Trumped: Internal: https://beyond-hd.xxxxx", so removing the colon is needed to match the status + status_filtered = msg_up.split(":")[0] + if "tracker.beyond-hd.me" in tracker["url"]: + # Checks if the legacy method is used and if the tracker is BHD then use API method + if self.config.beyond_hd is not None and not list_in_text(msg_up, TorrentMessages.IGNORE_MSGS): + json = {"info_hash": torrent_hash} + response = self.config.beyond_hd.search(json) + if response.get("total_results") == 0: + return True + # Checks if the tracker is BHD and the message is in the deletion reasons for BHD + elif list_in_text(status_filtered, TorrentMessages.UNREGISTERED_MSGS_BHD): return True return False @@ -102,26 +107,31 @@ def process_torrent_issues(self): self.t_status = self.qbt.torrentinfo[self.t_name]["status"] check_tags = util.get_list(torrent.tags) try: + tracker_working = False for trk in torrent.trackers: - if trk.url.startswith("http"): - tracker = self.qbt.get_tags(self.qbt.get_tracker_urls([trk])) - msg_up = trk.msg.upper() - msg = trk.msg - if TrackerStatus(trk.status) == TrackerStatus.NOT_WORKING: - # Check for unregistered torrents - if self.cfg_rem_unregistered: - if list_in_text(msg_up, TorrentMessages.UNREGISTERED_MSGS) and not 
list_in_text( - msg_up, TorrentMessages.IGNORE_MSGS - ): - self.del_unregistered(msg, tracker, torrent) - break - else: - if self.check_for_unregistered_torrents_using_bhd_api(tracker, msg_up, torrent.hash): - self.del_unregistered(msg, tracker, torrent) - break - # Tag any error torrents - if self.cfg_tag_error and self.tag_error not in check_tags: - self.tag_tracker_error(msg, tracker, torrent) + if ( + trk.url.split(":")[0] in ["http", "https", "udp", "ws", "wss"] + and TrackerStatus(trk.status) == TrackerStatus.WORKING + ): + tracker_working = True + if tracker_working: + continue + tracker = self.qbt.get_tags(self.qbt.get_tracker_urls([trk])) + msg_up = trk.msg.upper() + msg = trk.msg + if TrackerStatus(trk.status) == TrackerStatus.NOT_WORKING: + # Check for unregistered torrents + if self.cfg_rem_unregistered: + if list_in_text(msg_up, TorrentMessages.UNREGISTERED_MSGS) and not list_in_text( + msg_up, TorrentMessages.IGNORE_MSGS + ): + self.del_unregistered(msg, tracker, torrent) + else: + if self.check_for_unregistered_torrents_in_bhd(tracker, msg_up, torrent.hash): + self.del_unregistered(msg, tracker, torrent) + # Tag any error torrents + if self.cfg_tag_error and self.tag_error not in check_tags: + self.tag_tracker_error(msg, tracker, torrent) except NotFound404Error: continue except Exception as ex: diff --git a/modules/core/share_limits.py b/modules/core/share_limits.py index b7e7c062..23920364 100644 --- a/modules/core/share_limits.py +++ b/modules/core/share_limits.py @@ -545,7 +545,7 @@ def _has_reached_last_active_time_limit(): def _has_reached_seeding_time_limit(): nonlocal body seeding_time_limit = None - if max_seeding_time is None: + if max_seeding_time is None or max_seeding_time == -1: return False if max_seeding_time >= 0: seeding_time_limit = max_seeding_time @@ -570,7 +570,7 @@ def _has_reached_seeding_time_limit(): if last_active is not None: if not _has_reached_last_active_time_limit(): return body - if max_ratio is not None: + if 
max_ratio is not None and max_ratio != -1: if max_ratio >= 0: if torrent.ratio >= max_ratio and _has_reached_min_seeding_time_limit(): body += logger.insert_space(f"Ratio vs Max Ratio: {torrent.ratio:.2f} >= {max_ratio:.2f}", 8) diff --git a/modules/qbittorrent.py b/modules/qbittorrent.py index 4aa09f5f..d99c7fce 100755 --- a/modules/qbittorrent.py +++ b/modules/qbittorrent.py @@ -158,7 +158,7 @@ def get_torrent_info(self): status_list = [] is_complete = torrent_is_complete for trk in torrent_trackers: - if trk.url.startswith("http"): + if trk.url.split(":")[0] in ["http", "https", "udp", "ws", "wss"]: status = trk.status msg = trk.msg.upper() if TrackerStatus(trk.status) == TrackerStatus.WORKING: diff --git a/modules/util.py b/modules/util.py index 657b2e30..689c8038 100755 --- a/modules/util.py +++ b/modules/util.py @@ -78,6 +78,20 @@ class TorrentMessages: "TORRENT HAS BEEN DELETED.", # blutopia ] + UNREGISTERED_MSGS_BHD = [ + "DEAD", + "DUPE", + "COMPLETE SEASON UPLOADED", + "PROBLEM WITH DESCRIPTION", + "PROBLEM WITH FILE", + "PROBLEM WITH PACK", + "SPECIFICALLY BANNED", + "TRUMPED", + "OTHER", + "TORRENT HAS BEEN DELETED", + "NUKED", + ] + IGNORE_MSGS = [ "YOU HAVE REACHED THE CLIENT LIMIT FOR THIS TORRENT", "MISSING PASSKEY", @@ -90,6 +104,7 @@ class TorrentMessages: "GATEWAY TIMEOUT", # BHD Gateway Timeout "ANNOUNCE IS CURRENTLY UNAVAILABLE", # BHD Announce unavailable "TORRENT HAS BEEN POSTPONED", # BHD Status + "520 (UNKNOWN HTTP ERROR)", ] EXCEPTIONS_MSGS = [ @@ -449,6 +464,18 @@ def move_files(src, dest, mod=False): return to_delete +def delete_files(file_path): + """Try to delete the file directly.""" + try: + os.remove(file_path) + except FileNotFoundError as e: + logger.warning(f"File not found: {e.filename} - {e.strerror}.") + except PermissionError as e: + logger.warning(f"Permission denied: {e.filename} - {e.strerror}.") + except OSError as e: + logger.error(f"Error deleting file: {e.filename} - {e.strerror}.") + + def copy_files(src, dest): 
"""Copy files from source to destination""" dest_path = os.path.dirname(dest) @@ -570,7 +597,7 @@ def has_hardlinks(self, file, ignore_root_dir): sorted_files = sorted(Path(file).rglob("*"), key=lambda x: os.stat(x).st_size, reverse=True) logger.trace(f"Folder: {file}") logger.trace(f"Files Sorted by size: {sorted_files}") - threshold = 0.5 + threshold = 0.1 if not sorted_files: msg = ( f"Nohardlink Error: Unable to open the folder {file}. " diff --git a/qbit_manage.py b/qbit_manage.py index f360fa40..e149bd90 100755 --- a/qbit_manage.py +++ b/qbit_manage.py @@ -394,16 +394,20 @@ def my_except_hook(exctype, value, tbi): version = (version[0].replace("develop", branch), version[1].replace("develop", branch), version[2]) -def start_loop(): +def start_loop(first_run=False): """Start the main loop""" if len(config_files) == 1: args["config_file"] = config_files[0] + if not first_run: + print_logo(logger) start() else: for config_file in config_files: args["config_file"] = config_file config_base = os.path.splitext(config_file)[0] logger.add_config_handler(config_base) + if not first_run: + print_logo(logger) start() logger.remove_config_handler(config_base) @@ -446,12 +450,15 @@ def finished_run(): nonlocal end_time, start_time, stats_summary, run_time, next_run, body end_time = datetime.now() run_time = str(end_time - start_time).split(".", maxsplit=1)[0] - if is_valid_cron_syntax(sch): # Simple check to guess if it's a cron syntax - next_run_time = schedule_from_cron(sch) + if run is False: + if is_valid_cron_syntax(sch): # Simple check to guess if it's a cron syntax + next_run_time = schedule_from_cron(sch) + else: + delta = timedelta(minutes=sch) + logger.info(f" Scheduled Mode: Running every {precisedelta(delta)}.") + next_run_time = schedule_every_x_minutes(sch) else: - delta = timedelta(minutes=sch) - logger.info(f" Scheduled Mode: Running every {precisedelta(delta)}.") - next_run_time = schedule_every_x_minutes(sch) + next_run_time = datetime.now() nxt_run = 
calc_next_run(next_run_time) next_run_str = nxt_run["next_run_str"] next_run = nxt_run["next_run"] @@ -618,9 +625,8 @@ def schedule_every_x_minutes(min): return next_run_time -if __name__ == "__main__": - killer = GracefulKiller() - logger.add_main_handler() +def print_logo(logger): + global is_docker, version, git_branch logger.separator() logger.info_center(" _ _ _ ") # noqa: W605 logger.info_center(" | | (_) | ") # noqa: W605 @@ -641,48 +647,33 @@ def schedule_every_x_minutes(min): if new_version: logger.info(f" Newest Version: {new_version}") logger.info(f" Platform: {platform.platform()}") - logger.separator(loglevel="DEBUG") - logger.debug(f" --run (QBT_RUN): {run}") - logger.debug(f" --schedule (QBT_SCHEDULE): {sch}") - logger.debug(f" --startup-delay (QBT_STARTUP_DELAY): {startupDelay}") - logger.debug(f" --config-file (QBT_CONFIG): {config_files}") - logger.debug(f" --log-file (QBT_LOGFILE): {log_file}") - logger.debug(f" --cross-seed (QBT_CROSS_SEED): {cross_seed}") - logger.debug(f" --recheck (QBT_RECHECK): {recheck}") - logger.debug(f" --cat-update (QBT_CAT_UPDATE): {cat_update}") - logger.debug(f" --tag-update (QBT_TAG_UPDATE): {tag_update}") - logger.debug(f" --rem-unregistered (QBT_REM_UNREGISTERED): {rem_unregistered}") - logger.debug(f" --tag-tracker-error (QBT_TAG_TRACKER_ERROR): {tag_tracker_error}") - logger.debug(f" --rem-orphaned (QBT_REM_ORPHANED): {rem_orphaned}") - logger.debug(f" --tag-nohardlinks (QBT_TAG_NOHARDLINKS): {tag_nohardlinks}") - logger.debug(f" --share-limits (QBT_SHARE_LIMITS): {share_limits}") - logger.debug(f" --skip-cleanup (QBT_SKIP_CLEANUP): {skip_cleanup}") - logger.debug(f" --skip-qb-version-check (QBT_SKIP_QB_VERSION_CHECK): {skip_qb_version_check}") - logger.debug(f" --dry-run (QBT_DRY_RUN): {dry_run}") - logger.debug(f" --log-level (QBT_LOG_LEVEL): {log_level}") - logger.debug(f" --divider (QBT_DIVIDER): {divider}") - logger.debug(f" --width (QBT_WIDTH): {screen_width}") - logger.debug(f" --debug (QBT_DEBUG): 
{debug}") - logger.debug(f" --trace (QBT_TRACE): {trace}") - logger.debug("") + + +if __name__ == "__main__": + killer = GracefulKiller() + logger.add_main_handler() + print_logo(logger) try: if run: - logger.info(" Run Mode: Script will exit after completion.") - start_loop() + run_mode_message = " Run Mode: Script will exit after completion." + logger.info(run_mode_message) + start_loop(True) else: if is_valid_cron_syntax(sch): # Simple check to guess if it's a cron syntax - logger.info(f" Scheduled Mode: Running cron '{sch}'") + run_mode_message = f" Scheduled Mode: Running cron '{sch}'" next_run_time = schedule_from_cron(sch) next_run = calc_next_run(next_run_time) - logger.info(f" {next_run['next_run_str']}") + run_mode_message += f"\n {next_run['next_run_str']}" + logger.info(run_mode_message) else: delta = timedelta(minutes=sch) - logger.info(f" Scheduled Mode: Running every {precisedelta(delta)}.") + run_mode_message = f" Scheduled Mode: Running every {precisedelta(delta)}." next_run_time = schedule_every_x_minutes(sch) if startupDelay: - logger.info(f" Startup Delay: Initial Run will start after {startupDelay} seconds") + run_mode_message += f"\n Startup Delay: Initial Run will start after {startupDelay} seconds" time.sleep(startupDelay) - start_loop() + logger.info(run_mode_message) + start_loop(True) while not killer.kill_now: next_run = calc_next_run(next_run_time) diff --git a/requirements-dev.txt b/requirements-dev.txt index 45173b0c..175e64ab 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,2 +1,2 @@ -flake8==7.0.0 +flake8==7.1.0 pre-commit==3.7.1 diff --git a/scripts/mover.py b/scripts/mover.py index b270eee9..a20a23d3 100755 --- a/scripts/mover.py +++ b/scripts/mover.py @@ -2,12 +2,21 @@ # This standalone script is used to pause torrents older than last x days, # run mover (in Unraid) and start torrents again once completed import argparse +import logging import os import sys import time from datetime import datetime from datetime 
import timedelta +# Configure logging +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + handlers=[logging.StreamHandler(sys.stdout)], +) + parser = argparse.ArgumentParser(prog="Qbit Mover", description="Stop torrents and kick off Unraid mover process") parser.add_argument("--host", help="qbittorrent host including port", required=True) parser.add_argument("-u", "--user", help="qbittorrent user", default="admin") @@ -29,6 +38,29 @@ action="store_true", default=False, ) +parser.add_argument( + "--status-filter", + help="Define a status to limit which torrents to pause. Useful if you want to leave certain torrents unpaused.", + choices=[ + "all", + "downloading", + "seeding", + "completed", + "paused", + "stopped", + "active", + "inactive", + "resumed", + "running", + "stalled", + "stalled_uploading", + "stalled_downloading", + "checking", + "moving", + "errored", + ], + default="completed", +) # --DEFINE VARIABLES--# # --START SCRIPT--# @@ -37,7 +69,9 @@ from qbittorrentapi import Client from qbittorrentapi import LoginFailed except ModuleNotFoundError: - print('Requirements Error: qbittorrent-api not installed. Please install using the command "pip install qbittorrent-api"') + logging.error( + 'Requirements Error: qbittorrent-api not installed. 
Please install using the command "pip install qbittorrent-api"' + ) sys.exit(1) @@ -60,10 +94,10 @@ def exists_in_cache(cache_mount, content_path): def stop_start_torrents(torrent_list, pause=True): for torrent in torrent_list: if pause: - print(f"Pausing: {torrent.name} [{torrent.added_on}]") + logging.info(f"Pausing: {torrent.name} [{torrent.added_on}]") torrent.pause() else: - print(f"Resuming: {torrent.name} [{torrent.added_on}]") + logging.info(f"Resuming: {torrent.name} [{torrent.added_on}]") torrent.resume() @@ -85,21 +119,23 @@ def stop_start_torrents(torrent_list, pause=True): timeoffset_from = current - timedelta(days=args.days_from) timeoffset_to = current - timedelta(days=args.days_to) - torrent_list = client.torrents.info(sort="added_on", reverse=True) + torrent_list = client.torrents.info(status_filter=args.status_filter, sort="added_on", reverse=True) torrents = filter_torrents(torrent_list, timeoffset_from.timestamp(), timeoffset_to.timestamp(), args.cache_mount) # Pause Torrents - print(f"Pausing [{len(torrents)}] torrents from {args.days_from} - {args.days_to} days ago") + logging.info(f"Pausing [{len(torrents)}] torrents from {args.days_from} - {args.days_to} days ago") stop_start_torrents(torrents, True) time.sleep(10) - # Start mover - print(f"Starting {'mover.old' if args.mover_old else 'mover'} to move files older than {args.days_to} days to array disks.") # Or using mover tunning if args.mover_old: + # Start mover + logging.info("Starting mover.old to move files into array disks.") os.system("/usr/local/sbin/mover.old start") else: + # Start mover + logging.info("Starting mover to move files into array disks based on mover tuning preferences.") os.system("/usr/local/sbin/mover start") # Start Torrents - print(f"Resuming [{len(torrents)}] paused torrents from {args.days_from} - {args.days_to} days ago") + logging.info(f"Resuming [{len(torrents)}] paused torrents from {args.days_from} - {args.days_to} days ago") stop_start_torrents(torrents, 
False)