From d466e263e6b00e5577922183024c6aa20f034b0a Mon Sep 17 00:00:00 2001 From: Mehedi Hasan Shojib Date: Sat, 18 Jan 2025 21:35:19 +0600 Subject: [PATCH] feat: update to v2.0.0 with major changes. - A new deploy may be required. - No description for now; see CHANGELOG.md. --------- Signed-off-by: 5hojib --- .github/workflows/ruff_format.yml | 4 +- .gitignore | 12 +- .python-version | 1 + CHANGELOG.md | 24 + Dockerfile | 2 +- README.md | 7 +- alive.py | 4 +- bot/__init__.py | 567 +------ bot/__main__.py | 425 ++--- .../upload_utils => core}/__init__.py | 0 bot/core/aeon_client.py | 72 + bot/core/config_manager.py | 171 ++ bot/core/handlers.py | 242 +++ bot/core/startup.py | 240 +++ bot/helper/aeon_utils/__init__.py | 0 bot/helper/aeon_utils/access_check.py | 197 +++ bot/helper/aeon_utils/caption_gen.py | 119 ++ .../{metadata.py => metadata_editor.py} | 118 +- bot/helper/aeon_utils/nsfw_check.py | 82 - bot/helper/aeon_utils/send_react.py | 40 - bot/helper/aeon_utils/shorteners.py | 34 + bot/helper/aeon_utils/tinyfy.py | 14 - bot/helper/common.py | 1200 ++++++++++++++ bot/helper/ext_utils/bot_utils.py | 611 ++----- bot/helper/ext_utils/bulk_links.py | 45 +- bot/helper/ext_utils/db_handler.py | 278 ++-- bot/helper/ext_utils/exceptions.py | 17 +- bot/helper/ext_utils/files_utils.py | 914 ++++------- bot/helper/ext_utils/help_messages.py | 408 +++++ bot/helper/ext_utils/help_strings.py | 171 -- bot/helper/ext_utils/links_utils.py | 57 + bot/helper/ext_utils/media_utils.py | 806 +++++++++ bot/helper/ext_utils/shorteners.py | 83 - bot/helper/ext_utils/status_utils.py | 264 +++ bot/helper/ext_utils/task_manager.py | 264 +-- bot/helper/ext_utils/telegraph_helper.py | 54 +- bot/helper/listeners/aria2_listener.py | 274 ++-- bot/helper/listeners/direct_listener.py | 89 +- bot/helper/listeners/mega_listener.py | 140 ++ bot/helper/listeners/qbit_listener.py | 239 ++- bot/helper/listeners/task_listener.py | 558 +++++++ bot/helper/listeners/tasks_listener.py | 677 -------- bot/helper/mirror_leech_utils/__init__.py | 1 - .../download_utils/aria2_download.py | 103 +- .../download_utils/direct_downloader.py | 99 +- .../download_utils/direct_link_generator.py | 1197 +++++++++----- .../direct_link_generator_license.md | 82 + .../download_utils/gd_download.py | 95 +- .../download_utils/mega_download.py | 240 +-- .../download_utils/qbit_download.py | 128 +- .../download_utils/rclone_download.py | 177 +- .../download_utils/telegram_download.py | 206 ++- .../download_utils/yt_dlp_download.py | 316 ++-- .../gdrive_utils/__init__.py | 0 .../mirror_leech_utils/gdrive_utils/clone.py | 174 ++ .../mirror_leech_utils/gdrive_utils/count.py | 81 + .../mirror_leech_utils/gdrive_utils/delete.py | 40 + .../gdrive_utils/download.py | 176 ++ .../mirror_leech_utils/gdrive_utils/helper.py | 267 +++ .../mirror_leech_utils/gdrive_utils/list.py | 381 +++++ .../mirror_leech_utils/gdrive_utils/search.py | 187 +++ .../mirror_leech_utils/gdrive_utils/upload.py | 263 +++ .../mirror_leech_utils/rclone_utils/list.py | 313 ++-- .../mirror_leech_utils/rclone_utils/serve.py | 62 + .../rclone_utils/transfer.py | 553 ++++--- .../status_utils/aria2_status.py | 114 +- .../status_utils/direct_status.py | 37 +- .../status_utils/extract_status.py | 74 - .../status_utils/ffmpeg_status.py | 64 + .../status_utils/gdrive_status.py | 48 +- .../status_utils/mega_status.py | 42 +- .../status_utils/qbit_status.py | 117 +- .../status_utils/queue_status.py | 39 +- .../status_utils/rclone_status.py | 40 +- .../status_utils/sevenz_status.py | 67 +
.../status_utils/split_status.py | 46 - .../status_utils/telegram_status.py | 44 +- .../status_utils/yt_dlp_status.py | 56 + .../status_utils/ytdlp_status.py | 55 - .../status_utils/zip_status.py | 74 - .../mirror_leech_utils/telegram_uploader.py | 576 +++++++ .../upload_utils/gdriveTools.py | 971 ----------- .../upload_utils/telegramEngine.py | 688 -------- bot/helper/telegram_helper/bot_commands.py | 76 +- bot/helper/telegram_helper/button_build.py | 83 +- bot/helper/telegram_helper/filters.py | 67 +- bot/helper/telegram_helper/message_utils.py | 584 +++---- bot/modules/__init__.py | 90 ++ bot/modules/authorize.py | 107 -- bot/modules/bot_settings.py | 892 ++++------ bot/modules/broadcast.py | 31 +- bot/modules/cancel_mirror.py | 121 -- bot/modules/cancel_task.py | 192 +++ bot/modules/chat_permission.py | 115 ++ bot/modules/clone.py | 626 ++++--- bot/modules/count.py | 63 - bot/modules/delete.py | 51 - bot/modules/exec.py | 114 ++ bot/modules/executor.py | 126 -- bot/modules/file_selector.py | 162 ++ bot/modules/force_start.py | 84 + bot/modules/gd_count.py | 42 + bot/modules/gd_delete.py | 27 + bot/modules/gd_search.py | 101 ++ bot/modules/help.py | 60 + bot/modules/images.py | 174 -- bot/modules/list.py | 111 -- bot/modules/mediainfo.py | 105 +- bot/modules/mirror_leech.py | 784 ++++----- bot/modules/restart.py | 144 ++ bot/modules/rss.py | 847 ++++++++++ bot/modules/search.py | 324 ++++ bot/modules/services.py | 135 ++ bot/modules/shell.py | 36 +- bot/modules/speedtest.py | 22 +- bot/modules/stats.py | 107 ++ bot/modules/status.py | 175 +- bot/modules/torrent_search.py | 367 ----- bot/modules/torrent_select.py | 98 -- bot/modules/users_settings.py | 1437 +++++++++-------- bot/modules/ytdlp.py | 722 ++++----- config_sample.py | 107 ++ default.otf | Bin 0 -> 26792 bytes qBittorrent/config/qBittorrent.conf | 15 +- requirements.txt | 4 +- ruff.toml | 66 +- sample_config.env | 7 - update.py | 181 ++- web/nodes.py | 133 +- web/templates/page.html | 958 +++++++++++ web/wserver.py | 865 ++-------- 131 files changed, 17220 insertions(+), 12335 deletions(-) create mode 100644 .python-version create mode 100644 CHANGELOG.md rename bot/{helper/mirror_leech_utils/upload_utils => core}/__init__.py (100%) create mode 100644 bot/core/aeon_client.py create mode 100644 bot/core/config_manager.py create mode 100644 bot/core/handlers.py create mode 100644 bot/core/startup.py create mode 100644 bot/helper/aeon_utils/__init__.py create mode 100644 bot/helper/aeon_utils/access_check.py create mode 100644 bot/helper/aeon_utils/caption_gen.py rename bot/helper/aeon_utils/{metadata.py => metadata_editor.py} (70%) delete mode 100644 bot/helper/aeon_utils/nsfw_check.py delete mode 100644 bot/helper/aeon_utils/send_react.py create mode 100644 bot/helper/aeon_utils/shorteners.py delete mode 100644 bot/helper/aeon_utils/tinyfy.py create mode 100644 bot/helper/common.py create mode 100644 bot/helper/ext_utils/help_messages.py delete mode 100644 bot/helper/ext_utils/help_strings.py create mode 100644 bot/helper/ext_utils/links_utils.py create mode 100644 bot/helper/ext_utils/media_utils.py delete mode 100644 bot/helper/ext_utils/shorteners.py create mode 100644 bot/helper/ext_utils/status_utils.py create mode 100644 bot/helper/listeners/mega_listener.py create mode 100644 bot/helper/listeners/task_listener.py delete mode 100644 bot/helper/listeners/tasks_listener.py create mode 100644 bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md create mode 100644 
bot/helper/mirror_leech_utils/gdrive_utils/__init__.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/clone.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/count.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/delete.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/download.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/helper.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/list.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/search.py create mode 100644 bot/helper/mirror_leech_utils/gdrive_utils/upload.py create mode 100644 bot/helper/mirror_leech_utils/rclone_utils/serve.py delete mode 100644 bot/helper/mirror_leech_utils/status_utils/extract_status.py create mode 100644 bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py create mode 100644 bot/helper/mirror_leech_utils/status_utils/sevenz_status.py delete mode 100644 bot/helper/mirror_leech_utils/status_utils/split_status.py create mode 100644 bot/helper/mirror_leech_utils/status_utils/yt_dlp_status.py delete mode 100644 bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py delete mode 100644 bot/helper/mirror_leech_utils/status_utils/zip_status.py create mode 100644 bot/helper/mirror_leech_utils/telegram_uploader.py delete mode 100644 bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py delete mode 100644 bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py delete mode 100644 bot/modules/authorize.py delete mode 100644 bot/modules/cancel_mirror.py create mode 100644 bot/modules/cancel_task.py create mode 100644 bot/modules/chat_permission.py delete mode 100644 bot/modules/count.py delete mode 100644 bot/modules/delete.py create mode 100644 bot/modules/exec.py delete mode 100644 bot/modules/executor.py create mode 100644 bot/modules/file_selector.py create mode 100644 bot/modules/force_start.py create mode 100644 bot/modules/gd_count.py create mode 100644 bot/modules/gd_delete.py create mode 100644 bot/modules/gd_search.py create mode 100644 bot/modules/help.py delete mode 100644 bot/modules/images.py delete mode 100644 bot/modules/list.py create mode 100644 bot/modules/restart.py create mode 100644 bot/modules/rss.py create mode 100644 bot/modules/search.py create mode 100644 bot/modules/services.py create mode 100644 bot/modules/stats.py delete mode 100644 bot/modules/torrent_search.py delete mode 100644 bot/modules/torrent_select.py create mode 100644 config_sample.py create mode 100644 default.otf delete mode 100644 sample_config.env create mode 100644 web/templates/page.html diff --git a/.github/workflows/ruff_format.yml b/.github/workflows/ruff_format.yml index 1c5bdfee0..2813eabb3 100644 --- a/.github/workflows/ruff_format.yml +++ b/.github/workflows/ruff_format.yml @@ -6,7 +6,9 @@ on: jobs: code-format: - runs-on: ubuntu-latest + permissions: + contents: write + runs-on: ubuntu-24.04 steps: - name: Checkout repository uses: actions/checkout@v4 diff --git a/.gitignore b/.gitignore index 9efdd5c25..9f5c16d31 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -config.env +config.py *.pyc data* .vscode @@ -11,11 +11,11 @@ accounts/* MediaInfo/* Images/* Thumbnails/* -tanha/* +rclone/* +tokens/* list_drives.txt +shorteners.txt cookies.txt downloads -bot.session -user.session -terabox.txt -rcl.conf \ No newline at end of file +bot.session* +rclone.conf diff --git a/.python-version b/.python-version new file mode 100644 index 000000000..fdcfcfdfc --- /dev/null +++ b/.python-version @@ -0,0 
+1 @@ +3.12 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..e0ead33f3 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,24 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## v2.0.0 - 2025-01-18 + +### Breaking Changes + +- Rebased the project on the latest MLTB version. + +### Added + +- Integrated all new features from MLTB, excluding NZB and jDownloader. +- Introduced watermark support for videos. +- Enabled the ability to use a user session string to download private media from Telegram. +- Reintroduced RSS feed support. +- Added a versioning system. +- Added `CHANGELOG.md` to track changes. + +### Removed + +- Removed certain limit-related variables such as `MIRROR_LIMIT` and `LEECH_LIMIT`. \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 25d67d90e..5ff0c0e01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,7 @@ WORKDIR /usr/src/app RUN chmod 777 /usr/src/app COPY requirements.txt . -RUN pip3 install --break-system-packages --ignore-installed --no-cache-dir -r requirements.txt +RUN uv pip install --break-system-packages --system --no-cache-dir -r requirements.txt COPY . . CMD ["bash", "start.sh"] diff --git a/README.md b/README.md index 46776f4fc..ede30a064 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,6 @@ Follow these steps to deploy Aeon to Heroku: - **HEROKU_EMAIL**: Email address associated with your Heroku account. - **HEROKU_API_KEY**: API key from your Heroku account. - **HEROKU_TEAM_NAME** (Optional): Required only if deploying under a Heroku team account. - 4. Run the workflow and wait for it to complete. ### 5. Finalize Setup @@ -70,4 +69,8 @@ This project is licensed under the MIT License. Refer to the [LICENSE](LICENSE) ## Acknowledgements - Special thanks to the original developers of the [Mirror-Leech-Telegram-Bot](https://github.com/anasty17/mirror-leech-telegram-bot). -- Gratitude to contributors from various repositories whose features have been integrated into Aeon. \ No newline at end of file +- Gratitude to contributors from various repositories whose features have been integrated into Aeon.
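For migration reference, a minimal, hypothetical `config.py` for v2.0.0: the module that `Config.load()` in `bot/core/config_manager.py` (added later in this patch) imports in place of the removed `config.env`/`sample_config.env`. Variable names are taken from the `Config` class defaults in that file; every value below is a placeholder, not a recommendation:

```python
# Hypothetical minimal config.py (placeholders only). Config.load() imports
# this module and copies matching UPPERCASE attributes onto the Config class,
# skipping empty values.
BOT_TOKEN = "123456:example-bot-token"  # placeholder
OWNER_ID = 123456789  # placeholder Telegram user id
TELEGRAM_API = 12345  # placeholder API id
TELEGRAM_HASH = "example-api-hash"  # placeholder API hash
DATABASE_URL = ""  # optional: MongoDB URL for persistent settings
USER_SESSION_STRING = ""  # optional: enables downloading private Telegram media
DEFAULT_UPLOAD = "rc"  # any value other than "gd" is coerced to "rc" by Config.load()
```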
+ + +## Recent activity [![Time period](https://images.repography.com/58464391/AeonOrg/Aeon-MLTB/recent-activity/MUUzwqnoU_5n6kL3Jc8TTWcA3UxPyCHC2emNNSTGJh8/4gYNvj3-wi0i5zQVemeNAbqB7TrkUx_7BxZxhReSIVg_badge.svg)](https://repography.com) +[![Timeline graph](https://images.repography.com/58464391/AeonOrg/Aeon-MLTB/recent-activity/MUUzwqnoU_5n6kL3Jc8TTWcA3UxPyCHC2emNNSTGJh8/4gYNvj3-wi0i5zQVemeNAbqB7TrkUx_7BxZxhReSIVg_timeline.svg)](https://github.com/AeonOrg/Aeon-MLTB/commits) \ No newline at end of file diff --git a/alive.py b/alive.py index 9cadf18db..f8b4edbb5 100644 --- a/alive.py +++ b/alive.py @@ -1,6 +1,6 @@ +import logging import os import time -import logging import requests @@ -18,7 +18,7 @@ def check_status(): try: - requests.get(BASE_URL).status_code + requests.get(BASE_URL) except Exception as e: logging.error(f"alive.py: {e}") return False diff --git a/bot/__init__.py b/bot/__init__.py index 2f58ea472..559f28231 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -1,52 +1,67 @@ -import sys -from os import path as ospath -from os import remove as osremove -from os import environ -from time import time, sleep -from socket import setdefaulttimeout -from asyncio import Lock +import os +import subprocess +from asyncio import Lock, new_event_loop, set_event_loop +from datetime import datetime from logging import ( - INFO, ERROR, - Formatter, + INFO, + WARNING, FileHandler, + Formatter, + LogRecord, StreamHandler, - error, - warning, - getLogger, basicConfig, + getLogger, ) -from threading import Thread -from subprocess import Popen, check_output -from subprocess import run as srun -from faulthandler import enable as faulthandler_enable +from socket import setdefaulttimeout +from time import time -from aria2p import API +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from aria2p import API as ariaAPI # noqa: N811 from aria2p import Client as ariaClient -from dotenv import load_dotenv, dotenv_values -from uvloop import install -from pymongo import MongoClient +from pytz import timezone +from qbittorrentapi import Client as QbClient from tzlocal import get_localzone -from pyrogram import Client as tgClient -from pyrogram import enums -from qbittorrentapi import Client as qbClient -from apscheduler.schedulers.asyncio import AsyncIOScheduler +from uvloop import install + +# from faulthandler import enable as faulthandler_enable +# faulthandler_enable() -faulthandler_enable() install() setdefaulttimeout(600) -getLogger("pymongo").setLevel(ERROR) -getLogger("httpx").setLevel(ERROR) + +getLogger("qbittorrentapi").setLevel(WARNING) +getLogger("requests").setLevel(WARNING) +getLogger("urllib3").setLevel(WARNING) +getLogger("pyrogram").setLevel(ERROR) +getLogger("httpx").setLevel(WARNING) +getLogger("pymongo").setLevel(WARNING) + bot_start_time = time() +bot_loop = new_event_loop() +set_event_loop(bot_loop) + class CustomFormatter(Formatter): - def format(self, record): + def formatTime( # noqa: N802 + self, + record: LogRecord, + datefmt: str | None, + ) -> str: + dt: datetime = datetime.fromtimestamp( + record.created, + tz=timezone("Asia/Dhaka"), + ) + return dt.strftime(datefmt) + + def format(self, record: LogRecord) -> str: return super().format(record).replace(record.levelname, record.levelname[:1]) formatter = CustomFormatter( - "[%(asctime)s] [%(levelname)s] - %(message)s", datefmt="%d-%b-%y %I:%M:%S %p" + "[%(asctime)s] %(levelname)s - %(message)s [%(module)s:%(lineno)d]", + datefmt="%d-%b %I:%M:%S %p", ) file_handler = FileHandler("log.txt") @@ -59,423 +74,36 @@ def 
format(self, record): LOGGER = getLogger(__name__) -load_dotenv("config.env", override=True) - -Interval = [] -QbInterval = [] -QbTorrents = {} -GLOBAL_EXTENSION_FILTER = ["aria2", "!qB"] +intervals = {"status": {}, "qb": "", "stopAll": False} +qb_torrents = {} user_data = {} -extra_buttons = {} -list_drives_dict = {} -shorteners_list = [] aria2_options = {} qbit_options = {} queued_dl = {} queued_up = {} +status_dict = {} +task_dict = {} +rss_dict = {} non_queued_dl = set() non_queued_up = set() -download_dict_lock = Lock() -status_reply_dict_lock = Lock() +multi_tags = set() +task_dict_lock = Lock() queue_dict_lock = Lock() qb_listener_lock = Lock() -status_reply_dict = {} -download_dict = {} - -BOT_TOKEN = environ.get("BOT_TOKEN", "") -if len(BOT_TOKEN) == 0: - error("BOT_TOKEN variable is missing! Exiting now") - sys.exit(1) - -bot_id = BOT_TOKEN.split(":", 1)[0] - -DATABASE_URL = environ.get("DATABASE_URL", "") -if len(DATABASE_URL) == 0: - DATABASE_URL = "" - -if DATABASE_URL: - conn = MongoClient(DATABASE_URL) - db = conn.luna - current_config = dict(dotenv_values("config.env")) - old_config = db.settings.deployConfig.find_one({"_id": bot_id}) - if old_config is None: - db.settings.deployConfig.replace_one( - {"_id": bot_id}, current_config, upsert=True - ) - else: - del old_config["_id"] - if old_config and old_config != current_config: - db.settings.deployConfig.replace_one( - {"_id": bot_id}, current_config, upsert=True - ) - elif config_dict := db.settings.config.find_one({"_id": bot_id}): - del config_dict["_id"] - for key, value in config_dict.items(): - environ[key] = str(value) - if pf_dict := db.settings.files.find_one({"_id": bot_id}): - del pf_dict["_id"] - for key, value in pf_dict.items(): - if value: - file_ = key.replace("__", ".") - with open(file_, "wb+") as f: - f.write(value) - if a2c_options := db.settings.aria2c.find_one({"_id": bot_id}): - del a2c_options["_id"] - aria2_options = a2c_options - if qbit_opt := db.settings.qbittorrent.find_one({"_id": bot_id}): - del qbit_opt["_id"] - qbit_options = qbit_opt - conn.close() - BOT_TOKEN = environ.get("BOT_TOKEN", "") - bot_id = BOT_TOKEN.split(":", 1)[0] - DATABASE_URL = environ.get("DATABASE_URL", "") -else: - config_dict = {} - -GROUPS_EMAIL = environ.get("GROUPS_EMAIL", "") -if len(GROUPS_EMAIL) != 0: - GROUPS_EMAIL = GROUPS_EMAIL.lower() - -OWNER_ID = environ.get("OWNER_ID", "") -if len(OWNER_ID) == 0: - error("OWNER_ID variable is missing! Exiting now") - sys.exit(1) -else: - OWNER_ID = int(OWNER_ID) - -TELEGRAM_API = environ.get("TELEGRAM_API", "") -if len(TELEGRAM_API) == 0: - error("TELEGRAM_API variable is missing! Exiting now") - sys.exit(1) -else: - TELEGRAM_API = int(TELEGRAM_API) - -TELEGRAM_HASH = environ.get("TELEGRAM_HASH", "") -if len(TELEGRAM_HASH) == 0: - error("TELEGRAM_HASH variable is missing! 
Exiting now") - sys.exit(1) - -GDRIVE_ID = environ.get("GDRIVE_ID", "") -if len(GDRIVE_ID) == 0: - GDRIVE_ID = "" - -METADATA_KEY = environ.get("METADATA_KEY", "") -if len(METADATA_KEY) == 0: - METADATA_KEY = "" - -RCLONE_PATH = environ.get("RCLONE_PATH", "") -if len(RCLONE_PATH) == 0: - RCLONE_PATH = "" - -ATTACHMENT_URL = environ.get("ATTACHMENT_URL", "") -if len(ATTACHMENT_URL) == 0: - ATTACHMENT_URL = "" - -RCLONE_FLAGS = environ.get("RCLONE_FLAGS", "") -if len(RCLONE_FLAGS) == 0: - RCLONE_FLAGS = "" - -DEFAULT_UPLOAD = environ.get("DEFAULT_UPLOAD", "") -if DEFAULT_UPLOAD != "rc": - DEFAULT_UPLOAD = "gd" - -EXTENSION_FILTER = environ.get("EXTENSION_FILTER", "") -if len(EXTENSION_FILTER) > 0: - fx = EXTENSION_FILTER.split() - for x in fx: - cleaned_x = x.lstrip(".") - GLOBAL_EXTENSION_FILTER.append(cleaned_x.strip().lower()) - -IS_PREMIUM_USER = False -user = "" -USER_SESSION_STRING = environ.get("USER_SESSION_STRING", "") -if len(USER_SESSION_STRING) != 0: - try: - user = tgClient( - "user", - TELEGRAM_API, - TELEGRAM_HASH, - session_string=USER_SESSION_STRING, - workers=1000, - parse_mode=enums.ParseMode.HTML, - no_updates=True, - ).start() - IS_PREMIUM_USER = user.me.is_premium - except Exception as e: - error(f"Failed making client from USER_SESSION_STRING : {e}") - user = "" - -MAX_SPLIT_SIZE = 4194304000 if IS_PREMIUM_USER else 2097152000 - -MEGA_EMAIL = environ.get("MEGA_EMAIL", "") -MEGA_PASSWORD = environ.get("MEGA_PASSWORD", "") -if len(MEGA_EMAIL) == 0 or len(MEGA_PASSWORD) == 0: - MEGA_EMAIL = "" - MEGA_PASSWORD = "" - -FILELION_API = environ.get("FILELION_API", "") -if len(FILELION_API) == 0: - FILELION_API = "" - -INDEX_URL = environ.get("INDEX_URL", "").rstrip("/") -if len(INDEX_URL) == 0: - INDEX_URL = "" - -SEARCH_API_LINK = environ.get("SEARCH_API_LINK", "").rstrip("/") -if len(SEARCH_API_LINK) == 0: - SEARCH_API_LINK = "" - -STREAMWISH_API = environ.get("STREAMWISH_API", "") -if len(STREAMWISH_API) == 0: - STREAMWISH_API = "" - -BOT_MAX_TASKS = environ.get("BOT_MAX_TASKS", "") -BOT_MAX_TASKS = int(BOT_MAX_TASKS) if BOT_MAX_TASKS.isdigit() else "" - -LEECH_LOG_ID = environ.get("LEECH_LOG_ID", "") -LEECH_LOG_ID = "" if len(LEECH_LOG_ID) == 0 else int(LEECH_LOG_ID) - -YT_DLP_OPTIONS = environ.get("YT_DLP_OPTIONS", "") -if len(YT_DLP_OPTIONS) == 0: - YT_DLP_OPTIONS = "" - -SEARCH_LIMIT = environ.get("SEARCH_LIMIT", "") -SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT) - -LEECH_DUMP_ID = environ.get("LEECH_DUMP_ID", "") -if len(LEECH_DUMP_ID) == 0: - LEECH_DUMP_ID = "" - -CMD_SUFFIX = environ.get("CMD_SUFFIX", "") - -TORRENT_TIMEOUT = environ.get("TORRENT_TIMEOUT", "") -TORRENT_TIMEOUT = 3000 if len(TORRENT_TIMEOUT) == 0 else int(TORRENT_TIMEOUT) - -QUEUE_ALL = environ.get("QUEUE_ALL", "") -QUEUE_ALL = "" if len(QUEUE_ALL) == 0 else int(QUEUE_ALL) - -QUEUE_DOWNLOAD = environ.get("QUEUE_DOWNLOAD", "") -QUEUE_DOWNLOAD = "" if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD) - -QUEUE_UPLOAD = environ.get("QUEUE_UPLOAD", "") -QUEUE_UPLOAD = "" if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD) - -STOP_DUPLICATE = environ.get("STOP_DUPLICATE", "") -STOP_DUPLICATE = STOP_DUPLICATE.lower() == "true" - -USE_SERVICE_ACCOUNTS = environ.get("USE_SERVICE_ACCOUNTS", "") -USE_SERVICE_ACCOUNTS = USE_SERVICE_ACCOUNTS.lower() == "true" - -AS_DOCUMENT = environ.get("AS_DOCUMENT", "") -AS_DOCUMENT = AS_DOCUMENT.lower() == "true" - -SHOW_MEDIAINFO = environ.get("SHOW_MEDIAINFO", "") -SHOW_MEDIAINFO = SHOW_MEDIAINFO.lower() == "true" - -MEDIA_GROUP = environ.get("MEDIA_GROUP", 
"") -MEDIA_GROUP = MEDIA_GROUP.lower() == "true" - -BASE_URL = environ.get("BASE_URL", "").rstrip("/") -if len(BASE_URL) == 0: - warning("BASE_URL not provided!") - BASE_URL = "" - -UPSTREAM_REPO = environ.get("UPSTREAM_REPO", "") -if len(UPSTREAM_REPO) == 0: - UPSTREAM_REPO = "" - -UPSTREAM_BRANCH = environ.get("UPSTREAM_BRANCH", "") -if len(UPSTREAM_BRANCH) == 0: - UPSTREAM_BRANCH = "main" - -TORRENT_LIMIT = environ.get("TORRENT_LIMIT", "") -TORRENT_LIMIT = "" if len(TORRENT_LIMIT) == 0 else float(TORRENT_LIMIT) - -DIRECT_LIMIT = environ.get("DIRECT_LIMIT", "") -DIRECT_LIMIT = "" if len(DIRECT_LIMIT) == 0 else float(DIRECT_LIMIT) - -YTDLP_LIMIT = environ.get("YTDLP_LIMIT", "") -YTDLP_LIMIT = "" if len(YTDLP_LIMIT) == 0 else float(YTDLP_LIMIT) - -GDRIVE_LIMIT = environ.get("GDRIVE_LIMIT", "") -GDRIVE_LIMIT = "" if len(GDRIVE_LIMIT) == 0 else float(GDRIVE_LIMIT) - -CLONE_LIMIT = environ.get("CLONE_LIMIT", "") -CLONE_LIMIT = "" if len(CLONE_LIMIT) == 0 else float(CLONE_LIMIT) - -MEGA_LIMIT = environ.get("MEGA_LIMIT", "") -MEGA_LIMIT = "" if len(MEGA_LIMIT) == 0 else float(MEGA_LIMIT) - -LEECH_LIMIT = environ.get("LEECH_LIMIT", "") -LEECH_LIMIT = "" if len(LEECH_LIMIT) == 0 else float(LEECH_LIMIT) - -USER_MAX_TASKS = environ.get("USER_MAX_TASKS", "") -USER_MAX_TASKS = "" if len(USER_MAX_TASKS) == 0 else int(USER_MAX_TASKS) - -PLAYLIST_LIMIT = environ.get("PLAYLIST_LIMIT", "") -PLAYLIST_LIMIT = "" if len(PLAYLIST_LIMIT) == 0 else int(PLAYLIST_LIMIT) - -DELETE_LINKS = environ.get("DELETE_LINKS", "") -DELETE_LINKS = DELETE_LINKS.lower() == "true" - -FSUB_IDS = environ.get("FSUB_IDS", "") -if len(FSUB_IDS) == 0: - FSUB_IDS = "" - -MIRROR_LOG_ID = environ.get("MIRROR_LOG_ID", "") -if len(MIRROR_LOG_ID) == 0: - MIRROR_LOG_ID = "" - -IMAGES = environ.get("IMAGES", "") -IMAGES = ( - IMAGES.replace("'", "") - .replace('"', "") - .replace("[", "") - .replace("]", "") - .replace(",", "") -).split() - - -SET_COMMANDS = environ.get("SET_COMMANDS", "") -SET_COMMANDS = SET_COMMANDS.lower() == "true" - -TOKEN_TIMEOUT = environ.get("TOKEN_TIMEOUT", "") -TOKEN_TIMEOUT = int(TOKEN_TIMEOUT) if TOKEN_TIMEOUT.isdigit() else "" - -config_dict = { - "AS_DOCUMENT": AS_DOCUMENT, - "BASE_URL": BASE_URL, - "BOT_TOKEN": BOT_TOKEN, - "BOT_MAX_TASKS": BOT_MAX_TASKS, - "CMD_SUFFIX": CMD_SUFFIX, - "DATABASE_URL": DATABASE_URL, - "DELETE_LINKS": DELETE_LINKS, - "DEFAULT_UPLOAD": DEFAULT_UPLOAD, - "FILELION_API": FILELION_API, - "TORRENT_LIMIT": TORRENT_LIMIT, - "DIRECT_LIMIT": DIRECT_LIMIT, - "YTDLP_LIMIT": YTDLP_LIMIT, - "GDRIVE_LIMIT": GDRIVE_LIMIT, - "CLONE_LIMIT": CLONE_LIMIT, - "MEGA_LIMIT": MEGA_LIMIT, - "LEECH_LIMIT": LEECH_LIMIT, - "FSUB_IDS": FSUB_IDS, - "USER_MAX_TASKS": USER_MAX_TASKS, - "PLAYLIST_LIMIT": PLAYLIST_LIMIT, - "MIRROR_LOG_ID": MIRROR_LOG_ID, - "LEECH_DUMP_ID": LEECH_DUMP_ID, - "IMAGES": IMAGES, - "EXTENSION_FILTER": EXTENSION_FILTER, - "GDRIVE_ID": GDRIVE_ID, - "ATTACHMENT_URL": ATTACHMENT_URL, - "INDEX_URL": INDEX_URL, - "LEECH_LOG_ID": LEECH_LOG_ID, - "TOKEN_TIMEOUT": TOKEN_TIMEOUT, - "MEDIA_GROUP": MEDIA_GROUP, - "MEGA_EMAIL": MEGA_EMAIL, - "MEGA_PASSWORD": MEGA_PASSWORD, - "METADATA_KEY": METADATA_KEY, - "OWNER_ID": OWNER_ID, - "QUEUE_ALL": QUEUE_ALL, - "QUEUE_DOWNLOAD": QUEUE_DOWNLOAD, - "QUEUE_UPLOAD": QUEUE_UPLOAD, - "RCLONE_FLAGS": RCLONE_FLAGS, - "RCLONE_PATH": RCLONE_PATH, - "SEARCH_API_LINK": SEARCH_API_LINK, - "SEARCH_LIMIT": SEARCH_LIMIT, - "SET_COMMANDS": SET_COMMANDS, - "SHOW_MEDIAINFO": SHOW_MEDIAINFO, - "STOP_DUPLICATE": STOP_DUPLICATE, - "STREAMWISH_API": STREAMWISH_API, - 
"TELEGRAM_API": TELEGRAM_API, - "TELEGRAM_HASH": TELEGRAM_HASH, - "TORRENT_TIMEOUT": TORRENT_TIMEOUT, - "UPSTREAM_REPO": UPSTREAM_REPO, - "UPSTREAM_BRANCH": UPSTREAM_BRANCH, - "USER_SESSION_STRING": USER_SESSION_STRING, - "GROUPS_EMAIL": GROUPS_EMAIL, - "USE_SERVICE_ACCOUNTS": USE_SERVICE_ACCOUNTS, - "YT_DLP_OPTIONS": YT_DLP_OPTIONS, -} - -if GDRIVE_ID: - list_drives_dict["Main"] = {"drive_id": GDRIVE_ID, "index_link": INDEX_URL} - -if ospath.exists("list_drives.txt"): - with open("list_drives.txt", "r+") as f: - lines = f.readlines() - for line in lines: - sep = 2 if line.strip().split()[-1].startswith("http") else 1 - temp = line.strip().rsplit(maxsplit=sep) - name = "Main Custom" if temp[0].casefold() == "Main" else temp[0] - list_drives_dict[name] = { - "drive_id": temp[1], - "index_link": (temp[2] if sep == 2 else ""), - } - -if ospath.exists("buttons.txt"): - with open("buttons.txt", "r+") as f: - lines = f.readlines() - for line in lines: - temp = line.strip().split() - if len(extra_buttons.keys()) == 4: - break - if len(temp) == 2: - extra_buttons[temp[0].replace("_", " ")] = temp[1] - -if ospath.exists("shorteners.txt"): - with open("shorteners.txt", "r+") as f: - lines = f.readlines() - for line in lines: - temp = line.strip().split() - if len(temp) == 2: - shorteners_list.append({"domain": temp[0], "api_key": temp[1]}) - -PORT = environ.get("PORT") -Popen( - f"gunicorn web.wserver:app --bind 0.0.0.0:{PORT} --worker-class gevent", - shell=True, -) - -srun(["xnox", "-d", "--profile=."], check=False) -if not ospath.exists(".netrc"): - with open(".netrc", "w"): - pass -srun(["chmod", "600", ".netrc"], check=False) -srun(["cp", ".netrc", "/root/.netrc"], check=False) - -trackers = ( - check_output( - "curl -Ns https://raw.githubusercontent.com/XIU2/TrackersListCollection/master/all.txt https://ngosang.github.io/trackerslist/trackers_all_http.txt https://newtrackon.com/api/all https://raw.githubusercontent.com/hezhijie0327/Trackerslist/main/trackerslist_tracker.txt | awk '$0' | tr '\n\n' ','", - shell=True, - ) - .decode("utf-8") - .rstrip(",") -) -with open("a2c.conf", "a+") as a: - if TORRENT_TIMEOUT is not None: - a.write(f"bt-stop-timeout={TORRENT_TIMEOUT}\n") - a.write(f"bt-tracker=[{trackers}]") -srun(["xria", "--conf-path=/usr/src/app/a2c.conf"], check=False) +cpu_eater_lock = Lock() +same_directory_lock = Lock() +extension_filter = ["aria2", "!qB"] +drives_names = [] +drives_ids = [] +index_urls = [] +shorteners_list = [] -if ospath.exists("accounts.zip"): - if ospath.exists("accounts"): - srun(["rm", "-rf", "accounts"], check=False) - srun( - ["7z", "x", "-o.", "-bd", "-aoa", "accounts.zip", "accounts/*.json"], - check=False, - ) - srun(["chmod", "-R", "777", "accounts"], check=False) - osremove("accounts.zip") -if not ospath.exists("accounts"): - config_dict["USE_SERVICE_ACCOUNTS"] = False -alive = Popen(["python3", "alive.py"]) -sleep(0.5) +aria2 = ariaAPI(ariaClient(host="http://localhost", port=6800, secret="")) -aria2 = API(ariaClient(host="http://localhost", port=6800, secret="")) +subprocess.run(["xnox", "-d", f"--profile={os.getcwd()}"], check=False) -xnox_client = qbClient( +xnox_client = QbClient( host="localhost", port=8090, VERIFY_WEBUI_CERTIFICATE=False, @@ -487,63 +115,28 @@ def format(self, record): }, ) +trackers = ( + subprocess.check_output( + "curl -Ns https://raw.githubusercontent.com/XIU2/TrackersListCollection/master/all.txt https://ngosang.github.io/trackerslist/trackers_all_http.txt https://newtrackon.com/api/all 
https://raw.githubusercontent.com/hezhijie0327/Trackerslist/main/trackerslist_tracker.txt | awk '$0' | tr '\n\n' ','", + shell=True, + ) + .decode("utf-8") + .rstrip(",") +) -def aria2c_init(): - try: - link = "https://linuxmint.com/torrents/lmde-5-cinnamon-64bit.iso.torrent" - dire = "/usr/src/app/downloads/".rstrip("/") - aria2.add_uris([link], {"dir": dire}) - sleep(3) - downloads = aria2.get_downloads() - sleep(10) - aria2.remove(downloads, force=True, files=True, clean=True) - except Exception as e: - error(f"Aria2c initializing error: {e}") - - -Thread(target=aria2c_init).start() -sleep(1.5) +with open("a2c.conf", "a+") as a: + a.write("bt-stop-timeout=600\n") + a.write(f"bt-tracker=[{trackers}]") +subprocess.run(["xria", "--conf-path=/usr/src/app/a2c.conf"], check=False) -aria2c_global = [ - "bt-max-open-files", - "download-result", - "keep-unfinished-download-result", - "log", - "log-level", - "max-concurrent-downloads", - "max-download-result", - "max-overall-download-limit", - "save-session", - "max-overall-upload-limit", - "optimize-concurrent-downloads", - "save-cookies", - "server-stat-of", -] -if not aria2_options: - aria2_options = aria2.client.get_global_option() -else: - a2c_glo = {op: aria2_options[op] for op in aria2c_global if op in aria2_options} - aria2.set_global_options(a2c_glo) +if os.path.exists("shorteners.txt"): + with open("shorteners.txt", "r+") as f: + lines = f.readlines() + for line in lines: + temp = line.strip().split() + if len(temp) == 2: + shorteners_list.append({"domain": temp[0], "api_key": temp[1]}) -if not qbit_options: - qbit_options = dict(xnox_client.app_preferences()) - del qbit_options["listen_port"] - for k in list(qbit_options.keys()): - if k.startswith("rss"): - del qbit_options[k] -else: - qb_opt = {**qbit_options} - xnox_client.app_set_preferences(qb_opt) -bot = tgClient( - "bot", - TELEGRAM_API, - TELEGRAM_HASH, - bot_token=BOT_TOKEN, - workers=1000, - parse_mode=enums.ParseMode.HTML, -).start() -bot_loop = bot.loop -bot_name = bot.me.username scheduler = AsyncIOScheduler(timezone=str(get_localzone()), event_loop=bot_loop) diff --git a/bot/__main__.py b/bot/__main__.py index a2df4f7b0..7eec1fdff 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -1,339 +1,148 @@ -# ruff: noqa: F401 -import contextlib -from os import execl as osexecl -from sys import executable -from html import escape -from time import time -from uuid import uuid4 +# ruff: noqa: E402 +from asyncio import gather from signal import SIGINT, signal -from asyncio import gather, create_subprocess_exec -from psutil import boot_time, disk_usage, cpu_percent, virtual_memory -from aiofiles import open as aiopen -from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import ( - LOGGER, - DATABASE_URL, - Interval, - QbInterval, - bot, - bot_name, - scheduler, - user_data, - config_dict, - bot_start_time, +from pyrogram.filters import regex +from pyrogram.handlers import CallbackQueryHandler +from pyrogram.types import BotCommand + +from . 
import LOGGER, bot_loop +from .core.config_manager import Config, SystemEnv + +# Initialize Configurations +LOGGER.info("Loading config...") +Config.load() +SystemEnv.load() + +from .core.startup import ( + load_configurations, + load_settings, + save_settings, + update_aria2_options, + update_qb_options, + update_variables, ) -from .modules import ( - list, - clone, - count, - shell, - ytdlp, - delete, - images, - status, - executor, - authorize, - broadcast, - mediainfo, - speedtest, - bot_settings, - mirror_leech, - cancel_mirror, - torrent_search, - torrent_select, - users_settings, -) -from .helper.ext_utils.bot_utils import ( - new_task, - new_thread, - set_commands, - sync_to_async, - get_readable_time, - get_readable_file_size, -) -from .helper.ext_utils.db_handler import DbManager -from .helper.ext_utils.files_utils import clean_all, exit_clean_up, start_cleanup -from .helper.telegram_helper.filters import CustomFilters +bot_loop.run_until_complete(load_settings()) + + +from .core.aeon_client import TgClient +from .core.handlers import add_handlers +from .helper.ext_utils.bot_utils import create_help_buttons, new_task, sync_to_async +from .helper.ext_utils.files_utils import clean_all, exit_clean_up +from .helper.ext_utils.telegraph_helper import telegraph from .helper.listeners.aria2_listener import start_aria2_listener +from .helper.mirror_leech_utils.rclone_utils.serve import rclone_serve_booter from .helper.telegram_helper.bot_commands import BotCommands -from .helper.telegram_helper.button_build import ButtonMaker +from .helper.telegram_helper.filters import CustomFilters from .helper.telegram_helper.message_utils import ( - sendFile, + delete_message, edit_message, send_message, - delete_message, - one_minute_del, - five_minute_del, +) +from .modules import ( + get_packages_version, + initiate_search_tools, + restart_notification, ) -if config_dict["GDRIVE_ID"]: - help_string = f"""NOTE: Try each command without any arguments to see more details. - -
/{BotCommands.MirrorCommand[0]} - Start mirroring to Google Drive. -/{BotCommands.LeechCommand[0]} - Start leeching to Telegram. -/{BotCommands.YtdlCommand[0]} - Mirror links supported by yt-dlp. -/{BotCommands.YtdlLeechCommand[0]} - Leech links supported by yt-dlp. -/{BotCommands.CloneCommand[0]} - Copy files/folders to Google Drive. -/{BotCommands.CountCommand} - Count files/folders in Google Drive. -/{BotCommands.ListCommand} - Search in Google Drive(s). -/{BotCommands.UserSetCommand} - Open the settings panel. -/{BotCommands.MediaInfoCommand} - View MediaInfo from a file or link. -/{BotCommands.StopAllCommand[0]} - Cancel all active tasks. -/{BotCommands.SearchCommand} - Search for torrents using API or plugins. -/{BotCommands.StatusCommand[0]} - Show the status of all downloads. -/{BotCommands.StatsCommand[0]} - Display machine stats hosting the bot.
-""" -else: - help_string = f"""NOTE: Try each command without any arguments to see more details. - -
/{BotCommands.LeechCommand[0]} - Start leeching to Telegram. -/{BotCommands.YtdlLeechCommand[0]} - Leech links supported by yt-dlp. -/{BotCommands.UserSetCommand} - Open the settings panel. -/{BotCommands.MediaInfoCommand} - View MediaInfo from a file or link. -/{BotCommands.StopAllCommand[0]} - Cancel all active tasks. -/{BotCommands.SearchCommand} - Search for torrents using API or plugins. -/{BotCommands.StatusCommand[0]} - Show the status of all downloads. -/{BotCommands.StatsCommand[0]} - Display machine stats hosting the bot.
-""" - - -@new_thread -async def stats(_, message): - total, used, free, disk = disk_usage("/") - memory = virtual_memory() - current_time = get_readable_time(time() - bot_start_time) - os_uptime = get_readable_time(time() - boot_time()) - cpu_usage = cpu_percent(interval=0.5) - limit_mapping = { - "Torrent": config_dict.get("TORRENT_LIMIT", "∞"), - "Gdrive": config_dict.get("GDRIVE_LIMIT", "∞"), - "Ytdlp": config_dict.get("YTDLP_LIMIT", "∞"), - "Direct": config_dict.get("DIRECT_LIMIT", "∞"), - "Leech": config_dict.get("LEECH_LIMIT", "∞"), - "Clone": config_dict.get("CLONE_LIMIT", "∞"), - "Mega": config_dict.get("MEGA_LIMIT", "∞"), - "User task": config_dict.get("USER_MAX_TASKS", "∞"), - } - system_info = ( - f"• Bot uptime : {current_time}\n" - f"• Sys uptime : {os_uptime}\n" - f"• CPU usage : {cpu_usage}%\n" - f"• RAM usage : {memory.percent}%\n" - f"• Disk usage : {disk}%\n" - f"• Free space : {get_readable_file_size(free)}\n" - f"• Total space: {get_readable_file_size(total)}\n\n" - ) - - limitations = "LIMITATIONS\n\n" - - for k, v in limit_mapping.items(): - if v == "": - value = "∞" - elif k != "User task": - value = f"{v}GB/Link" - else: - value = f"{v} Tasks/user" - limitations += f"• {k:<11}: {value}\n" - - stats = system_info + limitations - reply_message = await send_message(message, stats, photo="Random") - await delete_message(message) - await one_minute_del(reply_message) - - -@new_thread -async def start(client, message): - if len(message.command) > 1 and message.command[1] == "private": - await delete_message(message) - elif len(message.command) > 1 and len(message.command[1]) == 36: - userid = message.from_user.id - input_token = message.command[1] - if DATABASE_URL: - stored_token = await DbManager().get_user_token(userid) - if stored_token is None: - return await send_message( - message, - "This token is not for you!\n\nPlease generate your own.", - ) - if input_token != stored_token: - return await send_message( - message, "Invalid token.\n\nPlease generate a new one." - ) - if userid not in user_data: - return await send_message( - message, "This token is not yours!\n\nKindly generate your own." 
- ) - data = user_data[userid] - if "token" not in data or data["token"] != input_token: - return await send_message( - message, - "This token has already been used!\n\nPlease get a new one.", - ) - token = str(uuid4()) - token_time = time() - data["token"] = token - data["time"] = token_time - user_data[userid].update(data) - if DATABASE_URL: - await DbManager().update_user_tdata(userid, token, token_time) - msg = "Your token has been successfully generated!\n\n" - msg += f'It will be valid for {get_readable_time(int(config_dict["TOKEN_TIMEOUT"]), True)}' - return await send_message(message, msg) - elif await CustomFilters.authorized(client, message): - help_command = f"/{BotCommands.HelpCommand}" - start_string = f"This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram.\nType {help_command} to get a list of available commands" - await send_message(message, start_string, photo="Random") - else: - await send_message(message, "You are not a authorized user!", photo="Random") - await DbManager().update_pm_users(message.from_user.id) - return None - - -async def restart(_, message): - restart_message = await send_message(message, "Restarting...") - if scheduler.running: - scheduler.shutdown(wait=False) - for interval in [QbInterval, Interval]: - if interval: - interval[0].cancel() - await sync_to_async(clean_all) - proc1 = await create_subprocess_exec( - "pkill", "-9", "-f", "-e", "gunicorn|xria|xnox|xtra|xone" - ) - proc2 = await create_subprocess_exec("python3", "update.py") - await gather(proc1.wait(), proc2.wait()) - async with aiopen(".restartmsg", "w") as f: - await f.write(f"{restart_message.chat.id}\n{restart_message.id}\n") - osexecl(executable, executable, "-m", "bot") - - -async def ping(_, message): - start_time = int(round(time() * 1000)) - reply = await send_message(message, "Starting ping...") - end_time = int(round(time() * 1000)) - value = end_time - start_time - await edit_message(reply, f"{value} ms.") - - +# Commands and Descriptions +COMMANDS = { + "MirrorCommand": "- Start mirroring", + "LeechCommand": "- Start leeching", + "YtdlCommand": "- Mirror yt-dlp supported link", + "YtdlLeechCommand": "- Leech through yt-dlp supported link", + "CloneCommand": "- Copy file/folder to Drive", + "MediaInfoCommand": "- Get mediainfo", + "ForceStartCommand": "- Start task from queue", + "CountCommand": "- Count file/folder on Google Drive", + "ListCommand": "- Search in Drive", + "SearchCommand": "- Search in Torrent", + "UserSetCommand": "- User settings", + "StatusCommand": "- Get mirror status message", + "StatsCommand": "- Check Bot & System stats", + "CancelAllCommand": "- Cancel all tasks added by you to the bot", + "HelpCommand": "- Get detailed help", + "SpeedTest": "- Get speedtest result", + "BotSetCommand": "- [ADMIN] Open Bot settings", + "LogCommand": "- [ADMIN] View log", + "RestartCommand": "- [ADMIN] Restart the bot", + # "RestartSessionsCommand": "- [ADMIN] Restart the session instead of the bot", +} + + +# Restart Sessions Handler @new_task -async def aeon_callback(_, query): - message = query.message - user_id = query.from_user.id +async def restart_sessions_confirm(_, query): data = query.data.split() - if user_id != int(data[1]): - return await query.answer(text="This message not your's!", show_alert=True) - if data[2] == "logdisplay": - await query.answer() - async with aiopen("log.txt") as f: - log_file_lines = (await f.read()).splitlines() - - def parseline(line): - try: - return "[" + line.split("] [", 1)[1] - 
except IndexError: - return line + message = query.message - ind, log_lines = 1, "" - try: - while len(log_lines) <= 3500: - log_lines = parseline(log_file_lines[-ind]) + "\n" + log_lines - if ind == len(log_file_lines): - break - ind += 1 - start_line = "
"
-            end_line = "
" - btn = ButtonMaker() - btn.callback("Close", f"aeon {user_id} close") - reply_message = await send_message( - message, start_line + escape(log_lines) + end_line, btn.column(1) - ) - await query.edit_message_reply_markup(None) - await delete_message(message) - await five_minute_del(reply_message) - except Exception as err: - LOGGER.error(f"TG Log Display : {err!s}") - elif data[2] == "private": - await query.answer(url=f"https://t.me/{bot_name}?start=private") - return None + if data[1] == "confirm": + reply_to = message.reply_to_message + restart_message = await send_message(reply_to, "Restarting Session(s)...") + await delete_message(message) + await TgClient.reload() + add_handlers() + TgClient.bot.add_handler( + CallbackQueryHandler( + restart_sessions_confirm, + filters=regex("^sessionrestart") & CustomFilters.sudo, + ), + ) + await edit_message(restart_message, "Session(s) Restarted Successfully!") else: - await query.answer() await delete_message(message) - return None - - -@new_task -async def log(_, message): - buttons = ButtonMaker() - buttons.callback("Log display", f"aeon {message.from_user.id} logdisplay") - reply_message = await sendFile(message, "log.txt", buttons=buttons.column(1)) - await delete_message(message) - await five_minute_del(reply_message) -@new_task -async def bot_help(_, message): - reply_message = await send_message(message, help_string) - await delete_message(message) - await one_minute_del(reply_message) +# Setup Commands +COMMAND_OBJECTS = [ + BotCommand( + getattr(BotCommands, cmd)[0] + if isinstance(getattr(BotCommands, cmd), list) + else getattr(BotCommands, cmd), + description, + ) + for cmd, description in COMMANDS.items() +] -async def restart_notification(): - if await aiopath.isfile(".restartmsg"): - with open(".restartmsg") as f: - chat_id, msg_id = map(int, f) - with contextlib.suppress(Exception): - await bot.edit_message_text( - chat_id=chat_id, message_id=msg_id, text="Restarted Successfully!" 
- ) - await aioremove(".restartmsg") +# Set Bot Commands +async def set_commands(): + if Config.SET_COMMANDS: + await TgClient.bot.set_bot_commands(COMMAND_OBJECTS) +# Main Function async def main(): + await gather(TgClient.start_bot(), TgClient.start_user()) + await gather(load_configurations(), update_variables()) await gather( - start_cleanup(), - torrent_search.initiate_search_tools(), - restart_notification(), - set_commands(bot), - ) - await sync_to_async(start_aria2_listener, wait=False) - bot.add_handler(MessageHandler(start, filters=command(BotCommands.StartCommand))) - bot.add_handler( - MessageHandler( - log, filters=command(BotCommands.LogCommand) & CustomFilters.sudo - ) - ) - bot.add_handler( - MessageHandler( - restart, filters=command(BotCommands.RestartCommand) & CustomFilters.sudo - ) + sync_to_async(update_qb_options), + sync_to_async(update_aria2_options), + set_commands(), ) - bot.add_handler( - MessageHandler( - ping, filters=command(BotCommands.PingCommand) & CustomFilters.authorized - ) - ) - bot.add_handler( - MessageHandler( - bot_help, - filters=command(BotCommands.HelpCommand) & CustomFilters.authorized, - ) + await gather( + save_settings(), + sync_to_async(clean_all), + initiate_search_tools(), + get_packages_version(), + restart_notification(), + telegraph.create_account(), + rclone_serve_booter(), + sync_to_async(start_aria2_listener, wait=False), ) - bot.add_handler( - MessageHandler( - stats, - filters=command(BotCommands.StatsCommand) & CustomFilters.authorized, - ) + create_help_buttons() + add_handlers() + TgClient.bot.add_handler( + CallbackQueryHandler( + restart_sessions_confirm, + filters=regex("^sessionrestart") & CustomFilters.sudo, + ), ) - bot.add_handler(CallbackQueryHandler(aeon_callback, filters=regex(r"^aeon"))) LOGGER.info("Bot Started!") signal(SIGINT, exit_clean_up) -bot.loop.run_until_complete(main()) -bot.loop.run_forever() +# Run Bot +bot_loop.run_until_complete(main()) +bot_loop.run_forever() diff --git a/bot/helper/mirror_leech_utils/upload_utils/__init__.py b/bot/core/__init__.py similarity index 100% rename from bot/helper/mirror_leech_utils/upload_utils/__init__.py rename to bot/core/__init__.py diff --git a/bot/core/aeon_client.py b/bot/core/aeon_client.py new file mode 100644 index 000000000..edfe97023 --- /dev/null +++ b/bot/core/aeon_client.py @@ -0,0 +1,72 @@ +from asyncio import Lock + +from pyrogram import Client, enums + +from bot import LOGGER + +from .config_manager import Config + + +class TgClient: + _lock = Lock() + bot = None + user = None + NAME = "" + ID = 0 + IS_PREMIUM_USER = False + MAX_SPLIT_SIZE = 2097152000 + + @classmethod + async def start_bot(cls): + LOGGER.info("Creating client from BOT_TOKEN") + cls.bot = Client( + "bot", + Config.TELEGRAM_API, + Config.TELEGRAM_HASH, + bot_token=Config.BOT_TOKEN, + parse_mode=enums.ParseMode.HTML, + max_concurrent_transmissions=10, + ) + await cls.bot.start() + cls.NAME = cls.bot.me.username + cls.ID = Config.BOT_TOKEN.split(":", 1)[0] + + @classmethod + async def start_user(cls): + if Config.USER_SESSION_STRING: + LOGGER.info("Creating client from USER_SESSION_STRING") + try: + cls.user = Client( + "user", + Config.TELEGRAM_API, + Config.TELEGRAM_HASH, + session_string=Config.USER_SESSION_STRING, + parse_mode=enums.ParseMode.HTML, + no_updates=True, + max_concurrent_transmissions=10, + ) + await cls.user.start() + cls.IS_PREMIUM_USER = cls.user.me.is_premium + if cls.IS_PREMIUM_USER: + cls.MAX_SPLIT_SIZE = 4194304000 + except Exception as e: + LOGGER.error(f"Failed to 
start client from USER_SESSION_STRING. {e}") + cls.IS_PREMIUM_USER = False + cls.user = None + + @classmethod + async def stop(cls): + async with cls._lock: + if cls.bot: + await cls.bot.stop() + if cls.user: + await cls.user.stop() + LOGGER.info("Client stopped") + + @classmethod + async def reload(cls): + async with cls._lock: + await cls.bot.restart() + if cls.user: + await cls.user.restart() + LOGGER.info("Client restarted") diff --git a/bot/core/config_manager.py b/bot/core/config_manager.py new file mode 100644 index 000000000..e7aa16bde --- /dev/null +++ b/bot/core/config_manager.py @@ -0,0 +1,171 @@ +import os +from importlib import import_module +from typing import Any, ClassVar + + +class Config: + AS_DOCUMENT = False + AUTHORIZED_CHATS = "" + BASE_URL = "" + BASE_URL_PORT = 80 + BOT_TOKEN = "" + CMD_SUFFIX = "" + DATABASE_URL = "" + DEFAULT_UPLOAD = "rc" + DOWNLOAD_DIR = "/usr/src/app/downloads/" + EXTENSION_FILTER = "" + FFMPEG_CMDS: ClassVar[dict[str, list[str]]] = {} + FILELION_API = "" + GDRIVE_ID = "" + INCOMPLETE_TASK_NOTIFIER = False + INDEX_URL = "" + IS_TEAM_DRIVE = False + LEECH_DUMP_CHAT = "" + LEECH_FILENAME_PREFIX = "" + LEECH_SPLIT_SIZE = 2097152000 + MEDIA_GROUP = False + MIXED_LEECH = False + MEGA_EMAIL = "" + MEGA_PASSWORD = "" + NAME_SUBSTITUTE = "" + OWNER_ID = 0 + QUEUE_ALL = 0 + QUEUE_DOWNLOAD = 0 + QUEUE_UPLOAD = 0 + RCLONE_FLAGS = "" + RCLONE_PATH = "" + RCLONE_SERVE_URL = "" + RCLONE_SERVE_USER = "" + RCLONE_SERVE_PASS = "" + RCLONE_SERVE_PORT = 8080 + RSS_CHAT = "" + RSS_DELAY = 600 + RSS_SIZE_LIMIT = 0 + SEARCH_API_LINK = "" + SEARCH_LIMIT = 0 + SEARCH_PLUGINS: ClassVar[list[str]] = [] + STOP_DUPLICATE = False + STREAMWISH_API = "" + SUDO_USERS = "" + TELEGRAM_API = 0 + TELEGRAM_HASH = "" + THUMBNAIL_LAYOUT = "" + TORRENT_TIMEOUT = 0 + USER_TRANSMISSION = False + UPSTREAM_REPO = "" + UPSTREAM_BRANCH = "main" + USER_SESSION_STRING = "" + USE_SERVICE_ACCOUNTS = False + WEB_PINCODE = False + YT_DLP_OPTIONS = "" + + # INKYPINKY + METADATA_KEY = "" + WATERMARK_KEY = "" + SET_COMMANDS = True + TOKEN_TIMEOUT = 0 + PAID_CHANNEL_ID = 0 + PAID_CHANNEL_LINK = "" + DELETE_LINKS = False + FSUB_IDS = "" + LOG_CHAT_ID = 0 + LEECH_FILENAME_CAPTION = "" + + @classmethod + def get(cls, key): + if hasattr(cls, key): + return getattr(cls, key) + raise KeyError(f"{key} is not a valid configuration key.") + + @classmethod + def set(cls, key, value): + if hasattr(cls, key): + setattr(cls, key, value) + else: + raise KeyError(f"{key} is not a valid configuration key.") + + @classmethod + def get_all(cls): + return { + key: getattr(cls, key) + for key in cls.__dict__ + if not key.startswith("__") and not callable(getattr(cls, key)) + } + + @classmethod + def load(cls): + try: + settings = import_module("config") + except ModuleNotFoundError: + return + else: + for attr in dir(settings): + if hasattr(cls, attr): + value = getattr(settings, attr) + if not value: + continue + if isinstance(value, str): + value = value.strip() + if attr == "DEFAULT_UPLOAD" and value != "gd": + value = "rc" + elif ( + attr + in [ + "BASE_URL", + "RCLONE_SERVE_URL", + "INDEX_URL", + "SEARCH_API_LINK", + ] + and value + ): + value = value.strip("/") + setattr(cls, attr, value) + + @classmethod + def load_dict(cls, config_dict): + for key, value in config_dict.items(): + if hasattr(cls, key): + if key == "DEFAULT_UPLOAD" and value != "gd": + value = "rc" + elif ( + key + in [ + "BASE_URL", + "RCLONE_SERVE_URL", + "INDEX_URL", + "SEARCH_API_LINK", + ] + and value + ): + value = value.strip("/") + 
setattr(cls, key, value) + + +class SystemEnv: + @classmethod + def load(cls): + config_vars = Config.get_all() + for key in config_vars: + env_value = os.getenv(key) + if env_value is not None: + converted_value = cls._convert_type(key, env_value) + Config.set(key, converted_value) + + @classmethod + def _convert_type(cls, key: str, value: str) -> Any: + original_value = getattr(Config, key, None) + if original_value is None: + return value + if isinstance(original_value, bool): + return value.lower() in ("true", "1", "yes") + if isinstance(original_value, int): + try: + return int(value) + except ValueError: + return original_value + if isinstance(original_value, float): + try: + return float(value) + except ValueError: + return original_value + return value diff --git a/bot/core/handlers.py b/bot/core/handlers.py new file mode 100644 index 000000000..7292049a9 --- /dev/null +++ b/bot/core/handlers.py @@ -0,0 +1,242 @@ +# ruff: noqa: F405 +from pyrogram.filters import command, regex +from pyrogram.handlers import ( + CallbackQueryHandler, + EditedMessageHandler, + MessageHandler, +) + +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.filters import CustomFilters +from bot.modules import * + +from .aeon_client import TgClient + + +def add_handlers(): + command_filters = { + "authorize": ( + authorize, + BotCommands.AuthorizeCommand, + CustomFilters.sudo, + ), + "unauthorize": ( + unauthorize, + BotCommands.UnAuthorizeCommand, + CustomFilters.sudo, + ), + "add_sudo": ( + add_sudo, + BotCommands.AddSudoCommand, + CustomFilters.sudo, + ), + "remove_sudo": ( + remove_sudo, + BotCommands.RmSudoCommand, + CustomFilters.sudo, + ), + "send_bot_settings": ( + send_bot_settings, + BotCommands.BotSetCommand, + CustomFilters.sudo, + ), + "cancel_all_buttons": ( + cancel_all_buttons, + BotCommands.CancelAllCommand, + CustomFilters.authorized, + ), + "clone_node": ( + clone_node, + BotCommands.CloneCommand, + CustomFilters.authorized, + ), + "aioexecute": ( + aioexecute, + BotCommands.AExecCommand, + CustomFilters.owner, + ), + "execute": ( + execute, + BotCommands.ExecCommand, + CustomFilters.owner, + ), + "clear": ( + clear, + BotCommands.ClearLocalsCommand, + CustomFilters.owner, + ), + "select": ( + select, + BotCommands.SelectCommand, + CustomFilters.authorized, + ), + "remove_from_queue": ( + remove_from_queue, + BotCommands.ForceStartCommand, + CustomFilters.authorized, + ), + "count_node": ( + count_node, + BotCommands.CountCommand, + CustomFilters.authorized, + ), + "delete_file": ( + delete_file, + BotCommands.DeleteCommand, + CustomFilters.authorized, + ), + "gdrive_search": ( + gdrive_search, + BotCommands.ListCommand, + CustomFilters.authorized, + ), + "mirror": ( + mirror, + BotCommands.MirrorCommand, + CustomFilters.authorized, + ), + "leech": ( + leech, + BotCommands.LeechCommand, + CustomFilters.authorized, + ), + "get_rss_menu": ( + get_rss_menu, + BotCommands.RssCommand, + CustomFilters.authorized, + ), + "run_shell": ( + run_shell, + BotCommands.ShellCommand, + CustomFilters.owner, + ), + "start": ( + start, + BotCommands.StartCommand, + None, + ), + "log": ( + log, + BotCommands.LogCommand, + CustomFilters.sudo, + ), + "restart_bot": ( + restart_bot, + BotCommands.RestartCommand, + CustomFilters.sudo, + ), + "ping": ( + ping, + BotCommands.PingCommand, + CustomFilters.authorized, + ), + "bot_help": ( + bot_help, + BotCommands.HelpCommand, + CustomFilters.authorized, + ), + "bot_stats": ( + bot_stats, + BotCommands.StatsCommand, + 
CustomFilters.authorized, + ), + "task_status": ( + task_status, + BotCommands.StatusCommand, + CustomFilters.authorized, + ), + "torrent_search": ( + torrent_search, + BotCommands.SearchCommand, + CustomFilters.authorized, + ), + "get_users_settings": ( + get_users_settings, + BotCommands.UsersCommand, + CustomFilters.sudo, + ), + "send_user_settings": ( + send_user_settings, + BotCommands.UserSetCommand, + CustomFilters.authorized, + ), + "ytdl": ( + ytdl, + BotCommands.YtdlCommand, + CustomFilters.authorized, + ), + "ytdl_leech": ( + ytdl_leech, + BotCommands.YtdlLeechCommand, + CustomFilters.authorized, + ), + # "restart_sessions": ( + # restart_sessions, + # BotCommands.RestartSessionsCommand, + # CustomFilters.sudo, + # ), + "mediainfo": ( + mediainfo, + BotCommands.MediaInfoCommand, + CustomFilters.authorized, + ), + "speedtest": ( + speedtest, + BotCommands.SpeedTest, + CustomFilters.authorized, + ), + "broadcast": ( + broadcast, + BotCommands.BroadcastCommand, + CustomFilters.owner, + ), + } + + for handler_func, command_name, custom_filter in command_filters.values(): + if custom_filter: + filters_to_apply = ( + command(command_name, case_sensitive=True) & custom_filter + ) + else: + filters_to_apply = command(command_name, case_sensitive=True) + + TgClient.bot.add_handler( + MessageHandler( + handler_func, + filters=filters_to_apply, + ), + ) + + regex_filters = { + "^botset": edit_bot_settings, + "^canall": cancel_all_update, + "^stopm": cancel_multi, + "^sel": confirm_selection, + "^list_types": select_type, + "^rss": rss_listener, + "^torser": torrent_search_update, + "^userset": edit_user_settings, + "^help": arg_usage, + "^status": status_pages, + "^botrestart": confirm_restart, + "^log": log_callback, + } + + for regex_filter, handler_func in regex_filters.items(): + TgClient.bot.add_handler( + CallbackQueryHandler(handler_func, filters=regex(regex_filter)), + ) + + TgClient.bot.add_handler( + EditedMessageHandler( + run_shell, + filters=command(BotCommands.ShellCommand, case_sensitive=True) + & CustomFilters.owner, + ), + ) + TgClient.bot.add_handler( + MessageHandler( + cancel, + filters=regex(r"^/stop(_\w+)?(?!all)") & CustomFilters.authorized, + ), + ) diff --git a/bot/core/startup.py b/bot/core/startup.py new file mode 100644 index 000000000..518ccf93a --- /dev/null +++ b/bot/core/startup.py @@ -0,0 +1,240 @@ +from asyncio import create_subprocess_exec, create_subprocess_shell +from os import environ + +from aiofiles import open as aiopen +from aiofiles.os import makedirs, remove +from aiofiles.os import path as aiopath +from aioshutil import rmtree + +from bot import ( + LOGGER, + aria2, + aria2_options, + drives_ids, + drives_names, + extension_filter, + index_urls, + qbit_options, + rss_dict, + user_data, + xnox_client, +) +from bot.helper.ext_utils.db_handler import database + +from .aeon_client import TgClient +from .config_manager import Config + + +def update_qb_options(): + if not qbit_options: + qbit_options.update(dict(xnox_client.app_preferences())) + del qbit_options["listen_port"] + for k in list(qbit_options.keys()): + if k.startswith("rss"): + del qbit_options[k] + qbit_options["web_ui_password"] = "mltbmltb" + xnox_client.app_set_preferences({"web_ui_password": "mltbmltb"}) + else: + xnox_client.app_set_preferences(qbit_options) + + +def update_aria2_options(): + if not aria2_options: + aria2_options.update(aria2.client.get_global_option()) + else: + aria2.set_global_options(aria2_options) + + +async def load_settings(): + if not Config.DATABASE_URL: + 
return + await database.connect() + if database.db is not None: + BOT_ID = Config.BOT_TOKEN.split(":", 1)[0] + config_file = Config.get_all() + old_config = await database.db.settings.deployConfig.find_one( + {"_id": BOT_ID}, + {"_id": 0}, + ) + if old_config is None: + database.db.settings.deployConfig.replace_one( + {"_id": BOT_ID}, + config_file, + upsert=True, + ) + if old_config and old_config != config_file: + await database.db.settings.deployConfig.replace_one( + {"_id": BOT_ID}, + config_file, + upsert=True, + ) + else: + config_dict = await database.db.settings.config.find_one( + {"_id": BOT_ID}, + {"_id": 0}, + ) + if config_dict: + Config.load_dict(config_dict) + + if pf_dict := await database.db.settings.files.find_one( + {"_id": BOT_ID}, + {"_id": 0}, + ): + for key, value in pf_dict.items(): + if value: + file_ = key.replace("__", ".") + async with aiopen(file_, "wb+") as f: + await f.write(value) + + if a2c_options := await database.db.settings.aria2c.find_one( + {"_id": BOT_ID}, + {"_id": 0}, + ): + aria2_options.update(a2c_options) + + if qbit_opt := await database.db.settings.qbittorrent.find_one( + {"_id": BOT_ID}, + {"_id": 0}, + ): + qbit_options.update(qbit_opt) + + if await database.db.users.find_one(): + rows = database.db.users.find({}) + async for row in rows: + uid = row["_id"] + del row["_id"] + thumb_path = f"Thumbnails/{uid}.jpg" + rclone_config_path = f"rclone/{uid}.conf" + token_path = f"tokens/{uid}.pickle" + if row.get("thumb"): + if not await aiopath.exists("Thumbnails"): + await makedirs("Thumbnails") + async with aiopen(thumb_path, "wb+") as f: + await f.write(row["thumb"]) + row["thumb"] = thumb_path + if row.get("rclone_config"): + if not await aiopath.exists("rclone"): + await makedirs("rclone") + async with aiopen(rclone_config_path, "wb+") as f: + await f.write(row["rclone_config"]) + row["rclone_config"] = rclone_config_path + if row.get("token_pickle"): + if not await aiopath.exists("tokens"): + await makedirs("tokens") + async with aiopen(token_path, "wb+") as f: + await f.write(row["token_pickle"]) + row["token_pickle"] = token_path + user_data[uid] = row + LOGGER.info("Users data has been imported from Database") + + if await database.db.rss[BOT_ID].find_one(): + rows = database.db.rss[BOT_ID].find({}) + async for row in rows: + user_id = row["_id"] + del row["_id"] + rss_dict[user_id] = row + LOGGER.info("Rss data has been imported from Database.") + + +async def save_settings(): + if database.db is None: + return + config_dict = Config.get_all() + await database.db.settings.config.replace_one( + {"_id": TgClient.ID}, + config_dict, + upsert=True, + ) + if await database.db.settings.aria2c.find_one({"_id": TgClient.ID}) is None: + await database.db.settings.aria2c.update_one( + {"_id": TgClient.ID}, + {"$set": aria2_options}, + upsert=True, + ) + if await database.db.settings.qbittorrent.find_one({"_id": TgClient.ID}) is None: + await database.save_qbit_settings() + + +async def update_variables(): + if ( + Config.LEECH_SPLIT_SIZE > TgClient.MAX_SPLIT_SIZE + or Config.LEECH_SPLIT_SIZE == 2097152000 + or not Config.LEECH_SPLIT_SIZE + ): + Config.LEECH_SPLIT_SIZE = TgClient.MAX_SPLIT_SIZE + + Config.MIXED_LEECH = bool(Config.MIXED_LEECH and TgClient.IS_PREMIUM_USER) + Config.USER_TRANSMISSION = bool( + Config.USER_TRANSMISSION and TgClient.IS_PREMIUM_USER, + ) + + if Config.AUTHORIZED_CHATS: + aid = Config.AUTHORIZED_CHATS.split() + for id_ in aid: + chat_id, *thread_ids = id_.split("|") + chat_id = int(chat_id.strip()) + if thread_ids: + 
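# A minimal sketch of the restore step above: binary fields kept in a user
# document (thumbnail, rclone config, token pickle) are written back to
# deterministic per-user paths at startup, and the row then stores the path
# instead of the raw bytes. restore_blob is a hypothetical helper, not a bot
# function; folder and extension are parameters here.
from aiofiles import open as aiopen
from aiofiles.os import makedirs
from aiofiles.os import path as aiopath


async def restore_blob(uid: int, data: bytes, folder: str, ext: str) -> str:
    if not await aiopath.exists(folder):
        await makedirs(folder)
    path = f"{folder}/{uid}.{ext}"
    async with aiopen(path, "wb+") as f:
        await f.write(data)
    return path  # e.g. row["thumb"] = await restore_blob(uid, row["thumb"], "Thumbnails", "jpg")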
thread_ids = [int(x.strip()) for x in thread_ids] + user_data[chat_id] = {"is_auth": True, "thread_ids": thread_ids} + else: + user_data[chat_id] = {"is_auth": True} + + if Config.SUDO_USERS: + aid = Config.SUDO_USERS.split() + for id_ in aid: + user_data[int(id_.strip())] = {"is_sudo": True} + + if Config.EXTENSION_FILTER: + fx = Config.EXTENSION_FILTER.split() + for x in fx: + x = x.lstrip(".") + extension_filter.append(x.strip().lower()) + + if Config.GDRIVE_ID: + drives_names.append("Main") + drives_ids.append(Config.GDRIVE_ID) + index_urls.append(Config.INDEX_URL) + + if await aiopath.exists("list_drives.txt"): + async with aiopen("list_drives.txt", "r+") as f: + lines = await f.readlines() + for line in lines: + temp = line.split() + drives_ids.append(temp[1]) + drives_names.append(temp[0].replace("_", " ")) + if len(temp) > 2: + index_urls.append(temp[2]) + else: + index_urls.append("") + + +async def load_configurations(): + if not await aiopath.exists(".netrc"): + async with aiopen(".netrc", "w"): + pass + await (await create_subprocess_shell("chmod 600 .netrc")).wait() + await (await create_subprocess_shell("cp .netrc /root/.netrc")).wait() + + PORT = environ.get("PORT") or environ.get("BASE_URL_PORT", 80) + await create_subprocess_shell( + f"gunicorn web.wserver:app --bind 0.0.0.0:{PORT} --worker-class gevent", + ) + + if await aiopath.exists("accounts.zip"): + if await aiopath.exists("accounts"): + await rmtree("accounts") + await ( + await create_subprocess_exec( + "7z", + "x", + "-o.", + "-aoa", + "accounts.zip", + "accounts/*.json", + ) + ).wait() + await (await create_subprocess_exec("chmod", "-R", "777", "accounts")).wait() + await remove("accounts.zip") + + if not await aiopath.exists("accounts"): + Config.USE_SERVICE_ACCOUNTS = False diff --git a/bot/helper/aeon_utils/__init__.py b/bot/helper/aeon_utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/bot/helper/aeon_utils/access_check.py b/bot/helper/aeon_utils/access_check.py new file mode 100644 index 000000000..8f271f179 --- /dev/null +++ b/bot/helper/aeon_utils/access_check.py @@ -0,0 +1,197 @@ +from re import IGNORECASE, escape, search +from time import time +from uuid import uuid4 + +from pyrogram.errors import PeerIdInvalid, RPCError, UserNotParticipant + +from bot import ( + LOGGER, + user_data, +) +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.helper.aeon_utils.shorteners import short +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.help_messages import nsfw_keywords +from bot.helper.ext_utils.status_utils import get_readable_time +from bot.helper.telegram_helper.button_build import ButtonMaker + + +async def error_check(message): + msg, button = [], None + user_id = message.from_user.id + token_timeout = Config.TOKEN_TIMEOUT + + if message.chat.type != message.chat.type.BOT: + if FSUB_IDS := Config.FSUB_IDS: + join_button = {} + for channel_id in FSUB_IDS.split(): + chat = await get_chat_info(int(channel_id)) + if not chat: + continue + + try: + await chat.get_member(message.from_user.id) + except UserNotParticipant: + invite_link = ( + f"https://t.me/{chat.username}" + if chat.username + else chat.invite_link + ) + join_button[chat.title] = invite_link + except RPCError as e: + LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") + except Exception as e: + LOGGER.error(f"{e} for {channel_id}") + + if join_button: + button = button or ButtonMaker() + for title, link in join_button.items(): + 
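# The AUTHORIZED_CHATS format consumed above, in isolation: entries are
# space-separated, and each may append topic/thread ids with "|" separators.
# A self-contained restatement of the same parse, assuming a value like
# "-100123 -100456|12|34":
def parse_authorized_chats(raw: str) -> dict:
    parsed = {}
    for entry in raw.split():
        chat_id, *thread_ids = entry.split("|")
        data = {"is_auth": True}
        if thread_ids:
            data["thread_ids"] = [int(t.strip()) for t in thread_ids]
        parsed[int(chat_id.strip())] = data
    return parsed


assert parse_authorized_chats("-100123 -100456|12|34") == {
    -100123: {"is_auth": True},
    -100456: {"is_auth": True, "thread_ids": [12, 34]},
}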
button.url_button(f"Join {title}", link, "footer") + msg.append("You haven't joined our channel/group yet!") + + if not token_timeout or user_id in { + Config.OWNER_ID, + user_data.get(user_id, {}).get("is_sudo"), + }: + try: + temp_msg = await message._client.send_message( + chat_id=user_id, + text="Checking Access...", + ) + await temp_msg.delete() + except Exception: + button = button or ButtonMaker() + button.data_button("Start", f"aeon {user_id} private", "header") + msg.append("You haven't initiated the bot in a private message!") + + if user_id not in { + Config.OWNER_ID, + 1781717085, + user_data.get(user_id, {}).get("is_sudo"), + }: + token_msg, button = await token_check(user_id, button) + if token_msg: + msg.append(token_msg) + + if await nsfw_precheck(message): + msg.append("NSFW detected") + + if msg: + username = message.from_user.username + tag = f"@{username}" if username else message.from_user.mention + final_msg = f"Hey, {tag}!\n" + for i, m in enumerate(msg, 1): + final_msg += f"\n
<blockquote>{i}: {m}</blockquote>
" + + if button: + button = button.build_menu(2) + return final_msg, button + + return None, None + + +async def get_chat_info(channel_id): + try: + return await TgClient.bot.get_chat(channel_id) + except PeerIdInvalid as e: + LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") + return None + + +def is_nsfw(text): + pattern = ( + r"(?:^|\W|_)(?:" + + "|".join(escape(keyword) for keyword in nsfw_keywords) + + r")(?:$|\W|_)" + ) + return bool(search(pattern, text, flags=IGNORECASE)) + + +def is_nsfw_data(data): + if isinstance(data, list): + return any( + is_nsfw(item.get("name", "")) + if isinstance(item, dict) + else is_nsfw(item) + for item in data + ) + if isinstance(data, dict): + return any(is_nsfw(item["filename"]) for item in data.get("contents", [])) + return False + + +async def nsfw_precheck(message): + if is_nsfw(message.text): + return True + + reply_to = message.reply_to_message + if not reply_to: + return False + + for attr in ["document", "video"]: + if hasattr(reply_to, attr) and getattr(reply_to, attr): + file_name = getattr(reply_to, attr).file_name + if file_name and is_nsfw(file_name): + return True + + return any( + is_nsfw(getattr(reply_to, attr)) + for attr in ["caption", "text"] + if hasattr(reply_to, attr) and getattr(reply_to, attr) + ) + + +async def check_is_paid(chat, uid): + try: + await chat.get_member(uid) + return True + except UserNotParticipant: + return False + except Exception as e: + LOGGER.error(f"{e} for {chat.id}") + return False + + +async def is_paid(user_id): + if chat := await get_chat_info(Config.PAID_CHANNEL_ID): + return await check_is_paid(chat, user_id) + return True + + +async def token_check(user_id, button=None): + token_timeout = Config.TOKEN_TIMEOUT + if not token_timeout or user_id == Config.OWNER_ID: + return None, button + if Config.PAID_CHANNEL_ID and await is_paid(user_id): + return None, button + + user_data.setdefault(user_id, {}) + data = user_data[user_id] + # await database.connect() + data["time"] = await database.get_token_expiry(user_id) + expire = data.get("time") + isExpired = expire is None or (time() - expire) > token_timeout + if isExpired: + token = data["token"] if expire is None and "token" in data else str(uuid4()) + if expire is not None: + del data["time"] + data["token"] = token + await database.update_user_token(user_id, token) + user_data[user_id] = data + + time_str = get_readable_time(token_timeout, True) + button = button or ButtonMaker() + short_link = await short( + f"https://telegram.me/{TgClient.NAME}?start={token}", + ) + button.url_button("Collect token", short_link) + msg = "Your token has expired, please collect a new token" + if Config.PAID_CHANNEL_ID and Config.PAID_CHANNEL_LINK: + msg += " or subscribe to the paid channel for no token." 
+ button.url_button("Subscribe", Config.PAID_CHANNEL_LINK) + + return (msg + f"\nIt will expire after {time_str}!"), button + + # await database.disconnect() + return None, button diff --git a/bot/helper/aeon_utils/caption_gen.py b/bot/helper/aeon_utils/caption_gen.py new file mode 100644 index 000000000..c7711454a --- /dev/null +++ b/bot/helper/aeon_utils/caption_gen.py @@ -0,0 +1,119 @@ +import json +import os +from contextlib import suppress +from hashlib import md5 + +from aiofiles.os import path as aiopath +from langcodes import Language + +from bot import LOGGER +from bot.helper.ext_utils.bot_utils import cmd_exec +from bot.helper.ext_utils.status_utils import ( + get_readable_file_size, + get_readable_time, +) + + +class DefaultDict(dict): + def __missing__(self, key): + return "Unknown" + + +async def generate_caption(filename, directory, caption_template): + file_path = os.path.join(directory, filename) + + try: + result = await cmd_exec(["mediainfo", "--Output=JSON", file_path]) + if result[1]: + LOGGER.info(f"MediaInfo command output: {result[1]}") + + mediainfo_data = json.loads(result[0]) # Parse JSON output + except Exception as error: + LOGGER.error(f"Failed to retrieve media info: {error}. File may not exist!") + return filename + + media_data = mediainfo_data.get("media", {}) + track_data = media_data.get("track", []) + video_metadata = next( + (track for track in track_data if track["@type"] == "Video"), + {}, + ) + audio_metadata = [track for track in track_data if track["@type"] == "Audio"] + subtitle_metadata = [track for track in track_data if track["@type"] == "Text"] + + video_duration = round(float(video_metadata.get("Duration", 0))) + video_quality = get_video_quality(video_metadata.get("Height")) + + audio_languages = ", ".join( + parse_audio_language("", audio) + for audio in audio_metadata + if audio.get("Language") + ) + subtitle_languages = ", ".join( + parse_subtitle_language("", subtitle) + for subtitle in subtitle_metadata + if subtitle.get("Language") + ) + + audio_languages = audio_languages if audio_languages else "Unknown" + subtitle_languages = subtitle_languages if subtitle_languages else "Unknown" + video_quality = video_quality if video_quality else "Unknown" + file_md5_hash = calculate_md5(file_path) + + caption_data = DefaultDict( + filename=filename, + size=get_readable_file_size(await aiopath.getsize(file_path)), + duration=get_readable_time(video_duration, True), + quality=video_quality, + audios=audio_languages, + subtitles=subtitle_languages, + md5_hash=file_md5_hash, + ) + + return caption_template.format_map(caption_data) + + +def get_video_quality(height): + quality_map = { + 480: "480p", + 540: "540p", + 720: "720p", + 1080: "1080p", + 2160: "2160p", + 4320: "4320p", + 8640: "8640p", + } + for threshold, quality in sorted(quality_map.items()): + if height and int(height) <= threshold: + return quality + return "Unknown" + + +def parse_audio_language(existing_languages, audio_stream): + language_code = audio_stream.get("Language") + if language_code: + with suppress(Exception): + language_name = Language.get(language_code).display_name() + if language_name not in existing_languages: + LOGGER.debug(f"Parsed audio language: {language_name}") + existing_languages += f"{language_name}, " + return existing_languages.strip(", ") + + +def parse_subtitle_language(existing_subtitles, subtitle_stream): + subtitle_code = subtitle_stream.get("Language") + if subtitle_code: + with suppress(Exception): + subtitle_name = 
Language.get(subtitle_code).display_name() + if subtitle_name not in existing_subtitles: + LOGGER.debug(f"Parsed subtitle language: {subtitle_name}") + existing_subtitles += f"{subtitle_name}, " + return existing_subtitles.strip(", ") + + +def calculate_md5(file_path): + md5_hash = md5() + with open(file_path, "rb") as file: + for chunk in iter(lambda: file.read(4096), b""): + md5_hash.update(chunk) + return md5_hash.hexdigest() diff --git a/bot/helper/aeon_utils/metadata.py b/bot/helper/aeon_utils/metadata_editor.py similarity index 70% rename from bot/helper/aeon_utils/metadata.py rename to bot/helper/aeon_utils/metadata_editor.py index 09173048a..bfb11bc2a 100644 --- a/bot/helper/aeon_utils/metadata.py +++ b/bot/helper/aeon_utils/metadata_editor.py @@ -1,17 +1,12 @@ -import os import json +import os from asyncio import create_subprocess_exec from asyncio.subprocess import PIPE from bot import LOGGER -async def change_metadata(file, dirpath, key): - LOGGER.info(f"Starting metadata modification for file: {file}") - temp_file = f"{file}.temp.mkv" - full_file_path = os.path.join(dirpath, file) - temp_file_path = os.path.join(dirpath, temp_file) - +async def get_streams(file): cmd = [ "ffprobe", "-hide_banner", @@ -20,35 +15,72 @@ async def change_metadata(file, dirpath, key): "-print_format", "json", "-show_streams", - full_file_path, + file, ] process = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) stdout, stderr = await process.communicate() if process.returncode != 0: LOGGER.error(f"Error getting stream info: {stderr.decode().strip()}") - return file + return None try: - streams = json.loads(stdout)["streams"] + return json.loads(stdout)["streams"] except KeyError: LOGGER.error( - f"No streams found in the ffprobe output: {stdout.decode().strip()}" + f"No streams found in the ffprobe output: {stdout.decode().strip()}", ) - return file + return None - languages = {} - for stream in streams: - stream_index = stream["index"] - stream_type = stream["codec_type"] - if "tags" in stream and "language" in stream["tags"]: - languages[stream_index] = stream["tags"]["language"] + +# Lots of work need +async def get_watermark_cmd(file, key): + temp_file = f"{file}.temp.mkv" + font_path = "default.otf" cmd = [ "xtra", - "-y", + "-hide_banner", + "-loglevel", + "error", + "-progress", + "pipe:1", "-i", - full_file_path, + file, + "-vf", + f"drawtext=text='{key}':fontfile={font_path}:fontsize=20:fontcolor=white:x=10:y=10", + # "-preset", + # "ultrafast", + "-threads", + f"{max(1, os.cpu_count() // 2)}", + temp_file, + ] + + return cmd, temp_file + + +async def get_metadata_cmd(file_path, key): + """Processes a single file to update metadata.""" + temp_file = f"{file_path}.temp.mkv" + streams = await get_streams(file_path) + if not streams: + return None, None + + languages = { + stream["index"]: stream["tags"]["language"] + for stream in streams + if "tags" in stream and "language" in stream["tags"] + } + + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-progress", + "pipe:1", + "-i", + file_path, "-map_metadata", "-1", "-c", @@ -77,7 +109,7 @@ async def change_metadata(file, dirpath, key): [ f"-metadata:s:v:{stream_index}", f"language={languages[stream_index]}", - ] + ], ) elif stream_type == "audio": cmd.extend( @@ -86,21 +118,21 @@ async def change_metadata(file, dirpath, key): f"0:{stream_index}", f"-metadata:s:a:{audio_index}", f"title={key}", - ] + ], ) if stream_index in languages: cmd.extend( [ f"-metadata:s:a:{audio_index}", 
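# get_streams() above shells out to `ffprobe -print_format json
# -show_streams`, and the metadata command then keeps each stream's language
# tag. The same extraction over an already-decoded payload, with a
# hand-written sample standing in for real ffprobe output:
import json

payload = json.loads(
    '{"streams": [{"index": 0, "codec_type": "video"},'
    ' {"index": 1, "codec_type": "audio", "tags": {"language": "eng"}}]}'
)
languages = {
    s["index"]: s["tags"]["language"]
    for s in payload["streams"]
    if "tags" in s and "language" in s["tags"]
}
assert languages == {1: "eng"}  # only streams carrying tags.language survive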
f"language={languages[stream_index]}", - ] + ], ) audio_index += 1 elif stream_type == "subtitle": codec_name = stream.get("codec_name", "unknown") if codec_name in ["webvtt", "unknown"]: LOGGER.warning( - f"Skipping unsupported subtitle metadata modification: {codec_name} for stream {stream_index}" + f"Skipping unsupported subtitle metadata modification: {codec_name} for stream {stream_index}", ) else: cmd.extend( @@ -109,55 +141,41 @@ async def change_metadata(file, dirpath, key): f"0:{stream_index}", f"-metadata:s:s:{subtitle_index}", f"title={key}", - ] + ], ) if stream_index in languages: cmd.extend( [ f"-metadata:s:s:{subtitle_index}", f"language={languages[stream_index]}", - ] + ], ) subtitle_index += 1 else: cmd.extend(["-map", f"0:{stream_index}"]) - cmd.append(temp_file_path) - - process = await create_subprocess_exec(*cmd, stderr=PIPE, stdout=PIPE) - stdout, stderr = await process.communicate() - - if process.returncode != 0: - err = stderr.decode().strip() - LOGGER.error(err) - LOGGER.error(f"Error modifying metadata for file: {file}") - return file - - os.replace(temp_file_path, full_file_path) - LOGGER.info(f"Metadata modified successfully for file: {file}") - return file + cmd.extend(["-threads", f"{max(1, os.cpu_count() // 2)}", temp_file]) + return cmd, temp_file -async def add_attachment(file, dirpath, attachment_path): +# later +async def add_attachment(file, attachment_path): LOGGER.info(f"Adding photo attachment to file: {file}") temp_file = f"{file}.temp.mkv" - full_file_path = os.path.join(dirpath, file) - temp_file_path = os.path.join(dirpath, temp_file) attachment_ext = attachment_path.split(".")[-1].lower() + mime_type = "application/octet-stream" if attachment_ext in ["jpg", "jpeg"]: mime_type = "image/jpeg" elif attachment_ext == "png": mime_type = "image/png" - else: - mime_type = "application/octet-stream" cmd = [ "xtra", "-y", "-i", - full_file_path, + file, "-attach", attachment_path, "-metadata:s:t", @@ -166,7 +184,7 @@ async def add_attachment(file, dirpath, attachment_path): "copy", "-map", "0", - temp_file_path, + temp_file, ] process = await create_subprocess_exec(*cmd, stderr=PIPE, stdout=PIPE) @@ -176,8 +194,8 @@ async def add_attachment(file, dirpath, attachment_path): err = stderr.decode().strip() LOGGER.error(err) LOGGER.error(f"Error adding photo attachment to file: {file}") - return file + return - os.replace(temp_file_path, full_file_path) + os.replace(temp_file, file) LOGGER.info(f"Photo attachment added successfully to file: {file}") - return file + return diff --git a/bot/helper/aeon_utils/nsfw_check.py b/bot/helper/aeon_utils/nsfw_check.py deleted file mode 100644 index 2b8d81d8d..000000000 --- a/bot/helper/aeon_utils/nsfw_check.py +++ /dev/null @@ -1,82 +0,0 @@ -from re import IGNORECASE, escape, search - -nsfw_keywords = [ - "porn", - "onlyfans", - "nsfw", - "Brazzers", - "adult", - "xnxx", - "xvideos", - "nsfwcherry", - "hardcore", - "Pornhub", - "xvideos2", - "youporn", - "pornrip", - "playboy", - "hentai", - "erotica", - "blowjob", - "redtube", - "stripchat", - "camgirl", - "nude", - "fetish", - "cuckold", - "orgy", - "horny", - "swingers", -] - - -def is_nsfw(text): - pattern = ( - r"(?:^|\W|_)(?:" - + "|".join(escape(keyword) for keyword in nsfw_keywords) - + r")(?:$|\W|_)" - ) - return bool(search(pattern, text, flags=IGNORECASE)) - - -def is_nsfw_data(data): - if isinstance(data, list): - for item in data: - if isinstance(item, dict): - if any( - isinstance(value, str) and is_nsfw(value) - for value in item.values() - ): - return 
True - elif ( - "name" in item - and isinstance(item["name"], str) - and is_nsfw(item["name"]) - ): - return True - elif isinstance(data, dict) and "contents" in data: - for item in data["contents"]: - if "filename" in item and is_nsfw(item["filename"]): - return True - return False - - -async def nsfw_precheck(message): - if is_nsfw(message.text): - return True - - reply_to = message.reply_to_message - if not reply_to: - return False - - for attr in ["document", "video"]: - if hasattr(reply_to, attr) and getattr(reply_to, attr): - file_name = getattr(reply_to, attr).file_name - if file_name and is_nsfw(file_name): - return True - - return any( - is_nsfw(getattr(reply_to, attr)) - for attr in ["caption", "text"] - if hasattr(reply_to, attr) and getattr(reply_to, attr) - ) diff --git a/bot/helper/aeon_utils/send_react.py b/bot/helper/aeon_utils/send_react.py deleted file mode 100644 index 053864cbe..000000000 --- a/bot/helper/aeon_utils/send_react.py +++ /dev/null @@ -1,40 +0,0 @@ -from random import choice - -from bot import LOGGER, bot - - -async def send_react(message): - try: - chat_id = int(message.chat.id) - chat_info = await bot.get_chat(chat_id) - available_reactions = chat_info.available_reactions - - full_emoji_set = { - "👌", - "🔥", - "🥰", - "❤️", - "❤️‍🔥", - "💯", - "⚡", - "💋", - "😘", - "🤩", - "😍", - } - - if available_reactions: - if getattr(available_reactions, "all_are_enabled", False): - emojis = full_emoji_set - else: - emojis = { - reaction.emoji for reaction in available_reactions.reactions - } - - await message.react(choice(list(emojis)), big=True) - except AttributeError as e: - LOGGER.error(f"AttributeError: {e}") - except TypeError as e: - LOGGER.error(f"TypeError: {e}") - except Exception as e: - LOGGER.error(f"An unexpected error occurred: {e}") diff --git a/bot/helper/aeon_utils/shorteners.py b/bot/helper/aeon_utils/shorteners.py new file mode 100644 index 000000000..f98c3e936 --- /dev/null +++ b/bot/helper/aeon_utils/shorteners.py @@ -0,0 +1,34 @@ +from asyncio import sleep +from random import choice +from urllib.parse import quote + +from aiohttp import ClientSession +from pyshorteners import Shortener + +from bot import shorteners_list + + +async def short(long_url): + async with ClientSession() as session: + for _attempt in range(4): + shortener_info = choice(shorteners_list) + try: + async with session.get( + f"https://{shortener_info['domain']}/api?api={shortener_info['api_key']}&url={quote(long_url)}", + ) as response: + result = await response.json() + short_url = result.get("shortenedUrl", long_url) + if short_url != long_url: + long_url = short_url + break + except Exception: + continue + + s = Shortener() + for _attempt in range(4): + try: + return s.tinyurl.short(long_url) + except Exception: + await sleep(1) + + return long_url diff --git a/bot/helper/aeon_utils/tinyfy.py b/bot/helper/aeon_utils/tinyfy.py deleted file mode 100644 index 56e5b1f18..000000000 --- a/bot/helper/aeon_utils/tinyfy.py +++ /dev/null @@ -1,14 +0,0 @@ -from pyshorteners import Shortener - -from bot import LOGGER - - -def tinyfy(long_url): - s = Shortener() - try: - short_url = s.tinyurl.short(long_url) - LOGGER.info(f"tinyfied {long_url} to {short_url}") - return short_url - except Exception: - LOGGER.error(f"Failed to shorten URL: {long_url}") - return long_url diff --git a/bot/helper/common.py b/bot/helper/common.py new file mode 100644 index 000000000..23d19c5a6 --- /dev/null +++ b/bot/helper/common.py @@ -0,0 +1,1200 @@ +import contextlib +import os +from asyncio import gather, 
sleep +from os import path as ospath +from os import walk +from re import IGNORECASE, sub +from secrets import token_hex +from shlex import split + +from aiofiles.os import listdir, makedirs, remove +from aiofiles.os import path as aiopath +from aioshutil import move, rmtree +from pyrogram.enums import ChatAction + +from bot import ( + LOGGER, + cpu_eater_lock, + extension_filter, + intervals, + multi_tags, + task_dict, + task_dict_lock, + user_data, +) +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.helper.aeon_utils.metadata_editor import get_metadata_cmd, get_watermark_cmd + +from .ext_utils.bot_utils import get_size_bytes, new_task, sync_to_async +from .ext_utils.bulk_links import extract_bulk_links +from .ext_utils.files_utils import ( + SevenZ, + get_base_name, + get_path_size, + is_archive, + is_archive_split, + is_first_archive_split, + split_file, +) +from .ext_utils.links_utils import ( + is_gdrive_id, + is_gdrive_link, + is_rclone_path, + is_telegram_link, +) +from .ext_utils.media_utils import ( + FFMpeg, + create_thumb, + get_document_type, + is_mkv, + take_ss, +) +from .mirror_leech_utils.gdrive_utils.list import GoogleDriveList +from .mirror_leech_utils.rclone_utils.list import RcloneList +from .mirror_leech_utils.status_utils.ffmpeg_status import FFmpegStatus +from .mirror_leech_utils.status_utils.sevenz_status import SevenZStatus +from .telegram_helper.message_utils import ( + get_tg_link_message, + send_message, + send_status_message, +) + + +class TaskConfig: + def __init__(self): + self.mid = self.message.id + self.user = self.message.from_user or self.message.sender_chat + self.user_id = self.user.id + self.user_dict = user_data.get(self.user_id, {}) + self.dir = f"{Config.DOWNLOAD_DIR}{self.mid}" + self.up_dir = "" + self.link = "" + self.up_dest = "" + self.rc_flags = "" + self.tag = "" + self.name = "" + self.subname = "" + self.name_sub = "" + self.metadata = "" + self.watermark = "" + self.thumbnail_layout = "" + self.folder_name = "" + self.split_size = 0 + self.max_split_size = 0 + self.multi = 0 + self.size = 0 + self.subsize = 0 + self.proceed_count = 0 + self.is_leech = False + self.is_qbit = False + self.is_clone = False + self.is_ytdlp = False + self.user_transmission = False + self.mixed_leech = False + self.extract = False + self.compress = False + self.select = False + self.seed = False + self.compress = False + self.extract = False + self.join = False + self.private_link = False + self.stop_duplicate = False + self.sample_video = False + self.convert_audio = False + self.convert_video = False + self.screen_shots = False + self.is_cancelled = False + self.force_run = False + self.force_download = False + self.force_upload = False + self.is_torrent = False + self.as_med = False + self.as_doc = False + self.is_file = False + self.progress = True + self.ffmpeg_cmds = None + self.chat_thread_id = None + self.subproc = None + self.thumb = None + self.extension_filter = [] + self.files_to_proceed = [] + self.is_super_chat = self.message.chat.type.name in ["SUPERGROUP", "CHANNEL"] + + def get_token_path(self, dest): + if dest.startswith("mtp:"): + return f"tokens/{self.user_id}.pickle" + if dest.startswith("sa:") or ( + Config.USE_SERVICE_ACCOUNTS and not dest.startswith("tp:") + ): + return "accounts" + return "token.pickle" + + def get_config_path(self, dest): + return ( + f"rclone/{self.user_id}.conf" + if dest.startswith("mrcc:") + else "rclone.conf" + ) + + async def is_token_exists(self, path, status): + if 
is_rclone_path(path): + config_path = self.get_config_path(path) + if config_path != "rclone.conf" and status == "up": + self.private_link = True + if not await aiopath.exists(config_path): + raise ValueError(f"Rclone Config: {config_path} not Exists!") + elif (status == "dl" and is_gdrive_link(path)) or ( + status == "up" and is_gdrive_id(path) + ): + token_path = self.get_token_path(path) + if token_path.startswith("tokens/") and status == "up": + self.private_link = True + if not await aiopath.exists(token_path): + raise ValueError(f"NO TOKEN! {token_path} not Exists!") + + async def before_start(self): + self.name_sub = ( + self.name_sub + or self.user_dict.get("name_sub", False) + or (Config.NAME_SUBSTITUTE if "name_sub" not in self.user_dict else "") + ) + self.metadata = ( + self.metadata + or self.user_dict.get("metadata", False) + or (Config.METADATA_KEY if "metadata" not in self.user_dict else "") + ) + self.watermark = ( + self.watermark + or self.user_dict.get("watermark", False) + or (Config.WATERMARK_KEY if "watermark" not in self.user_dict else "") + ) + if self.name_sub: + self.name_sub = [x.split("/") for x in self.name_sub.split(" | ")] + self.extension_filter = self.user_dict.get("excluded_extensions") or ( + extension_filter + if "excluded_extensions" not in self.user_dict + else ["aria2", "!qB"] + ) + if self.link not in ["rcl", "gdl"]: + if is_rclone_path(self.link): + if not self.link.startswith("mrcc:") and self.user_dict.get( + "user_tokens", + False, + ): + self.link = f"mrcc:{self.link}" + await self.is_token_exists(self.link, "dl") + elif is_gdrive_link(self.link): + if not self.link.startswith( + ("mtp:", "tp:", "sa:"), + ) and self.user_dict.get("user_tokens", False): + self.link = f"mtp:{self.link}" + await self.is_token_exists(self.link, "dl") + elif self.link == "rcl": + if not self.is_ytdlp: + self.link = await RcloneList(self).get_rclone_path("rcd") + if not is_rclone_path(self.link): + raise ValueError(self.link) + elif self.link == "gdl" and not self.is_ytdlp: + self.link = await GoogleDriveList(self).get_target_id("gdd") + if not is_gdrive_id(self.link): + raise ValueError(self.link) + + self.user_transmission = TgClient.IS_PREMIUM_USER and ( + self.user_dict.get("user_transmission") + or ( + Config.USER_TRANSMISSION + and "user_transmission" not in self.user_dict + ) + ) + + if ( + "upload_paths" in self.user_dict + and self.up_dest + and self.up_dest in self.user_dict["upload_paths"] + ): + self.up_dest = self.user_dict["upload_paths"][self.up_dest] + + if self.ffmpeg_cmds and not isinstance(self.ffmpeg_cmds, list): + if self.user_dict.get("ffmpeg_cmds", None): + ffmpeg_dict = self.user_dict["ffmpeg_cmds"] + self.ffmpeg_cmds = [ + value + for key in list(self.ffmpeg_cmds) + if key in ffmpeg_dict + for value in ffmpeg_dict[key] + ] + elif "ffmpeg_cmds" not in self.user_dict and Config.FFMPEG_CMDS: + ffmpeg_dict = Config.FFMPEG_CMDS + self.ffmpeg_cmds = [ + value + for key in list(self.ffmpeg_cmds) + if key in ffmpeg_dict + for value in ffmpeg_dict[key] + ] + else: + self.ffmpeg_cmds = None + if not self.is_leech: + self.stop_duplicate = self.user_dict.get("stop_duplicate") or ( + "stop_duplicate" not in self.user_dict and Config.STOP_DUPLICATE + ) + default_upload = ( + self.user_dict.get("default_upload", "") or Config.DEFAULT_UPLOAD + ) + if (not self.up_dest and default_upload == "rc") or self.up_dest == "rc": + self.up_dest = ( + self.user_dict.get("rclone_path") or Config.RCLONE_PATH + ) + elif ( + not self.up_dest and default_upload == "gd" + ) 
or self.up_dest == "gd": + self.up_dest = self.user_dict.get("gdrive_id") or Config.GDRIVE_ID + if not self.up_dest: + raise ValueError("No Upload Destination!") + if is_gdrive_id(self.up_dest): + if not self.up_dest.startswith( + ("mtp:", "tp:", "sa:"), + ) and self.user_dict.get("user_tokens", False): + self.up_dest = f"mtp:{self.up_dest}" + elif is_rclone_path(self.up_dest): + if not self.up_dest.startswith("mrcc:") and self.user_dict.get( + "user_tokens", + False, + ): + self.up_dest = f"mrcc:{self.up_dest}" + self.up_dest = self.up_dest.strip("/") + else: + raise ValueError("Wrong Upload Destination!") + + if self.up_dest not in ["rcl", "gdl"]: + await self.is_token_exists(self.up_dest, "up") + + if self.up_dest == "rcl": + if self.is_clone: + if not is_rclone_path(self.link): + raise ValueError( + "You can't clone from different types of tools", + ) + config_path = self.get_config_path(self.link) + else: + config_path = None + self.up_dest = await RcloneList(self).get_rclone_path( + "rcu", + config_path, + ) + if not is_rclone_path(self.up_dest): + raise ValueError(self.up_dest) + elif self.up_dest == "gdl": + if self.is_clone: + if not is_gdrive_link(self.link): + raise ValueError( + "You can't clone from different types of tools", + ) + token_path = self.get_token_path(self.link) + else: + token_path = None + self.up_dest = await GoogleDriveList(self).get_target_id( + "gdu", + token_path, + ) + if not is_gdrive_id(self.up_dest): + raise ValueError(self.up_dest) + elif self.is_clone: + if is_gdrive_link(self.link) and self.get_token_path( + self.link, + ) != self.get_token_path(self.up_dest): + raise ValueError("You must use the same token to clone!") + if is_rclone_path(self.link) and self.get_config_path( + self.link, + ) != self.get_config_path(self.up_dest): + raise ValueError("You must use the same config to clone!") + else: + self.up_dest = ( + self.up_dest + or self.user_dict.get("leech_dest") + or Config.LEECH_DUMP_CHAT + ) + self.mixed_leech = TgClient.IS_PREMIUM_USER and ( + self.user_dict.get("mixed_leech") + or (Config.MIXED_LEECH and "mixed_leech" not in self.user_dict) + ) + if self.up_dest: + if not isinstance(self.up_dest, int): + if self.up_dest.startswith("b:"): + self.up_dest = self.up_dest.replace("b:", "", 1) + self.user_transmission = False + self.mixed_leech = False + elif self.up_dest.startswith("u:"): + self.up_dest = self.up_dest.replace("u:", "", 1) + self.user_transmission = TgClient.IS_PREMIUM_USER + elif self.up_dest.startswith("m:"): + self.user_transmission = TgClient.IS_PREMIUM_USER + self.mixed_leech = self.user_transmission + if "|" in self.up_dest: + self.up_dest, self.chat_thread_id = [ + int(x) if x.lstrip("-").isdigit() else x + for x in self.up_dest.split("|", 1) + ] + elif self.up_dest.lstrip("-").isdigit(): + self.up_dest = int(self.up_dest) + elif self.up_dest.lower() == "pm": + self.up_dest = self.user_id + + if self.user_transmission: + try: + chat = await TgClient.user.get_chat(self.up_dest) + except Exception: + chat = None + if chat is None: + self.user_transmission = False + self.mixed_leech = False + else: + uploader_id = TgClient.user.me.id + if chat.type.name not in ["SUPERGROUP", "CHANNEL", "GROUP"]: + self.user_transmission = False + self.mixed_leech = False + else: + member = await chat.get_member(uploader_id) + if ( + not member.privileges.can_manage_chat + or not member.privileges.can_delete_messages + ): + self.user_transmission = False + self.mixed_leech = False + + if not self.user_transmission or self.mixed_leech: + try: + 
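# A simplified restatement of the leech-destination parsing just above (the
# real code also folds in the b:/u:/m: transmission prefixes): an optional
# "|thread_id" suffix is split off, numeric ids become ints, and the literal
# "pm" resolves to the requesting user. resolve_dest is a hypothetical name.
def resolve_dest(dest: str, user_id: int):
    thread_id = None
    if "|" in dest:
        dest, thread = dest.split("|", 1)
        thread_id = int(thread) if thread.lstrip("-").isdigit() else thread
    if dest.lstrip("-").isdigit():
        dest = int(dest)
    elif dest.lower() == "pm":
        dest = user_id
    return dest, thread_id


assert resolve_dest("-100123|45", 7) == (-100123, 45)
assert resolve_dest("pm", 7) == (7, None)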
chat = await self.client.get_chat(self.up_dest) + except Exception: + chat = None + if chat is None: + if self.user_transmission: + self.mixed_leech = False + else: + raise ValueError("Chat not found!") + else: + uploader_id = self.client.me.id + if chat.type.name in ["SUPERGROUP", "CHANNEL", "GROUP"]: + member = await chat.get_member(uploader_id) + if ( + not member.privileges.can_manage_chat + or not member.privileges.can_delete_messages + ): + if not self.user_transmission: + raise ValueError( + "You don't have enough privileges in this chat!", + ) + self.mixed_leech = False + else: + try: + await self.client.send_chat_action( + self.up_dest, + ChatAction.TYPING, + ) + except Exception: + raise ValueError( + "Start the bot and try again!", + ) from None + elif ( + self.user_transmission or self.mixed_leech + ) and not self.is_super_chat: + self.user_transmission = False + self.mixed_leech = False + if self.split_size: + if self.split_size.isdigit(): + self.split_size = int(self.split_size) + else: + self.split_size = get_size_bytes(self.split_size) + self.split_size = ( + self.split_size + or self.user_dict.get("split_size") + or Config.LEECH_SPLIT_SIZE + ) + self.max_split_size = ( + TgClient.MAX_SPLIT_SIZE if self.user_transmission else 2097152000 + ) + self.split_size = min(self.split_size, self.max_split_size) + + if not self.as_doc: + self.as_doc = ( + not self.as_med + if self.as_med + else ( + self.user_dict.get("as_doc", False) + or (Config.AS_DOCUMENT and "as_doc" not in self.user_dict) + ) + ) + + self.thumbnail_layout = ( + self.thumbnail_layout + or self.user_dict.get("thumb_layout", False) + or ( + Config.THUMBNAIL_LAYOUT + if "thumb_layout" not in self.user_dict + else "" + ) + ) + + if self.thumb != "none" and is_telegram_link(self.thumb): + msg = (await get_tg_link_message(self.thumb))[0] + self.thumb = ( + await create_thumb(msg) if msg.photo or msg.document else "" + ) + + async def get_tag(self, text: list): + if len(text) > 1 and text[1].startswith("Tag: "): + user_info = text[1].split("Tag: ") + if len(user_info) >= 3: + id_ = user_info[-1] + self.tag = " ".join(user_info[:-1]) + else: + self.tag, id_ = text[1].split("Tag: ")[1].split() + self.user = self.message.from_user = await self.client.get_users(id_) + self.user_id = self.user.id + self.user_dict = user_data.get(self.user_id, {}) + with contextlib.suppress(Exception): + await self.message.unpin() + if self.user: + if username := self.user.username: + self.tag = f"@{username}" + elif hasattr(self.user, "mention"): + self.tag = self.user.mention + else: + self.tag = self.user.title + + @new_task + async def run_multi(self, input_list, obj): + await sleep(7) + if not self.multi_tag and self.multi > 1: + self.multi_tag = token_hex(2) + multi_tags.add(self.multi_tag) + elif self.multi <= 1: + if self.multi_tag in multi_tags: + multi_tags.discard(self.multi_tag) + return + if self.multi_tag and self.multi_tag not in multi_tags: + await send_message( + self.message, + f"{self.tag} Multi Task has been cancelled!", + ) + await send_status_message(self.message) + async with task_dict_lock: + for fd_name in self.same_dir: + self.same_dir[fd_name]["total"] -= self.multi + return + if len(self.bulk) != 0: + msg = input_list[:1] + msg.append(f"{self.bulk[0]} -i {self.multi - 1} {self.options}") + msgts = " ".join(msg) + if self.multi > 2: + msgts += f"\nCancel Multi: /stop {self.multi_tag}" + nextmsg = await send_message(self.message, msgts) + else: + msg = [s.strip() for s in input_list] + index = msg.index("-i") + 
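# The split-size rule above in one place: a value may arrive as raw bytes or
# with a unit suffix (get_size_bytes handles the latter in the bot; the tiny
# parser below merely stands in for it), falls back to the configured
# default, and is always clamped to the session's ceiling.
def normalize_split_size(value: str, default: int, ceiling: int) -> int:
    units = {"k": 1024, "m": 1024**2, "g": 1024**3}
    if value.isdigit():
        size = int(value)
    else:
        size = int(float(value[:-1]) * units.get(value[-1].lower(), 1))
    return min(size or default, ceiling)


assert normalize_split_size("2g", 0, 4 * 1024**3) == 2 * 1024**3  # within the cap
assert normalize_split_size("5g", 0, 4 * 1024**3) == 4 * 1024**3  # clamped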
msg[index + 1] = f"{self.multi - 1}" + nextmsg = await self.client.get_messages( + chat_id=self.message.chat.id, + message_ids=self.message.reply_to_message_id + 1, + ) + msgts = " ".join(msg) + if self.multi > 2: + msgts += f"\nCancel Multi: /stop {self.multi_tag}" + nextmsg = await send_message(nextmsg, msgts) + nextmsg = await self.client.get_messages( + chat_id=self.message.chat.id, + message_ids=nextmsg.id, + ) + if self.message.from_user: + nextmsg.from_user = self.user + else: + nextmsg.sender_chat = self.user + if intervals["stopAll"]: + return + await obj( + self.client, + nextmsg, + self.is_qbit, + self.is_leech, + self.same_dir, + self.bulk, + self.multi_tag, + self.options, + ).new_event() + + async def init_bulk(self, input_list, bulk_start, bulk_end, obj): + try: + self.bulk = await extract_bulk_links(self.message, bulk_start, bulk_end) + if len(self.bulk) == 0: + raise ValueError("Bulk Empty!") + b_msg = input_list[:1] + self.options = input_list[1:] + index = self.options.index("-b") + del self.options[index] + if bulk_start or bulk_end: + del self.options[index + 1] + self.options = " ".join(self.options) + b_msg.append(f"{self.bulk[0]} -i {len(self.bulk)} {self.options}") + msg = " ".join(b_msg) + if len(self.bulk) > 2: + self.multi_tag = token_hex(2) + multi_tags.add(self.multi_tag) + msg += f"\nCancel Multi: /stop {self.multi_tag}" + nextmsg = await send_message(self.message, msg) + nextmsg = await self.client.get_messages( + chat_id=self.message.chat.id, + message_ids=nextmsg.id, + ) + if self.message.from_user: + nextmsg.from_user = self.user + else: + nextmsg.sender_chat = self.user + await obj( + self.client, + nextmsg, + self.is_qbit, + self.is_leech, + self.same_dir, + self.bulk, + self.multi_tag, + self.options, + ).new_event() + except Exception: + await send_message( + self.message, + "Reply to text file or to telegram message that have links seperated by new line!", + ) + + async def proceed_extract(self, dl_path, gid): + pswd = self.extract if isinstance(self.extract, str) else "" + self.files_to_proceed = [] + if self.is_file and is_archive(dl_path): + self.files_to_proceed.append(dl_path) + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + if is_first_archive_split(file_) or ( + is_archive(file_) and not file_.lower().endswith(".rar") + ): + f_path = ospath.join(dirpath, file_) + self.files_to_proceed.append(f_path) + + if not self.files_to_proceed: + return dl_path + sevenz = SevenZ(self) + LOGGER.info(f"Extracting: {self.name}") + async with task_dict_lock: + task_dict[self.mid] = SevenZStatus(self, sevenz, gid, "Extract") + for dirpath, _, files in await sync_to_async(walk, self.dir, topdown=False): + for file_ in files: + if is_first_archive_split(file_) or ( + is_archive(file_) and not file_.lower().endswith(".rar") + ): + self.proceed_count += 1 + f_path = ospath.join(dirpath, file_) + t_path = get_base_name(f_path) if self.is_file else dirpath + if not self.is_file: + self.subname = file_ + code = await sevenz.extract(f_path, t_path, pswd) + if code == 0: + try: + await remove(f_path) + except Exception: + self.is_cancelled = True + for file_ in files: + if is_archive_split(file_): + del_path = ospath.join(dirpath, file_) + try: + await remove(del_path) + except Exception: + self.is_cancelled = True + return t_path if self.is_file and code == 0 else dl_path + + async def proceed_ffmpeg(self, dl_path, gid): + checked = False + cmds = [ + [part.strip() for part in split(item) if 
part.strip()] + for item in self.ffmpeg_cmds + ] + try: + ffmpeg = FFMpeg(self) + for ffmpeg_cmd in cmds: + self.proceed_count = 0 + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-progress", + "pipe:1", + "-threads", # Should work + "4", + *ffmpeg_cmd, + ] + if "-del" in cmd: + cmd.remove("-del") + delete_files = True + else: + delete_files = False + index = cmd.index("-i") + input_file = cmd[index + 1] + if input_file.endswith(".video"): + ext = "video" + elif input_file.endswith(".audio"): + ext = "audio" + elif "." not in input_file: + ext = "all" + else: + ext = ospath.splitext(input_file)[-1].lower() + if await aiopath.isfile(dl_path): + is_video, is_audio, _ = await get_document_type(dl_path) + if (not is_video and not is_audio) or ( + is_video and ext == "audio" + ): + break + if (is_audio and not is_video and ext == "video") or ( + ext + not in [ + "all", + "audio", + "video", + ] + and not dl_path.lower().endswith(ext) + ): + break + new_folder = ospath.splitext(dl_path)[0] + name = ospath.basename(dl_path) + await makedirs(new_folder, exist_ok=True) + file_path = f"{new_folder}/{name}" + await move(dl_path, file_path) + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "FFmpeg", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + LOGGER.info(f"Running ffmpeg cmd for: {file_path}") + cmd[index + 1] = file_path + self.subsize = self.size + res = await ffmpeg.ffmpeg_cmds(cmd, file_path) + if res: + if delete_files: + await remove(file_path) + if len(await listdir(new_folder)) == 1: + folder = new_folder.rsplit("/", 1)[0] + self.name = ospath.basename(res[0]) + if self.name.startswith("ffmpeg"): + self.name = self.name.split(".", 1)[-1] + dl_path = ospath.join(folder, self.name) + await move(res[0], dl_path) + await rmtree(new_folder) + else: + dl_path = new_folder + self.name = new_folder.rsplit("/", 1)[-1] + else: + dl_path = new_folder + self.name = new_folder.rsplit("/", 1)[-1] + else: + await move(file_path, dl_path) + await rmtree(new_folder) + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + var_cmd = cmd.copy() + if self.is_cancelled: + return False + f_path = ospath.join(dirpath, file_) + is_video, is_audio, _ = await get_document_type(f_path) + if (not is_video and not is_audio) or ( + is_video and ext == "audio" + ): + continue + if (is_audio and not is_video and ext == "video") or ( + ext + not in [ + "all", + "audio", + "video", + ] + and not f_path.lower().endswith(ext) + ): + continue + self.proceed_count += 1 + var_cmd[index + 1] = f_path + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "FFmpeg", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + LOGGER.info(f"Running ffmpeg cmd for: {f_path}") + self.subsize = await get_path_size(f_path) + self.subname = file_ + res = await ffmpeg.ffmpeg_cmds(var_cmd, f_path) + if res and delete_files: + await remove(f_path) + if len(res) == 1: + file_name = ospath.basename(res[0]) + if file_name.startswith("ffmpeg"): + newname = file_name.split(".", 1)[-1] + newres = ospath.join(dirpath, newname) + await move(res[0], newres) + finally: + if checked: + cpu_eater_lock.release() + return dl_path + + async def substitute(self, dl_path): + def perform_substitution(name, substitutions): + for substitution in substitutions: + sen 
= False + pattern = substitution[0] + if len(substitution) > 1: + if len(substitution) > 2: + sen = substitution[2] == "s" + res = substitution[1] + elif len(substitution[1]) == 0: + res = " " + else: + res = substitution[1] + else: + res = "" + try: + name = sub( + rf"{pattern}", + res, + name, + flags=IGNORECASE if sen else 0, + ) + except Exception as e: + LOGGER.error( + f"Substitute Error: pattern: {pattern} res: {res}. Error: {e}", + ) + return False + if len(name.encode()) > 255: + LOGGER.error(f"Substitute: {name} is too long") + return False + return name + + if self.is_file: + up_dir, name = dl_path.rsplit("/", 1) + new_name = perform_substitution(name, self.name_sub) + if not new_name: + return dl_path + new_path = ospath.join(up_dir, new_name) + await move(dl_path, new_path) + return new_path + for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False): + for file_ in files: + f_path = ospath.join(dirpath, file_) + new_name = perform_substitution(file_, self.name_sub) + if not new_name: + continue + await move(f_path, ospath.join(dirpath, new_name)) + return dl_path + + async def generate_screenshots(self, dl_path): + ss_nb = int(self.screen_shots) if isinstance(self.screen_shots, str) else 10 + if self.is_file: + if (await get_document_type(dl_path))[0]: + LOGGER.info(f"Creating Screenshot for: {dl_path}") + res = await take_ss(dl_path, ss_nb) + if res: + new_folder = ospath.splitext(dl_path)[0] + name = ospath.basename(dl_path) + await makedirs(new_folder, exist_ok=True) + await gather( + move(dl_path, f"{new_folder}/{name}"), + move(res, new_folder), + ) + return new_folder + else: + LOGGER.info(f"Creating Screenshot for: {dl_path}") + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + f_path = ospath.join(dirpath, file_) + if (await get_document_type(f_path))[0]: + await take_ss(f_path, ss_nb) + return dl_path + + async def convert_media(self, dl_path, gid): + fvext = [] + if self.convert_video: + vdata = self.convert_video.split() + vext = vdata[0].lower() + if len(vdata) > 2: + if "+" in vdata[1].split(): + vstatus = "+" + elif "-" in vdata[1].split(): + vstatus = "-" + else: + vstatus = "" + fvext.extend(f".{ext.lower()}" for ext in vdata[2:]) + else: + vstatus = "" + else: + vext = "" + vstatus = "" + + faext = [] + if self.convert_audio: + adata = self.convert_audio.split() + aext = adata[0].lower() + if len(adata) > 2: + if "+" in adata[1].split(): + astatus = "+" + elif "-" in adata[1].split(): + astatus = "-" + else: + astatus = "" + faext.extend(f".{ext.lower()}" for ext in adata[2:]) + else: + astatus = "" + else: + aext = "" + astatus = "" + + self.files_to_proceed = {} + all_files = [] + if self.is_file: + all_files.append(dl_path) + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + f_path = ospath.join(dirpath, file_) + all_files.append(f_path) + + for f_path in all_files: + is_video, is_audio, _ = await get_document_type(f_path) + if ( + is_video + and vext + and not f_path.lower().endswith(f".{vext}") + and ( + (vstatus == "+" and f_path.lower().endswith(tuple(fvext))) + or (vstatus == "-" and not f_path.lower().endswith(tuple(fvext))) + or not vstatus + ) + ): + self.files_to_proceed[f_path] = "video" + elif ( + is_audio + and aext + and not is_video + and not f_path.lower().endswith(f".{aext}") + and ( + (astatus == "+" and f_path.lower().endswith(tuple(faext))) + or (astatus == "-" and not 
f_path.lower().endswith(tuple(faext))) + or not astatus + ) + ): + self.files_to_proceed[f_path] = "audio" + del all_files + + if self.files_to_proceed: + ffmpeg = FFMpeg(self) + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus(self, ffmpeg, gid, "Convert") + self.progress = False + async with cpu_eater_lock: + self.progress = True + for f_path, f_type in self.files_to_proceed.items(): + self.proceed_count += 1 + LOGGER.info(f"Converting: {f_path}") + if self.is_file: + self.subsize = self.size + else: + self.subsize = await get_path_size(f_path) + self.subname = ospath.basename(f_path) + if f_type == "video": + res = await ffmpeg.convert_video(f_path, vext) + else: + res = await ffmpeg.convert_audio(f_path, aext) + if res: + try: + await remove(f_path) + except Exception: + self.is_cancelled = True + return False + if self.is_file: + return res + return dl_path + + async def generate_sample_video(self, dl_path, gid): + data = ( + self.sample_video.split(":") + if isinstance(self.sample_video, str) + else "" + ) + if data: + sample_duration = int(data[0]) if data[0] else 60 + part_duration = int(data[1]) if len(data) > 1 else 4 + else: + sample_duration = 60 + part_duration = 4 + + self.files_to_proceed = {} + if self.is_file and (await get_document_type(dl_path))[0]: + file_ = ospath.basename(dl_path) + self.files_to_proceed[dl_path] = file_ + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + f_path = ospath.join(dirpath, file_) + if (await get_document_type(f_path))[0]: + self.files_to_proceed[f_path] = file_ + if self.files_to_proceed: + ffmpeg = FFMpeg(self) + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus(self, ffmpeg, gid, "Sample Video") + self.progress = False + async with cpu_eater_lock: + self.progress = True + LOGGER.info(f"Creating Sample video: {self.name}") + for f_path, file_ in self.files_to_proceed.items(): + self.proceed_count += 1 + if self.is_file: + self.subsize = self.size + else: + self.subsize = await get_path_size(f_path) + self.subname = file_ + res = await ffmpeg.sample_video( + f_path, + sample_duration, + part_duration, + ) + if res and self.is_file: + new_folder = ospath.splitext(f_path)[0] + await makedirs(new_folder, exist_ok=True) + await gather( + move(f_path, f"{new_folder}/{file_}"), + move(res, f"{new_folder}/SAMPLE.{file_}"), + ) + return new_folder + return dl_path + + async def proceed_compress(self, dl_path, gid): + pswd = self.compress if isinstance(self.compress, str) else "" + if self.is_leech and self.is_file: + new_folder = ospath.splitext(dl_path)[0] + name = ospath.basename(dl_path) + await makedirs(new_folder, exist_ok=True) + new_dl_path = f"{new_folder}/{name}" + await move(dl_path, new_dl_path) + dl_path = new_dl_path + up_path = f"{new_dl_path}.zip" + self.is_file = False + else: + up_path = f"{dl_path}.zip" + sevenz = SevenZ(self) + async with task_dict_lock: + task_dict[self.mid] = SevenZStatus(self, sevenz, gid, "Zip") + return await sevenz.zip(dl_path, up_path, pswd) + + async def proceed_split(self, dl_path, gid): + self.files_to_proceed = {} + if self.is_file: + f_size = await get_path_size(dl_path) + if f_size > self.split_size: + self.files_to_proceed[dl_path] = [f_size, ospath.basename(dl_path)] + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + f_path = ospath.join(dirpath, file_) + f_size = await get_path_size(f_path) + if f_size > self.split_size: + 
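# convert_video/convert_audio values parsed above follow an
# "ext [+|-] ext1 ext2 ..." mini-grammar: the first token is the target
# container, "+" restricts conversion to the listed source extensions, and
# "-" excludes them. The same parse as a standalone function (hypothetical
# name, same semantics):
def parse_convert_flag(value: str):
    data = value.split()
    target = data[0].lower()
    status, scoped = "", []
    if len(data) > 2:
        status = data[1] if data[1] in ("+", "-") else ""
        scoped = [f".{ext.lower()}" for ext in data[2:]]
    return target, status, scoped


assert parse_convert_flag("mp4 + mkv webm") == ("mp4", "+", [".mkv", ".webm"])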
self.files_to_proceed[f_path] = [f_size, file_] + if self.files_to_proceed: + ffmpeg = FFMpeg(self) + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus(self, ffmpeg, gid, "Split") + LOGGER.info(f"Splitting: {self.name}") + for f_path, (f_size, file_) in self.files_to_proceed.items(): + self.proceed_count += 1 + if self.is_file: + self.subsize = self.size + else: + self.subsize = f_size + self.subname = file_ + parts = -(-f_size // self.split_size) + split_size = self.split_size + if not self.as_doc and (await get_document_type(f_path))[0]: + self.progress = True + res = await ffmpeg.split(f_path, file_, parts, split_size) + else: + self.progress = False + res = await split_file(f_path, split_size, self) + if self.is_cancelled: + return False + if res or f_size >= self.max_split_size: + try: + await remove(f_path) + except Exception: + self.is_cancelled = True + return None + return None + + # change according sync + async def proceed_metadata(self, dl_path, gid): + key = self.metadata + ffmpeg = FFMpeg(self) + checked = False + if self.is_file: + if is_mkv(dl_path): + cmd, temp_file = await get_metadata_cmd(dl_path, key) + if cmd: + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "Metadata", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + self.subsize = self.size + res = await ffmpeg.metadata_watermark_cmds(cmd, dl_path) + if res: + os.replace(temp_file, dl_path) + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + file_path = ospath.join(dirpath, file_) + if self.is_cancelled: + cpu_eater_lock.release() + return "" + self.proceed_count += 1 + if is_mkv(file_path): + cmd, temp_file = await get_metadata_cmd(file_path, key) + if cmd: + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "Metadata", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + LOGGER.info(f"Running metadata cmd for: {file_path}") + self.subsize = await aiopath.getsize(file_path) + self.subname = file_ + res = await ffmpeg.metadata_watermark_cmds( + cmd, + file_path, + ) + if res: + os.replace(temp_file, file_path) + if checked: + cpu_eater_lock.release() + return dl_path + + async def proceed_watermark(self, dl_path, gid): + key = self.watermark + ffmpeg = FFMpeg(self) + checked = False + if self.is_file: + if is_mkv(dl_path): + cmd, temp_file = await get_watermark_cmd(dl_path, key) + if cmd: + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "Watermark", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + self.subsize = self.size + res = await ffmpeg.metadata_watermark_cmds(cmd, dl_path) + if res: + os.replace(temp_file, dl_path) + else: + for dirpath, _, files in await sync_to_async( + walk, + dl_path, + topdown=False, + ): + for file_ in files: + file_path = ospath.join(dirpath, file_) + if self.is_cancelled: + cpu_eater_lock.release() + return "" + if is_mkv(file_path): + cmd, temp_file = await get_watermark_cmd(file_path, key) + if cmd: + if not checked: + checked = True + async with task_dict_lock: + task_dict[self.mid] = FFmpegStatus( + self, + ffmpeg, + gid, + "Watermark", + ) + self.progress = False + await cpu_eater_lock.acquire() + self.progress = True + LOGGER.info(f"Running cmd for: 
{file_path}") + self.subsize = await aiopath.getsize(file_path) + self.subname = file_ + res = await ffmpeg.metadata_watermark_cmds( + cmd, + file_path, + ) + if res: + os.replace(temp_file, file_path) + if checked: + cpu_eater_lock.release() + return dl_path diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py index 88d3c25fd..179a0dade 100644 --- a/bot/helper/ext_utils/bot_utils.py +++ b/bot/helper/ext_utils/bot_utils.py @@ -1,496 +1,216 @@ -import contextlib -from os import path as ospath -from re import match as re_match -from html import escape -from time import time -from uuid import uuid4 from asyncio import ( - sleep, create_subprocess_exec, create_subprocess_shell, run_coroutine_threadsafe, + sleep, ) -from functools import wraps, partial -from urllib.parse import urlparse from asyncio.subprocess import PIPE from concurrent.futures import ThreadPoolExecutor +from functools import partial, wraps -from psutil import disk_usage -from aiohttp import ClientSession as aioClientSession -from aiofiles import open as aiopen -from aiofiles.os import path as aiopath -from aiofiles.os import mkdir -from pyrogram.types import BotCommand - -from bot import ( - LOGGER, - DATABASE_URL, - bot_loop, - bot_name, - user_data, - config_dict, - download_dict, - extra_buttons, - bot_start_time, - download_dict_lock, -) -from bot.helper.aeon_utils.tinyfy import tinyfy -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.ext_utils.shorteners import short_url -from bot.helper.ext_utils.telegraph_helper import telegraph -from bot.helper.telegram_helper.bot_commands import BotCommands +from httpx import AsyncClient + +from bot import bot_loop, user_data +from bot.core.config_manager import Config from bot.helper.telegram_helper.button_build import ButtonMaker -if config_dict.get("GDRIVE_ID"): - commands = [ - "MirrorCommand", - "LeechCommand", - "YtdlCommand", - "YtdlLeechCommand", - "CloneCommand", - "MediaInfoCommand", - "CountCommand", - "ListCommand", - "SearchCommand", - "UserSetCommand", - "StatusCommand", - "StatsCommand", - "StopAllCommand", - "HelpCommand", - "BotSetCommand", - "LogCommand", - "RestartCommand", - ] -else: - commands = [ - "LeechCommand", - "YtdlLeechCommand", - "MediaInfoCommand", - "SearchCommand", - "UserSetCommand", - "StatusCommand", - "StatsCommand", - "StopAllCommand", - "HelpCommand", - "BotSetCommand", - "LogCommand", - "RestartCommand", - ] +from .help_messages import ( + CLONE_HELP_DICT, + MIRROR_HELP_DICT, + YT_HELP_DICT, +) +from .telegraph_helper import telegraph + +COMMAND_USAGE = {} -command_descriptions = { - "MirrorCommand": "- Start mirroring", - "LeechCommand": "- Start leeching", - "YtdlCommand": "- Mirror yt-dlp supported link", - "YtdlLeechCommand": "- Leech through yt-dlp supported link", - "CloneCommand": "- Copy file/folder to Drive", - "MediaInfoCommand": "- Get MediaInfo", - "CountCommand": "- Count file/folder on Google Drive.", - "ListCommand": "- Search in Drive", - "SearchCommand": "- Search in Torrent", - "UserSetCommand": "- User settings", - "StatusCommand": "- Get mirror status message", - "StatsCommand": "- Check Bot & System stats", - "StopAllCommand": "- Cancel all tasks added by you to the bot.", - "HelpCommand": "- Get detailed help", - "BotSetCommand": "- [ADMIN] Open Bot settings", - "LogCommand": "- [ADMIN] View log", - "RestartCommand": "- [ADMIN] Restart the bot", -} - - -THREADPOOL = ThreadPoolExecutor(max_workers=1000) -MAGNET_REGEX = r"magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*" 
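# Illustrative sketch of the pattern behind the reworked SetInterval above: a
# background task that sleeps, then awaits the stored action with its saved
# arguments until cancel() is called. heartbeat and demo are hypothetical names.
import asyncio

async def heartbeat(tag):
    print(f"tick: {tag}")

async def demo():
    async def _interval(delay, action, *args):
        while True:
            await asyncio.sleep(delay)
            await action(*args)

    task = asyncio.create_task(_interval(1, heartbeat, "status"))
    await asyncio.sleep(3.5)  # let it fire a few times
    task.cancel()  # SetInterval.cancel() cancels its task the same way

asyncio.run(demo())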
-URL_REGEX = r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$" -SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB"] -STATUS_START = 0 -PAGES = 1 -PAGE_NO = 1 -STATUS_LIMIT = 4 - - -class MirrorStatus: - STATUS_UPLOADING = "Uploading" - STATUS_DOWNLOADING = "Downloading" - STATUS_CLONING = "Cloning" - STATUS_QUEUEDL = "DL queued" - STATUS_QUEUEUP = "UL queued" - STATUS_PAUSED = "Paused" - STATUS_ARCHIVING = "Archiving" - STATUS_EXTRACTING = "Extracting" - STATUS_SPLITTING = "Splitting" - STATUS_CHECKING = "CheckUp" - STATUS_SEEDING = "Seeding" - STATUS_PROCESSING = "Processing" +THREAD_POOL = ThreadPoolExecutor(max_workers=3000) class SetInterval: - def __init__(self, interval, action): + def __init__(self, interval, action, *args, **kwargs): self.interval = interval self.action = action - self.task = bot_loop.create_task(self.__set_interval()) + self.task = bot_loop.create_task(self._set_interval(*args, **kwargs)) - async def __set_interval(self): + async def _set_interval(self, *args, **kwargs): while True: await sleep(self.interval) - await self.action() + await self.action(*args, **kwargs) def cancel(self): self.task.cancel() -def is_mkv(file): - return file.lower().endswith("mkv") - - -def get_readable_file_size(size_in_bytes: int): - if size_in_bytes is None: - return "0B" - index = 0 - while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1: - size_in_bytes /= 1024 - index += 1 - return ( - f"{size_in_bytes:.2f}{SIZE_UNITS[index]}" - if index > 0 - else f"{size_in_bytes:.2f}B" - ) - - -async def get_task_by_gid(gid): - async with download_dict_lock: - return next( - ( - dl - for dl in download_dict.values() - if len(gid) >= 8 and dl.gid().startswith(gid) - ), - None, - ) - - -async def get_all_task(req_status, user_id=None): - dls = [] - async with download_dict_lock: - for dl in list(download_dict.values()): - if user_id and user_id != dl.message.from_user.id: - continue - status = dl.status() - if req_status in ["all", status]: - dls.append(dl) - return dls +def _build_command_usage(help_dict, command_key): + buttons = ButtonMaker() + for name in list(help_dict.keys())[1:]: + buttons.data_button(name, f"help {command_key} {name}") + buttons.data_button("Close", "help close") + COMMAND_USAGE[command_key] = [help_dict["main"], buttons.build_menu(3)] + buttons.reset() -async def get_user_tasks(user_id, maxtask): - if tasks := await get_all_task("all", user_id): - return len(tasks) >= maxtask - return None +def create_help_buttons(): + _build_command_usage(MIRROR_HELP_DICT, "mirror") + _build_command_usage(YT_HELP_DICT, "yt") + _build_command_usage(CLONE_HELP_DICT, "clone") def bt_selection_buttons(id_): - gid = id_[:8] - pincode = "".join([n for n in id_ if n.isdigit()][:4]) + gid = id_[:12] if len(id_) > 25 else id_ + pin = "".join([n for n in id_ if n.isdigit()][:4]) buttons = ButtonMaker() - base_url = config_dict["BASE_URL"] - buttons.url("Select", f"{base_url}/app/files/{id_}") - buttons.callback("Pincode", f"btsel pin {gid} {pincode}") - buttons.callback("Cancel", f"btsel rm {gid} {id_}") - buttons.callback("Done Selecting", f"btsel done {gid} {id_}") - return buttons.column(2) + if Config.WEB_PINCODE: + buttons.url_button("Select Files", f"{Config.BASE_URL}/app/files?gid={id_}") + buttons.data_button("Pincode", f"sel pin {gid} {pin}") + else: + buttons.url_button( + "Select Files", + f"{Config.BASE_URL}/app/files?gid={id_}&pin={pin}", + ) + 
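# Illustrative sketch of how the web-selection pincode above is derived: the
# first four digits found anywhere in the torrent gid/hash. pincode is a
# hypothetical name for demonstration only.
def pincode(gid: str) -> str:
    return "".join(ch for ch in gid if ch.isdigit())[:4]

assert pincode("a9f30bc1d24e") == "9301"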
buttons.data_button("Done Selecting", f"sel done {gid} {id_}") + buttons.data_button("Cancel", f"sel cancel {gid}") + return buttons.build_menu(2) async def get_telegraph_list(telegraph_content): path = [ - (await telegraph.create_page(title="Drive Search", content=content))["path"] + ( + await telegraph.create_page( + title="Mirror-Leech-Bot Drive Search", + content=content, + ) + )["path"] for content in telegraph_content ] if len(path) > 1: await telegraph.edit_telegraph(path, telegraph_content) buttons = ButtonMaker() - buttons.url("View", f"https://telegra.ph/{path[0]}") - buttons = extra_btns(buttons) - return buttons.column(1) - - -def handle_index(index, dic): - while True: - if abs(index) < len(dic): - break - if index < 0: - index = len(dic) - abs(index) - elif index > 0: - index = index - len(dic) - return index - - -async def fetch_user_tds(user_id, force=False): - user_dict = user_data.get(user_id, {}) - if user_dict.get("td_mode", False) or force: - return user_dict.get("user_tds", {}) - return {} - - -def progress_bar(pct): - if isinstance(pct, str): - pct = float(pct.strip("%")) - p = min(max(pct, 0), 100) - c_full = int((p + 5) // 10) - p_str = "●" * c_full - p_str += "○" * (10 - c_full) - return p_str - - -def source(self): - return ( - sender_chat.title - if (sender_chat := self.message.sender_chat) - else self.message.from_user.username or self.message.from_user.id - ) - - -def get_readable_message(): - msg = "Powered by Aeon\n\n" - button = None - tasks = len(download_dict) - current_time = get_readable_time(time() - bot_start_time) - if config_dict["BOT_MAX_TASKS"]: - bmax_task = f"/{config_dict['BOT_MAX_TASKS']}" - else: - bmax_task = "" - globals()["PAGES"] = (tasks + STATUS_LIMIT - 1) // STATUS_LIMIT - if PAGE_NO > PAGES and PAGES != 0: - globals()["STATUS_START"] = STATUS_LIMIT * (PAGES - 1) - globals()["PAGE_NO"] = PAGES - for download in list(download_dict.values())[ - STATUS_START : STATUS_LIMIT + STATUS_START - ]: - msg += f"{download.status()}: {escape(f'{download.name()}')}\n" - msg += f"by {source(download)}\n" - if download.status() not in [ - MirrorStatus.STATUS_SPLITTING, - MirrorStatus.STATUS_SEEDING, - MirrorStatus.STATUS_PROCESSING, - ]: - msg += f"
{progress_bar(download.progress())} {download.progress()}" - msg += f"\n{download.processed_bytes()} of {download.size()}" - msg += f"\nSpeed: {download.speed()}" - msg += f"\nEstimated: {download.eta()}" - if hasattr(download, "seeders_num"): - with contextlib.suppress(Exception): - msg += f"\nSeeders: {download.seeders_num()} | Leechers: {download.leechers_num()}" - elif download.status() == MirrorStatus.STATUS_SEEDING: - msg += f"
Size: {download.size()}" - msg += f"\nSpeed: {download.upload_speed()}" - msg += f"\nUploaded: {download.uploaded_bytes()}" - msg += f"\nRatio: {download.ratio()}" - msg += f"\nTime: {download.seeding_time()}" - else: - msg += f"
Size: {download.size()}" - msg += f"\nElapsed: {get_readable_time(time() - download.message.date.timestamp())}
" - msg += f"\n
/stop_{download.gid()[:8]}
\n\n" - if len(msg) == 0: - return None, None - if tasks > STATUS_LIMIT: - buttons = ButtonMaker() - buttons.callback("Prev", "status pre") - buttons.callback(f"{PAGE_NO}/{PAGES}", "status ref") - buttons.callback("Next", "status nex") - button = buttons.column(3) - msg += f"• Tasks: {tasks}{bmax_task}" - msg += f"\n• Bot uptime: {current_time}" - msg += f"\n• Free disk space: {get_readable_file_size(disk_usage('/usr/src/app/downloads/').free)}" - return msg, button - - -def text_to_bytes(size_text): - size_text = size_text.lower() - multiplier = { - "k": 1024, - "m": 1048576, - "g": 1073741824, - "t": 1099511627776, - "p": 1125899906842624, - } - for unit, factor in multiplier.items(): - if unit in size_text: - size_value = float(size_text.split(unit)[0]) - return size_value * factor - return 0 - - -async def turn_page(data): - global STATUS_START, PAGE_NO # noqa: PLW0603 - async with download_dict_lock: - if data[1] == "nex": - if PAGE_NO == PAGES: - STATUS_START = 0 - PAGE_NO = 1 - else: - STATUS_START += STATUS_LIMIT - PAGE_NO += 1 - elif data[1] == "pre": - if PAGE_NO == 1: - STATUS_START = STATUS_LIMIT * (PAGES - 1) - PAGE_NO = PAGES - else: - STATUS_START -= STATUS_LIMIT - PAGE_NO -= 1 - - -def get_readable_time(seconds, full_time=False): - periods = [ - ("millennium", 31536000000), - ("century", 3153600000), - ("decade", 315360000), - ("year", 31536000), - ("month", 2592000), - ("week", 604800), - ("day", 86400), - ("hour", 3600), - ("minute", 60), - ("second", 1), - ] - result = "" - for period_name, period_seconds in periods: - if seconds >= period_seconds: - period_value, seconds = divmod(seconds, period_seconds) - plural_suffix = "s" if period_value > 1 else "" - result += f"{int(period_value)} {period_name}{plural_suffix} " - if not full_time: - break - return result.strip() - - -def is_magnet(url): - return bool(re_match(MAGNET_REGEX, url)) - - -def is_url(url): - return bool(re_match(URL_REGEX, url)) - - -def is_gdrive_link(url): - return "drive.google.com" in url - - -def is_telegram_link(url): - return url.startswith(("https://t.me/", "tg://openmessage?user_id=")) - - -def is_share_link(url): - domain = urlparse(url).hostname - return any(x in domain for x in ["appdirve", "hubdrive", "gdflix", "filepress"]) - - -def is_mega_link(url): - return "mega.nz" in url or "mega.co.nz" in url - - -def is_rclone_path(path): - return bool( - re_match( - r"^(mrcc:)?(?!magnet:)(?![- ])[a-zA-Z0-9_\. -]+(? 
token_timeout - ) - if is_expired: - token = data["token"] if expire is None and "token" in data else str(uuid4()) - if expire is not None: - del data["time"] - data["token"] = token - if DATABASE_URL: - await DbManager().update_user_token(user_id, token) - user_data[user_id].update(data) - time_str = get_readable_time(token_timeout, True) - if button is None: - button = ButtonMaker() - button.url( - "Collect token", - tinyfy(short_url(f"https://telegram.me/{bot_name}?start={token}")), - ) - return ( - f"Your token has expired, please collect a new token.\nIt will expire after {time_str}!", - button, - ) - return None, button - - -def extra_btns(buttons): - if extra_buttons: - for btn_name, btn_url in extra_buttons.items(): - buttons.url(btn_name, btn_url) - return buttons - - -commands = [ - BotCommand( - getattr(BotCommands, cmd)[0] - if isinstance(getattr(BotCommands, cmd), list) - else getattr(BotCommands, cmd), - command_descriptions[cmd], - ) - for cmd in commands -] - - -async def set_commands(bot): - if config_dict["SET_COMMANDS"]: - await bot.set_bot_commands(commands) diff --git a/bot/helper/ext_utils/bulk_links.py b/bot/helper/ext_utils/bulk_links.py index 081633ab4..851f9ba2d 100644 --- a/bot/helper/ext_utils/bulk_links.py +++ b/bot/helper/ext_utils/bulk_links.py @@ -2,51 +2,40 @@ from aiofiles.os import remove -async def get_links_from_message(text, bulk_start, bulk_end): - links_list = text.split("\n") - links_list = [item.strip() for item in links_list if len(item) != 0] - +def filter_links(links_list: list, bulk_start: int, bulk_end: int) -> list: if bulk_start != 0 and bulk_end != 0: links_list = links_list[bulk_start:bulk_end] elif bulk_start != 0: links_list = links_list[bulk_start:] elif bulk_end != 0: links_list = links_list[:bulk_end] - return links_list -async def get_links_from_file(message, bulk_start, bulk_end): +def get_links_from_message(text: str) -> list: + links_list = text.split("\n") + return [item.strip() for item in links_list if len(item) != 0] + + +async def get_links_from_file(message) -> list: links_list = [] text_file_dir = await message.download() - async with aiopen(text_file_dir, "r+") as f: lines = await f.readlines() links_list.extend(line.strip() for line in lines if len(line) != 0) - - if bulk_start != 0 and bulk_end != 0: - links_list = links_list[bulk_start:bulk_end] - elif bulk_start != 0: - links_list = links_list[bulk_start:] - elif bulk_end != 0: - links_list = links_list[:bulk_end] - await remove(text_file_dir) - return links_list -async def extract_bulk_links(message, bulk_start, bulk_end): +async def extract_bulk_links(message, bulk_start: str, bulk_end: str) -> list: bulk_start = int(bulk_start) bulk_end = int(bulk_end) - if ( - (reply_to := message.reply_to_message) - and (file_ := reply_to.document) - and (file_.mime_type == "text/plain") - ): - return await get_links_from_file( - message.reply_to_message, bulk_start, bulk_end - ) - if text := message.reply_to_message.text: - return await get_links_from_message(text, bulk_start, bulk_end) - return [] + links_list = [] + if reply_to := message.reply_to_message: + if (file_ := reply_to.document) and (file_.mime_type == "text/plain"): + links_list = await get_links_from_file(reply_to) + elif text := reply_to.text: + links_list = get_links_from_message(text) + return ( + filter_links(links_list, bulk_start, bulk_end) if links_list else links_list + ) diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py index 5b497b29d..1935a2d09 100644 --- 
a/bot/helper/ext_utils/db_handler.py +++ b/bot/helper/ext_utils/db_handler.py @@ -1,90 +1,97 @@ +from importlib import import_module + from aiofiles import open as aiopen from aiofiles.os import path as aiopath -from aiofiles.os import makedirs -from pymongo.errors import PyMongoError from motor.motor_asyncio import AsyncIOMotorClient +from pymongo.errors import PyMongoError +from pymongo.server_api import ServerApi -from bot import ( - LOGGER, - DATABASE_URL, - bot_id, - bot_loop, - user_data, - config_dict, - qbit_options, - aria2_options, -) +from bot import LOGGER, qbit_options, rss_dict, user_data +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config class DbManager: def __init__(self): - self.__err = False - self.__db = None - self.__conn = None - self.__connect() + self._return = True + self._conn = None + self.db = None - def __connect(self): + async def connect(self): try: - self.__conn = AsyncIOMotorClient(DATABASE_URL) - self.__db = self.__conn.luna + if self._conn is not None: + await self._conn.close() + self._conn = AsyncIOMotorClient( + Config.DATABASE_URL, + server_api=ServerApi("1"), + ) + self.db = self._conn.luna + self._return = False except PyMongoError as e: LOGGER.error(f"Error in DB connection: {e}") - self.__err = True + self.db = None + self._return = True + self._conn = None - async def db_load(self): - if self.__err: + async def disconnect(self): + self._return = True + if self._conn is not None: + await self._conn.close() + self._conn = None + + async def update_deploy_config(self): + if self._return: return - await self.__db.settings.config.update_one( - {"_id": bot_id}, {"$set": config_dict}, upsert=True + settings = import_module("config") + config_file = { + key: value.strip() if isinstance(value, str) else value + for key, value in vars(settings).items() + if not key.startswith("__") + } + await self.db.settings.deployConfig.replace_one( + {"_id": TgClient.ID}, + config_file, + upsert=True, ) - if await self.__db.settings.aria2c.find_one({"_id": bot_id}) is None: - await self.__db.settings.aria2c.update_one( - {"_id": bot_id}, {"$set": aria2_options}, upsert=True - ) - if await self.__db.settings.qbittorrent.find_one({"_id": bot_id}) is None: - await self.__db.settings.qbittorrent.update_one( - {"_id": bot_id}, {"$set": qbit_options}, upsert=True - ) - if await self.__db.users[bot_id].find_one(): - rows = self.__db.users[bot_id].find({}) - async for row in rows: - uid = row["_id"] - del row["_id"] - thumb_path = f"Thumbnails/{uid}.jpg" - rclone_path = f"tanha/{uid}.conf" - if row.get("thumb"): - if not await aiopath.exists("Thumbnails"): - await makedirs("Thumbnails") - async with aiopen(thumb_path, "wb+") as f: - await f.write(row["thumb"]) - row["thumb"] = thumb_path - if row.get("rclone"): - if not await aiopath.exists("tanha"): - await makedirs("tanha") - async with aiopen(rclone_path, "wb+") as f: - await f.write(row["rclone"]) - row["rclone"] = rclone_path - user_data[uid] = row - self.__conn.close async def update_config(self, dict_): - if self.__err: + if self._return: return - await self.__db.settings.config.update_one( - {"_id": bot_id}, {"$set": dict_}, upsert=True + await self.db.settings.config.update_one( + {"_id": TgClient.ID}, + {"$set": dict_}, + upsert=True, ) - self.__conn.close async def update_aria2(self, key, value): - if self.__err: + if self._return: return - await self.__db.settings.aria2c.update_one( - {"_id": bot_id}, {"$set": {key: value}}, upsert=True + await self.db.settings.aria2c.update_one( 
+ {"_id": TgClient.ID}, + {"$set": {key: value}}, + upsert=True, + ) + + async def update_qbittorrent(self, key, value): + if self._return: + return + await self.db.settings.qbittorrent.update_one( + {"_id": TgClient.ID}, + {"$set": {key: value}}, + upsert=True, + ) + + async def save_qbit_settings(self): + if self._return: + return + await self.db.settings.qbittorrent.update_one( + {"_id": TgClient.ID}, + {"$set": qbit_options}, + upsert=True, ) - self.__conn.close async def update_private_file(self, path): - if self.__err: + if self._return: return if await aiopath.exists(path): async with aiopen(path, "rb+") as pf: @@ -92,105 +99,162 @@ async def update_private_file(self, path): else: pf_bin = "" path = path.replace(".", "__") - await self.__db.settings.files.update_one( - {"_id": bot_id}, {"$set": {path: pf_bin}}, upsert=True + await self.db.settings.files.update_one( + {"_id": TgClient.ID}, + {"$set": {path: pf_bin}}, + upsert=True, ) - self.__conn.close + if path == "config.py": + await self.update_deploy_config() async def update_user_data(self, user_id): - if self.__err: + if self._return: return - data = user_data[user_id] + data = user_data.get(user_id, {}) if data.get("thumb"): del data["thumb"] - if data.get("rclone"): - del data["rclone"] + if data.get("rclone_config"): + del data["rclone_config"] + if data.get("token_pickle"): + del data["token_pickle"] if data.get("token"): del data["token"] if data.get("time"): del data["time"] - await self.__db.users[bot_id].replace_one( - {"_id": user_id}, data, upsert=True - ) - self.__conn.close + await self.db.users.replace_one({"_id": user_id}, data, upsert=True) async def update_user_doc(self, user_id, key, path=""): - if self.__err: + if self._return: return if path: async with aiopen(path, "rb+") as doc: doc_bin = await doc.read() else: doc_bin = "" - await self.__db.users[bot_id].update_one( - {"_id": user_id}, {"$set": {key: doc_bin}}, upsert=True + await self.db.users.update_one( + {"_id": user_id}, + {"$set": {key: doc_bin}}, + upsert=True, + ) + + async def rss_update_all(self): + if self._return: + return + for user_id in list(rss_dict.keys()): + await self.db.rss[TgClient.ID].replace_one( + {"_id": user_id}, + rss_dict[user_id], + upsert=True, + ) + + async def rss_update(self, user_id): + if self._return: + return + await self.db.rss[TgClient.ID].replace_one( + {"_id": user_id}, + rss_dict[user_id], + upsert=True, + ) + + async def rss_delete(self, user_id): + if self._return: + return + await self.db.rss[TgClient.ID].delete_one({"_id": user_id}) + + async def add_incomplete_task(self, cid, link, tag): + if self._return: + return + await self.db.tasks[TgClient.ID].insert_one( + {"_id": link, "cid": cid, "tag": tag}, ) - self.__conn.close async def get_pm_uids(self): - if self.__err: + if self._return: return None - return [doc["_id"] async for doc in self.__db.pm_users[bot_id].find({})] + return [doc["_id"] async for doc in self.db.pm_users[TgClient.ID].find({})] async def update_pm_users(self, user_id): - if self.__err: + if self._return: return - if not bool(await self.__db.pm_users[bot_id].find_one({"_id": user_id})): - await self.__db.pm_users[bot_id].insert_one({"_id": user_id}) + if not bool(await self.db.pm_users[TgClient.ID].find_one({"_id": user_id})): + await self.db.pm_users[TgClient.ID].insert_one({"_id": user_id}) LOGGER.info(f"New PM User Added : {user_id}") - self.__conn.close async def rm_pm_user(self, user_id): - if self.__err: + if self._return: return - await self.__db.pm_users[bot_id].delete_one({"_id": 
user_id}) - self.__conn.close + await self.db.pm_users[TgClient.ID].delete_one({"_id": user_id}) async def update_user_tdata(self, user_id, token, time): - if self.__err: + if self._return: return - await self.__db.access_token.update_one( - {"_id": user_id}, {"$set": {"token": token, "time": time}}, upsert=True + await self.db.access_token.update_one( + {"_id": user_id}, + {"$set": {"token": token, "time": time}}, + upsert=True, ) - self.__conn.close async def update_user_token(self, user_id, token): - if self.__err: + if self._return: return - await self.__db.access_token.update_one( - {"_id": user_id}, {"$set": {"token": token}}, upsert=True + await self.db.access_token.update_one( + {"_id": user_id}, + {"$set": {"token": token}}, + upsert=True, ) - self.__conn.close async def get_token_expiry(self, user_id): - if self.__err: + if self._return: return None - user_data = await self.__db.access_token.find_one({"_id": user_id}) + user_data = await self.db.access_token.find_one({"_id": user_id}) if user_data: return user_data.get("time") - self.__conn.close return None async def delete_user_token(self, user_id): - if self.__err: + if self._return: return - await self.__db.access_token.delete_one({"_id": user_id}) + await self.db.access_token.delete_one({"_id": user_id}) async def get_user_token(self, user_id): - if self.__err: + if self._return: return None - user_data = await self.__db.access_token.find_one({"_id": user_id}) + user_data = await self.db.access_token.find_one({"_id": user_id}) if user_data: return user_data.get("token") - self.__conn.close return None async def delete_all_access_tokens(self): - if self.__err: + if self._return: + return + await self.db.access_token.delete_many({}) + + async def rm_complete_task(self, link): + if self._return: + return + await self.db.tasks[TgClient.ID].delete_one({"_id": link}) + + async def get_incomplete_tasks(self): + notifier_dict = {} + if self._return: + return notifier_dict + if await self.db.tasks[TgClient.ID].find_one(): + rows = self.db.tasks[TgClient.ID].find({}) + async for row in rows: + if row["cid"] in list(notifier_dict.keys()): + if row["tag"] in list(notifier_dict[row["cid"]]): + notifier_dict[row["cid"]][row["tag"]].append(row["_id"]) + else: + notifier_dict[row["cid"]][row["tag"]] = [row["_id"]] + else: + notifier_dict[row["cid"]] = {row["tag"]: [row["_id"]]} + await self.db.tasks[TgClient.ID].drop() + return notifier_dict + + async def trunc_table(self, name): + if self._return: return - await self.__db.access_token.delete_many({}) - self.__conn.close + await self.db[name][TgClient.ID].drop() -if DATABASE_URL: - bot_loop.run_until_complete(DbManager().db_load()) +database = DbManager() diff --git a/bot/helper/ext_utils/exceptions.py b/bot/helper/ext_utils/exceptions.py index fa7b37c79..b46ba321f 100644 --- a/bot/helper/ext_utils/exceptions.py +++ b/bot/helper/ext_utils/exceptions.py @@ -1,10 +1,15 @@ -class DirectDownloadLinkError(Exception): - pass +# ruff: noqa: N818 +class DirectDownloadLinkException(Exception): + """No method found for extracting a direct download link from the http link""" -class ExtractionArchiveError(Exception): - pass +class NotSupportedExtractionArchive(Exception): + """The archive format the user is trying to extract is not supported""" -class TgLinkError(Exception): - pass +class RssShutdownException(Exception): + """This exception should be raised when shutdown is called to stop the monitor""" + + +class TgLinkException(Exception): + """No access granted for this chat""" diff --git 
a/bot/helper/ext_utils/files_utils.py b/bot/helper/ext_utils/files_utils.py index d8017dbf5..cb6432be8 100644 --- a/bot/helper/ext_utils/files_utils.py +++ b/bot/helper/ext_utils/files_utils.py @@ -1,52 +1,38 @@ -import contextlib +from asyncio import create_subprocess_exec, sleep, wait_for +from asyncio.subprocess import PIPE +from os import makedirs, readlink, walk from os import path as ospath -from os import walk -from re import IGNORECASE -from re import sub as re_sub -from re import split as re_split +from re import IGNORECASE, escape from re import search as re_search -from sys import exit as sexit -from time import time, gmtime, strftime -from shlex import split as ssplit -from shutil import rmtree, disk_usage -from asyncio import gather, create_task, create_subprocess_exec -from hashlib import md5 +from re import split as re_split +from shutil import rmtree from subprocess import run as srun -from asyncio.subprocess import PIPE +from sys import exit -from magic import Magic -from natsort import natsorted -from aioshutil import rmtree as aiormtree -from langcodes import Language -from telegraph import upload_file -from aiofiles.os import path as aiopath -from aiofiles.os import mkdir, rmdir, listdir, makedirs -from aiofiles.os import remove as aioremove - -from bot import ( - LOGGER, - MAX_SPLIT_SIZE, - GLOBAL_EXTENSION_FILTER, - aria2, - user_data, - config_dict, - xnox_client, +from aiofiles.os import ( + listdir, + remove, + rmdir, + symlink, +) +from aiofiles.os import ( + makedirs as aiomakedirs, ) -from bot.modules.mediainfo import parseinfo -from bot.helper.aeon_utils.metadata import change_metadata -from bot.helper.ext_utils.bot_utils import ( - is_mkv, - cmd_exec, - sync_to_async, - get_readable_time, - get_readable_file_size, +from aiofiles.os import ( + path as aiopath, ) -from bot.helper.ext_utils.telegraph_helper import telegraph +from aiofiles.os import ( + readlink as aioreadlink, +) +from aioshutil import rmtree as aiormtree +from magic import Magic -from .exceptions import ExtractionArchiveError +from bot import LOGGER, aria2, xnox_client +from bot.core.config_manager import Config + +from .bot_utils import cmd_exec, sync_to_async +from .exceptions import NotSupportedExtractionArchive -FIRST_SPLIT_REGEX = r"(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$" -SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$" ARCH_EXT = [ ".tar.bz2", ".tar.gz", @@ -85,581 +71,138 @@ ".udf", ".vhd", ".xar", + ".zst", + ".zstd", + ".cbz", + ".apfs", + ".ar", + ".qcow", + ".macho", + ".exe", + ".dll", + ".sys", + ".pmd", + ".swf", + ".swfc", + ".simg", + ".vdi", + ".vhdx", + ".vmdk", + ".gzip", + ".lzma86", + ".sha256", + ".sha512", + ".sha224", + ".sha384", + ".sha1", + ".md5", + ".crc32", + ".crc64", ] +FIRST_SPLIT_REGEX = r"(\.|_)part0*1\.rar$|(\.|_)7z\.0*1$|(\.|_)zip\.0*1$|^(?!.*(\.|_)part\d+\.rar$).*\.rar$" -async def is_multi_streams(path): - try: - result = await cmd_exec( - [ - "ffprobe", - "-hide_banner", - "-loglevel", - "error", - "-print_format", - "json", - "-show_streams", - path, - ] - ) - if res := result[1]: - LOGGER.warning(f"Get Video Streams: {res}") - except Exception as e: - LOGGER.error(f"Get Video Streams: {e}. 
Mostly File not found!") - return False - fields = eval(result[0]).get("streams") - if fields is None: - LOGGER.error(f"get_video_streams: {result}") - return False - videos = 0 - audios = 0 - for stream in fields: - if stream.get("codec_type") == "video": - videos += 1 - elif stream.get("codec_type") == "audio": - audios += 1 - return videos > 1 or audios > 1 - - -async def get_media_info(path, metadata=False): - try: - result = await cmd_exec( - [ - "ffprobe", - "-hide_banner", - "-loglevel", - "error", - "-print_format", - "json", - "-show_format", - "-show_streams", - path, - ] - ) - if res := result[1]: - LOGGER.warning(f"Get Media Info: {res}") - except Exception as e: - LOGGER.error(f"Media Info: {e}. Mostly File not found!") - return (0, "", "", "") if metadata else (0, None, None) - ffresult = eval(result[0]) - fields = ffresult.get("format") - if fields is None: - LOGGER.error(f"Media Info Sections: {result}") - return (0, "", "", "") if metadata else (0, None, None) - duration = round(float(fields.get("duration", 0))) - if metadata: - lang, qual, stitles = "", "", "" - if (streams := ffresult.get("streams")) and streams[0].get( - "codec_type" - ) == "video": - qual = int(streams[0].get("height")) - qual = f"{480 if qual <= 480 else 540 if qual <= 540 else 720 if qual <= 720 else 1080 if qual <= 1080 else 2160 if qual <= 2160 else 4320 if qual <= 4320 else 8640}p" - for stream in streams: - if stream.get("codec_type") == "audio" and ( - lc := stream.get("tags", {}).get("language") - ): - try: - lc = Language.get(lc).display_name() - if lc not in lang: - lang += f"{lc}, " - except Exception: - pass - if stream.get("codec_type") == "subtitle" and ( - st := stream.get("tags", {}).get("language") - ): - try: - st = Language.get(st).display_name() - if st not in stitles: - stitles += f"{st}, " - except Exception: - pass - - return duration, qual, lang[:-2], stitles[:-2] - tags = fields.get("tags", {}) - artist = tags.get("artist") or tags.get("ARTIST") or tags.get("Artist") - title = tags.get("title") or tags.get("TITLE") or tags.get("Title") - return duration, artist, title - - -async def get_document_type(path): - is_video, is_audio, is_image = False, False, False - if path.endswith(tuple(ARCH_EXT)) or re_search( - r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$", path - ): - return is_video, is_audio, is_image - mime_type = await sync_to_async(get_mime_type, path) - if mime_type.startswith("audio"): - return False, True, False - if mime_type.startswith("image"): - return False, False, True - if not mime_type.startswith("video") and not mime_type.endswith("octet-stream"): - return is_video, is_audio, is_image - try: - result = await cmd_exec( - [ - "ffprobe", - "-hide_banner", - "-loglevel", - "error", - "-print_format", - "json", - "-show_streams", - path, - ] - ) - if res := result[1]: - LOGGER.warning(f"Get Document Type: {res}") - except Exception as e: - LOGGER.error(f"Get Document Type: {e}. 
Mostly File not found!") - return is_video, is_audio, is_image - fields = eval(result[0]).get("streams") - if fields is None: - LOGGER.error(f"get_document_type: {result}") - return is_video, is_audio, is_image - for stream in fields: - if stream.get("codec_type") == "video": - is_video = True - elif stream.get("codec_type") == "audio": - is_audio = True - return is_video, is_audio, is_image - - -async def get_audio_thumb(audio_file): - des_dir = "Thumbnails" - if not await aiopath.exists(des_dir): - await mkdir(des_dir) - des_dir = ospath.join(des_dir, f"{time()}.jpg") - cmd = [ - "xtra", - "-hide_banner", - "-loglevel", - "error", - "-i", - audio_file, - "-an", - "-vcodec", - "copy", - des_dir, - ] - status = await create_subprocess_exec(*cmd, stderr=PIPE) - if await status.wait() != 0 or not await aiopath.exists(des_dir): - err = (await status.stderr.read()).decode().strip() - LOGGER.error( - f"Error while extracting thumbnail from audio. Name: {audio_file} stderr: {err}" - ) - return None - return des_dir - - -async def take_ss(video_file, duration=None, total=1, gen_ss=False): - des_dir = ospath.join("Thumbnails", f"{time()}") - await makedirs(des_dir, exist_ok=True) - if duration is None: - duration = (await get_media_info(video_file))[0] - if duration == 0: - duration = 3 - duration = duration - (duration * 2 / 100) - cmd = [ - "xtra", - "-hide_banner", - "-loglevel", - "error", - "-ss", - "", - "-i", - video_file, - "-vf", - "thumbnail", - "-frames:v", - "1", - des_dir, - ] - tasks = [] - tstamps = {} - for eq_thumb in range(1, total + 1): - cmd[5] = str((duration // total) * eq_thumb) - tstamps[f"aeon_{eq_thumb}.jpg"] = strftime("%H:%M:%S", gmtime(float(cmd[5]))) - cmd[-1] = ospath.join(des_dir, f"aeon_{eq_thumb}.jpg") - tasks.append(create_task(create_subprocess_exec(*cmd, stderr=PIPE))) - status = await gather(*tasks) - for task, eq_thumb in zip(status, range(1, total + 1)): - if await task.wait() != 0 or not await aiopath.exists( - ospath.join(des_dir, f"aeon_{eq_thumb}.jpg") - ): - err = (await task.stderr.read()).decode().strip() - LOGGER.error( - f"Error while extracting thumbnail no. {eq_thumb} from video. 
Name: {video_file} stderr: {err}" - ) - await aiormtree(des_dir) - return None - return (des_dir, tstamps) if gen_ss else ospath.join(des_dir, "aeon_1.jpg") - - -async def split_file( - path, - size, - file_, - dirpath, - split_size, - listener, - start_time=0, - i=1, - multi_streams=True, -): - if ( - listener.suproc == "cancelled" - or listener.suproc is not None - and listener.suproc.returncode == -9 - ): - return False - if listener.seed and not listener.newDir: - dirpath = f"{dirpath}/splited_files" - if not await aiopath.exists(dirpath): - await mkdir(dirpath) - leech_split_size = MAX_SPLIT_SIZE - parts = -(-size // leech_split_size) - if (await get_document_type(path))[0]: - if multi_streams: - multi_streams = await is_multi_streams(path) - duration = (await get_media_info(path))[0] - base_name, extension = ospath.splitext(file_) - split_size -= 5000000 - while i <= parts or start_time < duration - 4: - parted_name = f"{base_name}.part{i:03}{extension}" - out_path = ospath.join(dirpath, parted_name) - cmd = [ - "xtra", - "-hide_banner", - "-loglevel", - "error", - "-ss", - str(start_time), - "-i", - path, - "-fs", - str(split_size), - "-map", - "0", - "-map_chapters", - "-1", - "-async", - "1", - "-strict", - "-2", - "-c", - "copy", - out_path, - ] - if not multi_streams: - del cmd[10] - del cmd[10] - if ( - listener.suproc == "cancelled" - or listener.suproc is not None - and listener.suproc.returncode == -9 - ): - return False - listener.suproc = await create_subprocess_exec(*cmd, stderr=PIPE) - code = await listener.suproc.wait() - if code == -9: - return False - if code != 0: - err = (await listener.suproc.stderr.read()).decode().strip() - with contextlib.suppress(Exception): - await aioremove(out_path) - if multi_streams: - LOGGER.warning( - f"{err}. Retrying without map, -map 0 not working in all situations. Path: {path}" - ) - return await split_file( - path, - size, - file_, - dirpath, - split_size, - listener, - start_time, - i, - False, - ) - LOGGER.warning( - f"{err}. Unable to split this video, if it's size less than {MAX_SPLIT_SIZE} will be uploaded as it is. Path: {path}" - ) - return "errored" - out_size = await aiopath.getsize(out_path) - if out_size > MAX_SPLIT_SIZE: - dif = out_size - MAX_SPLIT_SIZE - split_size -= dif + 5000000 - await aioremove(out_path) - return await split_file( - path, - size, - file_, - dirpath, - split_size, - listener, - start_time, - i, - ) - lpd = (await get_media_info(out_path))[0] - if lpd == 0: - LOGGER.error( - f"Something went wrong while splitting, mostly file is corrupted. Path: {path}" - ) - break - if duration == lpd: - LOGGER.warning( - f"This file has been splitted with default stream and audio, so you will only see one part with less size from orginal one because it doesn't have all streams and audios. This happens mostly with MKV videos. 
Path: {path}" - ) - break - if lpd <= 3: - await aioremove(out_path) - break - start_time += lpd - 3 - i += 1 - else: - out_path = ospath.join(dirpath, f"{file_}.") - listener.suproc = await create_subprocess_exec( - "split", - "--numeric-suffixes=1", - "--suffix-length=3", - f"--bytes={split_size}", - path, - out_path, - stderr=PIPE, - ) - code = await listener.suproc.wait() - if code == -9: - return False - if code != 0: - err = (await listener.suproc.stderr.read()).decode().strip() - LOGGER.error(err) - return True - - -async def process_file(file_, user_id, dirpath=None, is_mirror=False): - user_dict = user_data.get(user_id, {}) - prefix = user_dict.get("prefix", "") - remname = user_dict.get("remname", "") - suffix = user_dict.get("suffix", "") - lcaption = user_dict.get("lcaption", "") - metadata_key = user_dict.get("metadata", "") or config_dict["METADATA_KEY"] - prefile_ = file_ - - if metadata_key and dirpath and is_mkv(file_): - file_ = await change_metadata(file_, dirpath, metadata_key) - - file_ = re_sub(r"^www\S+\s*[-_]*\s*", "", file_) - if remname: - if not remname.startswith("|"): - remname = f"|{remname}" - remname = remname.replace(r"\s", " ") - slit = remname.split("|") - __new_file_name = ospath.splitext(file_)[0] - for rep in range(1, len(slit)): - args = slit[rep].split(":") - if len(args) == 3: - __new_file_name = re_sub( - args[0], args[1], __new_file_name, int(args[2]) - ) - elif len(args) == 2: - __new_file_name = re_sub(args[0], args[1], __new_file_name) - elif len(args) == 1: - __new_file_name = re_sub(args[0], "", __new_file_name) - file_ = __new_file_name + ospath.splitext(file_)[1] - LOGGER.info(f"New Filename : {file_}") - - nfile_ = file_ - if prefix: - nfile_ = prefix.replace(r"\s", " ") + file_ - prefix = re_sub(r"<.*?>", "", prefix).replace(r"\s", " ") - if not file_.startswith(prefix): - file_ = f"{prefix}{file_}" - - if suffix and not is_mirror: - suffix = suffix.replace(r"\s", " ") - suf_len = len(suffix) - file_dict = file_.split(".") - _ext_in = 1 + len(file_dict[-1]) - _ext_out_name = ".".join(file_dict[:-1]).replace(".", " ").replace("-", " ") - _new_ext_file_name = f"{_ext_out_name}{suffix}.{file_dict[-1]}" - if len(_ext_out_name) > (64 - (suf_len + _ext_in)): - _new_ext_file_name = ( - _ext_out_name[: 64 - (suf_len + _ext_in)] - + f"{suffix}.{file_dict[-1]}" - ) - file_ = _new_ext_file_name - elif suffix: - suffix = suffix.replace(r"\s", " ") - file_ = ( - f"{ospath.splitext(file_)[0]}{suffix}{ospath.splitext(file_)[1]}" - if "." 
in file_ - else f"{file_}{suffix}" - ) - - cap_mono = nfile_ - if lcaption and dirpath and not is_mirror: - - def lower_vars(match): - return f"{{{match.group(1).lower()}}}" - - lcaption = ( - lcaption.replace(r"\|", "%%") - .replace(r"\{", "&%&") - .replace(r"\}", "$%$") - .replace(r"\s", " ") - ) - slit = lcaption.split("|") - slit[0] = re_sub(r"\{([^}]+)\}", lower_vars, slit[0]) - up_path = ospath.join(dirpath, prefile_) - dur, qual, lang, subs = await get_media_info(up_path, True) - cap_mono = slit[0].format( - filename=nfile_, - size=get_readable_file_size(await aiopath.getsize(up_path)), - duration=get_readable_time(dur, True), - quality=qual, - languages=lang, - subtitles=subs, - md5_hash=get_md5_hash(up_path), - ) - if len(slit) > 1: - for rep in range(1, len(slit)): - args = slit[rep].split(":") - if len(args) == 3: - cap_mono = cap_mono.replace(args[0], args[1], int(args[2])) - elif len(args) == 2: - cap_mono = cap_mono.replace(args[0], args[1]) - elif len(args) == 1: - cap_mono = cap_mono.replace(args[0], "") - cap_mono = ( - cap_mono.replace("%%", "|").replace("&%&", "{").replace("$%$", "}") - ) - return file_, cap_mono - - -async def get_ss(up_path, ss_no): - thumbs_path, tstamps = await take_ss(up_path, total=ss_no, gen_ss=True) - th_html = f"
<h4>{ospath.basename(up_path)}</h4><br><h4>Total Screenshots: {ss_no}</h4><br>" - th_html += "".join( - f'<img src="https://graph.org{upload_file(f"{thumbs_path}/{thumb}")[0]}"><br><pre>Screenshot at {tstamps[thumb]}</pre>' - for thumb in natsorted(await listdir(thumbs_path)) - ) - await aiormtree(thumbs_path) - link_id = (await telegraph.create_page(title="ScreenShots", content=th_html))[ - "path" - ] - return f"https://graph.org/{link_id}" - - async def get_mediainfo_link(up_path): - stdout, __, _ = await cmd_exec(ssplit(f'mediainfo "{up_path}"')) - tc = f"<h4>{ospath.basename(up_path)}</h4><br><br>
" - if len(stdout) != 0: - tc += parseinfo(stdout) - link_id = (await telegraph.create_page(title="MediaInfo", content=tc))["path"] - return f"https://graph.org/{link_id}" - - -def get_md5_hash(up_path): - md5_hash = md5() - with open(up_path, "rb") as f: - for byte_block in iter(lambda: f.read(4096), b""): - md5_hash.update(byte_block) - return md5_hash.hexdigest() +SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$" def is_first_archive_split(file): - return bool(re_search(FIRST_SPLIT_REGEX, file)) + return bool(re_search(FIRST_SPLIT_REGEX, file.lower(), IGNORECASE)) def is_archive(file): - return file.endswith(tuple(ARCH_EXT)) + return file.lower().endswith(tuple(ARCH_EXT)) def is_archive_split(file): - return bool(re_search(SPLIT_REGEX, file)) + return bool(re_search(SPLIT_REGEX, file.lower(), IGNORECASE)) async def clean_target(path): if await aiopath.exists(path): LOGGER.info(f"Cleaning Target: {path}") - if await aiopath.isdir(path): - with contextlib.suppress(Exception): - await aiormtree(path) - elif await aiopath.isfile(path): - with contextlib.suppress(Exception): - await aioremove(path) + try: + if await aiopath.isdir(path): + await aiormtree(path, ignore_errors=True) + else: + await remove(path) + except Exception as e: + LOGGER.error(str(e)) async def clean_download(path): if await aiopath.exists(path): LOGGER.info(f"Cleaning Download: {path}") - with contextlib.suppress(Exception): - await aiormtree(path) - - -async def start_cleanup(): - xnox_client.torrents_delete(torrent_hashes="all") - with contextlib.suppress(Exception): - await aiormtree("/usr/src/app/downloads/") - await makedirs("/usr/src/app/downloads/", exist_ok=True) + try: + await aiormtree(path, ignore_errors=True) + except Exception as e: + LOGGER.error(str(e)) def clean_all(): aria2.remove_all(True) xnox_client.torrents_delete(torrent_hashes="all") - with contextlib.suppress(Exception): - rmtree("/usr/src/app/downloads/") + try: + LOGGER.info("Cleaning Download Directory") + rmtree(Config.DOWNLOAD_DIR, ignore_errors=True) + if ospath.exists("Thumbnails"): + rmtree("Thumbnails", ignore_errors=True) + except Exception: + pass + makedirs(Config.DOWNLOAD_DIR, exist_ok=True) def exit_clean_up(_, __): try: - LOGGER.info("Please wait, while we clean up and stop the running downloads") + LOGGER.info("Please wait! 
Bot clean up and stop the running downloads...") clean_all() srun( - ["pkill", "-9", "-f", "-e", "gunicorn|xria|xnox|xtra|xone"], check=False + ["pkill", "-9", "-f", "gunicorn|xria|xnox|xtra|xone|7z|split"], + check=False, ) - sexit(0) + exit(0) except KeyboardInterrupt: LOGGER.warning("Force Exiting before the cleanup finishes!") - sexit(1) + exit(1) -async def clean_unwanted(path): - LOGGER.info(f"Cleaning unwanted files/folders: {path}") - for dirpath, _, files in await sync_to_async(walk, path, topdown=False): +async def clean_unwanted(opath): + LOGGER.info(f"Cleaning unwanted files/folders: {opath}") + for dirpath, _, files in await sync_to_async(walk, opath, topdown=False): for filee in files: - if ( - filee.endswith(".!qB") - or filee.endswith(".parts") - and filee.startswith(".") + f_path = ospath.join(dirpath, filee) + if filee.endswith(".!qB") or ( + filee.endswith(".parts") and filee.startswith(".") ): - await aioremove(ospath.join(dirpath, filee)) - if dirpath.endswith((".unwanted", "splited_files", "copied")): - await aiormtree(dirpath) - for dirpath, _, files in await sync_to_async(walk, path, topdown=False): + await remove(f_path) + if dirpath.endswith(".unwanted"): + await aiormtree(dirpath, ignore_errors=True) + for dirpath, _, __ in await sync_to_async(walk, opath, topdown=False): if not await listdir(dirpath): await rmdir(dirpath) -async def get_path_size(path): - if await aiopath.isfile(path): - return await aiopath.getsize(path) +async def get_path_size(opath): total_size = 0 - for root, dirs, files in await sync_to_async(walk, path): + if await aiopath.isfile(opath): + if await aiopath.islink(opath): + opath = await aioreadlink(opath) + return await aiopath.getsize(opath) + for root, _, files in await sync_to_async(walk, opath): for f in files: abs_path = ospath.join(root, f) + if await aiopath.islink(abs_path): + abs_path = await aioreadlink(abs_path) total_size += await aiopath.getsize(abs_path) return total_size -async def count_files_and_folders(path): +async def count_files_and_folders(opath, extension_filter): total_files = 0 total_folders = 0 - for _, dirs, files in await sync_to_async(walk, path): + for _, dirs, files in await sync_to_async(walk, opath): total_files += len(files) for f in files: - if f.endswith(tuple(GLOBAL_EXTENSION_FILTER)): + if f.lower().endswith(tuple(extension_filter)): total_files -= 1 total_folders += len(dirs) return total_folders, total_files @@ -667,55 +210,242 @@ async def count_files_and_folders(path): def get_base_name(orig_path): extension = next( - (ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)), "" + (ext for ext in ARCH_EXT if orig_path.lower().endswith(ext)), + "", ) if extension != "": return re_split(f"{extension}$", orig_path, maxsplit=1, flags=IGNORECASE)[0] - raise ExtractionArchiveError("File format not supported for extraction") + raise NotSupportedExtractionArchive("File format not supported for extraction") + + +async def create_recursive_symlink(source, destination): + if ospath.isdir(source): + await aiomakedirs(destination, exist_ok=True) + for item in await listdir(source): + item_source = ospath.join(source, item) + item_dest = ospath.join(destination, item) + await create_recursive_symlink(item_source, item_dest) + elif ospath.isfile(source): + try: + await symlink(source, destination) + except FileExistsError: + LOGGER.error(f"Shortcut already exists: {destination}") + except Exception as e: + LOGGER.error(f"Error creating shortcut for {source}: {e}") def get_mime_type(file_path): + if 
ospath.islink(file_path): + file_path = readlink(file_path) mime = Magic(mime=True) mime_type = mime.from_file(file_path) return mime_type or "text/plain" -def check_storage_threshold(size, threshold, arch=False, alloc=False): - free = disk_usage("/usr/src/app/downloads/").free - if not alloc: - if ( - not arch - and free - size < threshold - or arch - and free - (size * 2) < threshold - ): - return False - elif not arch: - if free < threshold: - return False - elif free - size < threshold: - return False - return True - - -async def join_files(path): - files = await listdir(path) +async def join_files(opath): + files = await listdir(opath) results = [] + exists = False for file_ in files: - if ( - re_search(r"\.0+2$", file_) - and await sync_to_async(get_mime_type, f"{path}/{file_}") - == "application/octet-stream" - ): + if re_search(r"\.0+2$", file_) and await sync_to_async( + get_mime_type, + f"{opath}/{file_}", + ) not in ["application/x-7z-compressed", "application/zip"]: + exists = True final_name = file_.rsplit(".", 1)[0] - cmd = f"cat {path}/{final_name}.* > {path}/{final_name}" + fpath = f"{opath}/{final_name}" + cmd = f'cat "{fpath}."* > "{fpath}"' _, stderr, code = await cmd_exec(cmd, True) if code != 0: LOGGER.error(f"Failed to join {final_name}, stderr: {stderr}") + if await aiopath.isfile(fpath): + await remove(fpath) else: results.append(final_name) - if results: + + if not exists: + LOGGER.warning("No files to join!") + elif results: + LOGGER.info("Join Completed!") for res in results: for file_ in files: - if re_search(rf"{res}\.0[0-9]+$", file_): - await aioremove(f"{path}/{file_}") + if re_search(rf"{escape(res)}\.0[0-9]+$", file_): + await remove(f"{opath}/{file_}") + + +async def split_file(f_path, split_size, listener): + out_path = f"{f_path}." + if listener.is_cancelled: + return False + listener.subproc = await create_subprocess_exec( + "split", + "--numeric-suffixes=1", + "--suffix-length=3", + f"--bytes={split_size}", + f_path, + out_path, + stderr=PIPE, + ) + _, stderr = await listener.subproc.communicate() + code = listener.subproc.returncode + if listener.is_cancelled: + return False + if code == -9: + listener.is_cancelled = True + return False + if code != 0: + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error(f"{stderr}. 
Split Document: {f_path}") + return True + + +class SevenZ: + def __init__(self, listener): + self._listener = listener + self._processed_bytes = 0 + self._percentage = "0%" + + @property + def processed_bytes(self): + return self._processed_bytes + + @property + def progress(self): + return self._percentage + + async def _sevenz_progress(self): + pattern = r"(\d+)\s+bytes" + while not ( + self._listener.subproc.returncode is not None + or self._listener.is_cancelled + or self._listener.subproc.stdout.at_eof() + ): + try: + line = await wait_for(self._listener.subproc.stdout.readline(), 5) + except Exception: + break + line = line.decode().strip() + if match := re_search(pattern, line): + self._listener.subsize = int(match.group(1)) + await sleep(0.05) + s = b"" + while not ( + self._listener.is_cancelled + or self._listener.subproc.returncode is not None + or self._listener.subproc.stdout.at_eof() + ): + try: + char = await wait_for(self._listener.subproc.stdout.read(1), 60) + except Exception: + break + if not char: + break + s += char + if char == b"%": + try: + self._percentage = s.decode().rsplit(" ", 1)[-1].strip() + self._processed_bytes = ( + int(self._percentage.strip("%")) / 100 + ) * self._listener.subsize + except Exception: + self._processed_bytes = 0 + self._percentage = "0%" + s = b"" + await sleep(0.05) + + self._processed_bytes = 0 + self._percentage = "0%" + + async def extract(self, f_path, t_path, pswd): + cmd = [ + "7z", + "x", + f"-p{pswd}", + f_path, + f"-o{t_path}", + "-aot", + "-xr!@PaxHeader", + "-bsp1", + "-bse1", + "-bb3", + ] + if not pswd: + del cmd[2] + if self._listener.is_cancelled: + return False + self._listener.subproc = await create_subprocess_exec( + *cmd, + stdout=PIPE, + stderr=PIPE, + ) + await self._sevenz_progress() + _, stderr = await self._listener.subproc.communicate() + code = self._listener.subproc.returncode + if self._listener.is_cancelled: + return False + if code == -9: + self._listener.is_cancelled = True + return False + if code != 0: + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error(f"{stderr}. Unable to extract archive!. Path: {f_path}") + return code + + async def zip(self, dl_path, up_path, pswd): + size = await get_path_size(dl_path) + split_size = self._listener.split_size + cmd = [ + "7z", + f"-v{split_size}b", + "a", + "-mx=0", + f"-p{pswd}", + up_path, + dl_path, + "-bsp1", + "-bse1", + "-bb3", + ] + if not self._listener.is_file: + cmd.extend(f"-xr!*.{ext}" for ext in self._listener.extension_filter) + if self._listener.is_leech and int(size) > self._listener.split_size: + if not pswd: + del cmd[4] + LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}.0*") + else: + del cmd[1] + if not pswd: + del cmd[3] + LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}") + if self._listener.is_cancelled: + return False + self._listener.subproc = await create_subprocess_exec( + *cmd, + stdout=PIPE, + stderr=PIPE, + ) + await self._sevenz_progress() + _, stderr = await self._listener.subproc.communicate() + code = self._listener.subproc.returncode + if self._listener.is_cancelled: + return False + if code == -9: + self._listener.is_cancelled = True + return False + if code == 0: + await clean_target(dl_path) + return up_path + if await aiopath.exists(up_path): + await remove(up_path) + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error(f"{stderr}. 
Unable to zip this path: {dl_path}") + return dl_path diff --git a/bot/helper/ext_utils/help_messages.py b/bot/helper/ext_utils/help_messages.py new file mode 100644 index 000000000..4bb0ec72d --- /dev/null +++ b/bot/helper/ext_utils/help_messages.py @@ -0,0 +1,408 @@ +from bot.helper.telegram_helper.bot_commands import BotCommands + +nsfw_keywords = [ + "porn", + "onlyfans", + "nsfw", + "Brazzers", + "adult", + "xnxx", + "xvideos", + "nsfwcherry", + "hardcore", + "Pornhub", + "xvideos2", + "youporn", + "pornrip", + "playboy", + "hentai", + "erotica", + "blowjob", + "redtube", + "stripchat", + "camgirl", + "nude", + "fetish", + "cuckold", + "orgy", + "horny", + "swingers", + "ullu", +] + +mirror = """Send link along with command line or + +/cmd link + +By replying to link/file: + +/cmd -n new name -e -up upload destination + +NOTE: +1. Commands that start with qb are ONLY for torrents.""" + +yt = """Send link along with command line: + +/cmd link +By replying to link: +/cmd -n new name -z password -opt x:y|x1:y1 + +Check here all supported SITES +Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options.""" + +clone = """Send Gdrive|Gdot|Filepress|Filebee|Appdrive|Gdflix link or rclone path along with command or by replying to the link/rc_path by command. +Use -sync to use sync method in rclone. Example: /cmd rcl/rclone_path -up rcl/rclone_path/rc -sync""" + +new_name = """New Name: -n + +/cmd link -n new name +Note: Doesn't work with torrents""" + +multi_link = """Multi links only by replying to first link/file: -i + +/cmd -i 10(number of links/files)""" + +same_dir = """Move file(s)/folder(s) to new folder: -m + +You can use this arg also to move multiple links/torrents contents to the same directory, so all links will be uploaded together as one task + +/cmd link -m new folder (only one link inside new folder) +/cmd -i 10(number of links/files) -m folder name (all links contents in one folder) +/cmd -b -m folder name (reply to batch of message/file(each link on new line)) + +While using bulk you can also use this arg with different folder name along with the links in message or file batch +Example: +link1 -m folder1 +link2 -m folder1 +link3 -m folder2 +link4 -m folder2 +link5 -m folder3 +link6 +so link1 and link2 content will be uploaded from same folder which is folder1 +link3 and link4 content will be uploaded from same folder also which is folder2 +link5 will uploaded alone inside new folder named folder3 +link6 will get uploaded normally alone +""" + +thumb = """Thumbnail for current task: -t + +/cmd link -t tg-message-link (doc or photo) or none (file without thumb)""" + +split_size = """Split size for current task: -sp + +/cmd link -sp (500mb or 2gb or 4000000000) +Note: Only mb and gb are supported or write in bytes without unit!""" + +upload = """Upload Destination: -up + +/cmd link -up rcl/gdl (rcl: to select rclone config, remote & path | gdl: To select token.pickle, gdrive id) using buttons +You can directly add the upload path: -up remote:dir/subdir or -up Gdrive_id or -up id/username (telegram) or -up id/username|topic_id (telegram) +If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID. +If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH. + +If you want to add path or gdrive manually from your config/token (UPLOADED FROM USETTING) add mrcc: for rclone and mtp: before the path/gdrive_id without space. 
+/cmd link -up mrcc:main:dump or -up mtp:gdrive_id, or you can simply edit the upload using the owner/user token/config from usetting without adding mtp: or mrcc: before the upload path/id
+
+To add leech destination:
+-up id/@username/pm
+-up b:id/@username/pm (b: means leech by bot) (id or username of the chat, or write pm, meaning private message, so the bot will send the files to you in private)
+When should you use b: (leech by bot)? When your default setting is leech by user and you want to leech by bot for a specific task.
+-up u:id/@username (u: means leech by user) Use this in case the owner has added USER_SESSION_STRING.
+-up m:id/@username (mixed leech) m: to upload files by bot and user based on file size.
+-up id/@username|topic_id (leech in specific chat and topic) add | without space and write the topic id after the chat id or username.
+
+In case you want to specify whether to use token.pickle or service accounts you can add tp:gdrive_id (using token.pickle) or sa:gdrive_id (using service accounts) or mtp:gdrive_id (using token.pickle uploaded from usetting).
+DEFAULT_UPLOAD doesn't affect leech cmds.
+"""
+
+user_download = """User Download: link
+
+/cmd tp:link to download using owner token.pickle in case service accounts are enabled.
+/cmd sa:link to download using service accounts in case service accounts are disabled.
+/cmd tp:gdrive_id to download using token.pickle and file_id in case service accounts are enabled.
+/cmd sa:gdrive_id to download using service accounts and file_id in case service accounts are disabled.
+/cmd mtp:gdrive_id or mtp:link to download using the user token.pickle uploaded from usetting
+/cmd mrcc:remote:path to download using the user rclone config uploaded from usetting
+You can simply edit it using the owner/user token/config from usetting without adding mtp: or mrcc: before the path/id"""
+
+rcf = """Rclone Flags: -rcf
+
+/cmd link|path|rcl -up path|rcl -rcf --buffer-size:8M|--drive-starred-only|key|key:value
+This will override all other flags except --exclude
+Check here all RcloneFlags."""
+
+bulk = """Bulk Download: -b
+
+Bulk can be used only by replying to a text message or a text file containing links separated by new lines.
+Example:
+link1 -n new name -up remote1:path1 -rcf |key:value|key:value
+link2 -z -n new name -up remote2:path2
+link3 -e -n new name -up remote2:path2
+Reply to this example by this cmd -> /cmd -b(bulk)
+
+Note: Any arg along with the cmd will be applied to all links
+/cmd -b -up remote: -z -m folder name (all links contents in one zipped folder uploaded to one destination)
+so you can't set different upload destinations along with the links in case you have added -m along with the cmd
+You can set the start and end of the links in the bulk, like seed, with -b start:end, or only the end with -b :end, or only the start with -b start.
+The default start is from zero (first link) to inf."""
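Several of the strings above (upload, user-download, and the rclone helpers) share one small convention: an optional mrcc:/mtp:/tp:/sa: prefix selects whose credentials are used. A hypothetical helper showing how such a prefix can be peeled off (the bot's real dispatch lives elsewhere in this patch; names here are illustrative):

    def split_dest_prefix(dest: str) -> tuple[str | None, str]:
        """Peel an optional credential prefix off an upload/download target."""
        for prefix in ("mrcc:", "mtp:", "tp:", "sa:"):
            if dest.startswith(prefix):
                return prefix[:-1], dest[len(prefix):]
        return None, dest

    print(split_dest_prefix("mrcc:main:dump"))  # ('mrcc', 'main:dump')
    print(split_dest_prefix("gdrive_id"))       # (None, 'gdrive_id')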
+
+rlone_dl = """Rclone Download:
+
+Treat rclone paths exactly like links
+/cmd main:dump/ubuntu.iso or rcl (to select config, remote and path)
+Users can add their own rclone from user settings
+If you want to add a path manually from your config, add mrcc: before the path without space
+/cmd mrcc:main:dump/ubuntu.iso
+You can simply edit it using the owner/user config from usetting without adding mrcc: before the path"""
+
+extract_zip = """Extract/Zip: -e -z
+
+/cmd link -e password (extract password protected)
+/cmd link -z password (zip password protected)
+/cmd link -z password -e (extract and zip password protected)
+Note: When both extract and zip are added with the cmd, it will extract first and then zip, so it always extracts first"""
+
+join = """Join Split Files: -j
+
+This option will only work before extract and zip, so mostly it will be used with the -m argument (samedir)
+By Reply:
+/cmd -i 3 -j -m folder name
+/cmd -b -j -m folder name
+If you have a link (folder) that has split files:
+/cmd link -j"""
+
+tg_links = """TG Links:
+
+Treat links like any direct link
+Some links need user access, so you must add USER_SESSION_STRING for them.
+Three types of links:
+Public: https://t.me/channel_name/message_id
+Private: tg://openmessage?user_id=xxxxxx&message_id=xxxxx
+Super: https://t.me/c/channel_id/message_id
+Range: https://t.me/channel_name/first_message_id-last_message_id
+Range Example: tg://openmessage?user_id=xxxxxx&message_id=555-560 or https://t.me/channel_name/100-150
+Note: Range links work only by replying with the cmd to them"""
+
+sample_video = """Sample Video: -sv
+
+Create a sample video for one video or a folder of videos.
+/cmd -sv (it will take the default values, which are 60sec sample duration and 4sec part duration).
+You can control those values. Example: /cmd -sv 70:5 (sample-duration:part-duration) or /cmd -sv :5 or /cmd -sv 70."""
+
+screenshot = """ScreenShots: -ss
+
+Create screenshots for one video or a folder of videos.
+/cmd -ss (it will take the default value, which is 10 photos).
+You can control this value. Example: /cmd -ss 6."""
+
+seed = """Bittorrent seed: -d
+
+/cmd link -d ratio:seed_time or by replying to file/link
+To specify ratio and seed time add -d ratio:time.
+Example: -d 0.7:10 (ratio and time) or -d 0.7 (only ratio) or -d :10 (only time) where time is in minutes"""
+
+zip_arg = """Zip: -z password
+
+/cmd link -z (zip)
+/cmd link -z password (zip password protected)"""
+
+qual = """Quality Buttons: -s
+
+In case a default quality is added from yt-dlp options using the format option and you need to select quality for a specific link, or links with the multi links feature.
+/cmd link -s"""
+
+yt_opt = """Options: -opt
+
+/cmd link -opt playliststart:^10|fragment_retries:^inf|matchtitle:S13|writesubtitles:true|live_from_start:true|postprocessor_args:{"xtra": ["-threads", "4"]}|wait_for_video:(5, 100)|download_ranges:[{"start_time": 0, "end_time": 10}]
+Note: Add `^` before an integer or float; some values must be numeric and some strings.
+For example, playlist_items:10 works with a string, so there is no need to add `^` before the number, but playlistend works only with an integer, so you must add `^` before the number as in the example above.
+You can add tuples and dicts also. Use double quotes inside dicts."""
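A hypothetical parser for the documented key:value|key:value syntax above, showing how the `^` prefix and true/false/dict/tuple literals could be turned into yt-dlp API options (the bot's actual parser is not part of this hunk; this sketch only illustrates the documented convention):

    from ast import literal_eval

    def parse_ytdlp_opts(opt: str) -> dict:
        """Illustrative parser: ^ marks numbers; literals are eval'd safely."""
        options = {}
        for item in opt.split("|"):
            key, value = item.split(":", 1)
            if value.startswith("^"):
                raw = value[1:]
                # float() also accepts "inf", matching the fragment_retries example
                value = int(raw) if raw.lstrip("+-").isdigit() else float(raw)
            elif value.lower() == "true":
                value = True
            elif value.lower() == "false":
                value = False
            elif value.startswith(("{", "[", "(")):
                value = literal_eval(value)  # dict / list / tuple literal
            options[key] = value
        return options

    print(parse_ytdlp_opts("playliststart:^10|matchtitle:S13|writesubtitles:true|wait_for_video:(5, 100)"))
    # {'playliststart': 10, 'matchtitle': 'S13', 'writesubtitles': True, 'wait_for_video': (5, 100)}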
+
+convert_media = """Convert Media: -ca -cv
+/cmd link -ca mp3 -cv mp4 (convert all audios to mp3 and all videos to mp4)
+/cmd link -ca mp3 (convert all audios to mp3)
+/cmd link -cv mp4 (convert all videos to mp4)
+/cmd link -ca mp3 + flac ogg (convert only flac and ogg audios to mp3)
+/cmd link -cv mkv - webm flv (convert all videos to mkv except webm and flv)"""
+
+force_start = """Force Start: -f -fd -fu
+/cmd link -f (force download and upload)
+/cmd link -fd (force download only)
+/cmd link -fu (force upload directly after download finishes)"""
+
+gdrive = """Gdrive: link
+If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID.
+/cmd gdriveLink or gdl or gdriveId -up gdl or gdriveId or gd
+/cmd tp:gdriveLink or tp:gdriveId -up tp:gdriveId or gdl or gd (to use token.pickle if service accounts are enabled)
+/cmd sa:gdriveLink or sa:gdriveId -up sa:gdriveId or gdl or gd (to use service accounts if service accounts are disabled)
+/cmd mtp:gdriveLink or mtp:gdriveId -up mtp:gdriveId or gdl or gd (if you have added an upload gdriveId from usetting) (to use the user token.pickle uploaded via usetting)
+You can simply edit it using the owner/user token from usetting without adding mtp: before the id"""
+
+rclone_cl = """Rclone: path
+If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
+/cmd rcl/rclone_path -up rcl/rclone_path/rc -rcf flagkey:flagvalue|flagkey|flagkey:flagvalue
+/cmd rcl or rclone_path -up rclone_path or rc or rcl
+/cmd mrcc:rclone_path -up rcl or rc (if you have added an rclone path from usetting) (to use the user config)"""
+
+name_sub = r"""Name Substitution: -ns
+/cmd link -ns script/code/s | mirror/leech | tea/ /s | clone | cpu/ | \[hello\]/hello | \\text\\/text/s
+This will affect all files. Format: wordToReplace/wordToReplaceWith/sensitiveCase
+Word Substitutions. You can add a regex pattern instead of normal text. Timeout: 60 sec
+NOTE: You must add \ before any of these characters: \^$.|?*+()[]{}-
+1. script will get replaced by code with sensitive case
+2. mirror will get replaced by leech
+3. tea will get replaced by space with sensitive case
+4. clone will get removed
+5. cpu will get replaced by space
+6. [hello] will get replaced by hello
+7. \text\ will get replaced by text with sensitive case
+"""
+
+mixed_leech = """Mixed leech: -ml
+/cmd link -ml (leech by user and bot session with respect to size)"""
+
+thumbnail_layout = """Thumbnail Layout: -tl
+/cmd link -tl 3x3 (widthxheight) 3 photos in a row and 3 photos in a column"""
+
+leech_as = """Leech as: -doc -med
+/cmd link -doc (Leech as document)
+/cmd link -med (Leech as media)"""
+
+ffmpeg_cmds = """FFmpeg Commands: -ff
+List of lists of ffmpeg commands. You can set multiple ffmpeg commands for all files before upload. Don't write ffmpeg at the beginning; start directly with the arguments.
+Notes:
+1. Add -del to the list(s) if you want the bot to delete the original files after the command completes!
+2. To execute one of the pre-added lists in the bot, like ({"subtitle": ["-i mltb.mkv -c copy -c:s srt mltb.mkv"]}), you must use -ff subtitle (list key)
+Examples: ["-i mltb.mkv -c copy -c:s srt mltb.mkv", "-i mltb.video -c copy -c:s srt mltb", "-i mltb.m4a -c:a libmp3lame -q:a 2 mltb.mp3", "-i mltb.audio -c:a libmp3lame -q:a 2 mltb.mp3", "-i mltb -map 0:a -c copy mltb.mka -map 0:s -c copy mltb.srt"]
+Here is how to use mltb.*, which is a reference to the files you want to work on (a worked expansion follows this string).
+1. First cmd: the input is mltb.mkv, so this cmd will work only on mkv videos, and the output is mltb.mkv as well, so all outputs are mkv. -del will delete the original media after the cmd completes.
+2. Second cmd: the input is mltb.video, so this cmd will work on all videos, and the output is only mltb, so the extension is the same as the input file's.
+3. Third cmd: the input is mltb.m4a, so this cmd will work only on m4a audios, and the output is mltb.mp3, so the output extension is mp3.
+4. Fourth cmd: the input is mltb.audio, so this cmd will work on all audios, and the output is mltb.mp3, so the output extension is mp3."""
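A toy rendering of the mltb substitution described above (hypothetical file names; the real resolution happens in FFMpeg.ffmpeg_cmds in media_utils.py later in this patch):

    from os import path as ospath

    def resolve_mltb(template: str, f_path: str) -> str:
        """Expand one 'mltb' output template against an input file (toy version)."""
        base, ext = ospath.splitext(ospath.basename(f_path))
        _stem, oext = ospath.splitext(template)
        # keep the input extension when the template doesn't name one
        return template.replace("mltb", base) + ("" if oext else ext)

    print(resolve_mltb("mltb.mp3", "song.m4a"))  # -> song.mp3
    print(resolve_mltb("mltb", "clip.mkv"))      # -> clip.mkv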
+
+YT_HELP_DICT = {
+    "main": yt,
+    "New-Name": f"{new_name}\nNote: Don't add file extension",
+    "Zip": zip_arg,
+    "Quality": qual,
+    "Options": yt_opt,
+    "Multi-Link": multi_link,
+    "Same-Directory": same_dir,
+    "Thumb": thumb,
+    "Split-Size": split_size,
+    "Upload-Destination": upload,
+    "Rclone-Flags": rcf,
+    "Bulk": bulk,
+    "Sample-Video": sample_video,
+    "Screenshot": screenshot,
+    "Convert-Media": convert_media,
+    "Force-Start": force_start,
+    "Name-Substitute": name_sub,
+    "Mixed-Leech": mixed_leech,
+    "Thumbnail-Layout": thumbnail_layout,
+    "Leech-Type": leech_as,
+    "FFmpeg-Cmds": ffmpeg_cmds,
+}
+
+MIRROR_HELP_DICT = {
+    "main": mirror,
+    "New-Name": new_name,
+    "DL-Auth": "Direct link authorization: -au -ap\n\n/cmd link -au username -ap password",
+    "Headers": "Direct link custom headers: -h\n\n/cmd link -h key: value key1: value1",
+    "Extract/Zip": extract_zip,
+    "Select-Files": "Bittorrent File Selection: -s\n\n/cmd link -s or by replying to file/link",
+    "Torrent-Seed": seed,
+    "Multi-Link": multi_link,
+    "Same-Directory": same_dir,
+    "Thumb": thumb,
+    "Split-Size": split_size,
+    "Upload-Destination": upload,
+    "Rclone-Flags": rcf,
+    "Bulk": bulk,
+    "Join": join,
+    "Rclone-DL": rlone_dl,
+    "Tg-Links": tg_links,
+    "Sample-Video": sample_video,
+    "Screenshot": screenshot,
+    "Convert-Media": convert_media,
+    "Force-Start": force_start,
+    "User-Download": user_download,
+    "Name-Substitute": name_sub,
+    "Mixed-Leech": mixed_leech,
+    "Thumbnail-Layout": thumbnail_layout,
+    "Leech-Type": leech_as,
+    "FFmpeg-Cmds": ffmpeg_cmds,
+}
+
+CLONE_HELP_DICT = {
+    "main": clone,
+    "Multi-Link": multi_link,
+    "Bulk": bulk,
+    "Gdrive": gdrive,
+    "Rclone": rclone_cl,
+}
+
+RSS_HELP_MESSAGE = """
+Use this format to add a feed url:
+Title1 link (required)
+Title2 link -c cmd -inf xx -exf xx
+Title3 link -c cmd -d ratio:time -z password
+
+-c command -up mrcc:remote:path/subdir -rcf --buffer-size:8M|key|key:value
+-inf For included words filter.
+-exf For excluded words filter.
+-stv true or false (sensitive filter)
+
+Example: Title https://www.rss-url.com -inf 1080 or 720 or 144p|mkv or mp4|hevc -exf flv or web|xxx
+This filter will parse links whose titles contain `(1080 or 720 or 144p) and (mkv or mp4) and hevc` and don't contain (flv or web) and xxx. You can add whatever you want.
+
+Another example: -inf 1080 or 720p|.web. or .webrip.|hevc or x264. This will parse titles that contain ( 1080 or 720p) and (.web. or .webrip.) and (hevc or x264). A space is added before and after 1080 to avoid wrong matching: if the number `10805695` is in the title, it would match 1080 if 1080 were added without spaces around it.
+
+Filter Notes:
+1. | means and.
+2. Add `or` between similar keys; you can add it between qualities or between extensions. So don't add a filter like 1080|mp4 or 720|web, because this will parse 1080 and (mp4 or 720) and web ... not (1080 and mp4) or (720 and web).
+3. You can add `or` and `|` as much as you want.
+4. Take a look at the title: if it has a static special character before or after the qualities, extensions, or whatever, use it in the filter to avoid wrong matches.
+Timeout: 60 sec.
+"""
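A minimal sketch of the documented filter semantics ("|" is AND across groups, "or" is OR within a group); the bot's real RSS matcher is not part of this hunk:

    def rss_title_match(title: str, inf: str) -> bool:
        """AND across '|' groups, OR within a group (documented -inf behavior)."""
        return all(
            any(alt in title for alt in group.split(" or "))
            for group in inf.split("|")
        )

    print(rss_title_match("Show.S13.1080p.mkv.hevc", "1080 or 720|mkv or mp4|hevc"))  # True
    print(rss_title_match("Show.S13.720p.flv", "1080 or 720|mkv or mp4|hevc"))        # False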
+
+PASSWORD_ERROR_MESSAGE = """
+This link requires a password!
+- Insert :: after the link and write the password after the sign.
+
+Example: link::my password
+"""
+
+
+help_string = f"""
+NOTE: Try each command without any argument to see more details.
+/{BotCommands.MirrorCommand[0]} or /{BotCommands.MirrorCommand[1]}: Start mirroring to cloud.
+/{BotCommands.YtdlCommand[0]} or /{BotCommands.YtdlCommand[1]}: Mirror yt-dlp supported link.
+/{BotCommands.LeechCommand[0]} or /{BotCommands.LeechCommand[1]}: Start leeching to Telegram.
+/{BotCommands.YtdlLeechCommand[0]} or /{BotCommands.YtdlLeechCommand[1]}: Leech yt-dlp supported link.
+/{BotCommands.CloneCommand} [drive_url]: Copy file/folder to Google Drive.
+/{BotCommands.CountCommand} [drive_url]: Count file/folder of Google Drive.
+/{BotCommands.DeleteCommand} [drive_url]: Delete file/folder from Google Drive (Only Owner & Sudo).
+/{BotCommands.UserSetCommand[0]} or /{BotCommands.UserSetCommand[1]} [query]: Users settings.
+/{BotCommands.BotSetCommand[0]} or /{BotCommands.BotSetCommand[1]} [query]: Bot settings.
+/{BotCommands.SelectCommand}: Select files from torrents by gid or reply.
+/{BotCommands.ForceStartCommand[0]} or /{BotCommands.ForceStartCommand[1]} [gid]: Force start task by gid or reply.
+/{BotCommands.CancelAllCommand} [query]: Cancel all [status] tasks.
+/{BotCommands.ListCommand} [query]: Search in Google Drive(s).
+/{BotCommands.SearchCommand} [query]: Search for torrents with API.
+/{BotCommands.StatusCommand}: Show the status of all downloads.
+/{BotCommands.StatsCommand}: Show stats of the machine the bot is hosted on.
+/{BotCommands.PingCommand}: Check how long it takes to ping the bot (Only Owner & Sudo).
+/{BotCommands.AuthorizeCommand}: Authorize a chat or a user to use the bot (Only Owner & Sudo).
+/{BotCommands.UnAuthorizeCommand}: Unauthorize a chat or a user to use the bot (Only Owner & Sudo).
+/{BotCommands.UsersCommand}: Show users settings (Only Owner & Sudo).
+/{BotCommands.AddSudoCommand}: Add sudo user (Only Owner).
+/{BotCommands.RmSudoCommand}: Remove sudo users (Only Owner).
+/{BotCommands.RestartCommand}: Restart and update the bot (Only Owner & Sudo).
+/{BotCommands.LogCommand}: Get a log file of the bot. Handy for getting crash reports (Only Owner & Sudo).
+/{BotCommands.ShellCommand}: Run shell commands (Only Owner).
+/{BotCommands.AExecCommand}: Exec async functions (Only Owner).
+/{BotCommands.ExecCommand}: Exec sync functions (Only Owner).
+/{BotCommands.ClearLocalsCommand}: Clear {BotCommands.AExecCommand} or {BotCommands.ExecCommand} locals (Only Owner).
+/{BotCommands.RssCommand}: RSS Menu.
+"""
diff --git a/bot/helper/ext_utils/help_strings.py b/bot/helper/ext_utils/help_strings.py
deleted file mode 100644
index 6522d08fc..000000000
--- a/bot/helper/ext_utils/help_strings.py
+++ /dev/null
@@ -1,171 +0,0 @@
-from bot import GROUPS_EMAIL
-
-YT_HELP_MESSAGE = """
-To use the commands, follow this format:
-/{cmd} link options or replying to link
-/{cmd} options
-
-OPTIONS:
-
-s: Select quality for specific link or links. --z password: Create a password-protected zip file. --n new_name: Rename the file. --t thumbnail url: Custom thumbnail for each leech(raw or tg image url). --ss value: Generate ss for leech video, max 10 for each leach. --id drive_folder_link or drive_id -index https://anything.in/0: Upload to a custom drive. --opt playliststart:^10|fragment_retries:^inf|matchtitle:S13|writesubtitles:true|live_from_start:true|postprocessor_args:{{"ffmpeg": ["-threads", "4"]}}|wait_for_video:(5, 100): Set additional options. --i 10: Process multiple links. --b: Perform bulk download by replying to a text message or file with links separated with new line.
- - -Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options. -""" - -MIRROR_HELP_MESSAGE = """ -To use the commands, follow this format: -/{cmd} link options or replying to link -/{cmd} options - -OPTIONS: -
-n new name: Rename the file or folder. --atc attachment url: Custom attachment for each mkv.(raw only) --t thumbnail url: Custom thumbnail for each leech.(raw or tg image url) --ss value: Generate ss for leech video, max 10 for each leach. --z or -z password: Zip the file or folder with or without password. --e or -e password: Extract the file or folder with or without password. --up upload destination: Upload the file or folder to a specific destination. --id drive_folder_link or -id drive_id -index https://anything.in/0:: Upload to a custom Google Drive folder or ID. --u username -p password: Provide authorization for a direct link. --s: Select a torrent file. --h Direct link custom headers: -h /cmd link -h Key: value Key1: value1. --d ratio:seed_time: Set the seeding ratio and time for a torrent. --i number of links/files: Process multiple links or files. --m folder name: Process multiple links or files within the same upload directory. --b: Perform bulk download by replying to a text message or file with multiple links separated with new line. --j: Join split files together before extracting or zipping. --rcf: Set Rclone flags for the command. -main:dump/ubuntu.iso or rcl: Treat a path as an rclone download.
-""" - -CLONE_HELP_MESSAGE = """ -Send Gdrive link or rclone path along with command or by replying to the link/rc_path by command. - -Multi links only by replying to first gdlink or rclone_path: -/{cmd} -i 10 (number of links/pathies) - -Gdrive: -/{cmd} gdrivelink - -Upload Custom Drive: link -id -index --id drive_folder_link or drive_id -index https://anything.in/0: -drive_id must be a folder ID, and index must be a URL, otherwise it will not accept. - -Rclone: -/{cmd} (rcl or rclone_path) -up (rcl or rclone_path) -rcf flagkey:flagvalue|flagkey|flagkey:flagvalue - -Note: If -up is not specified, the rclone destination will be the RCLONE_PATH from config.env. -""" - -PASSWORD_ERROR_MESSAGE = """ -This link requires a password! -- Insert sign :: after the link and write the password after the sign. -Example: {}::love you -Note: No spaces between the signs :: -For the password, you can use a space! -""" - - -bset_display_dict = { - "AS_DOCUMENT": "Default type of Telegram file upload. Default is False, meaning as media.", - "BASE_URL": "Valid BASE URL where the bot is deployed to use torrent web files selection. Collect it from Heroku.", - "LEECH_LIMIT": "To limit the Torrent/Direct/ytdlp leech size. The default unit is GB. Int", - "CLONE_LIMIT": "To limit the size of Google Drive folder/file which you can clone. The default unit is GB. Int", - "MEGA_LIMIT": "To limit the size of Mega download. The default unit is GB. Int", - "TORRENT_LIMIT": "To limit the size of torrent download. The default unit is GB. Int", - "DIRECT_LIMIT": "To limit the size of direct link download. The default unit is GB. Int", - "YTDLP_LIMIT": "To limit the size of ytdlp download. The default unit is GB. Int", - "PLAYLIST_LIMIT": "To limit the maximum number of playlists. Int", - "IMAGES": "Add multiple Telegraph (graph.org) image links, separated by spaces.", - "USER_MAX_TASKS": "Limit the maximum tasks for users of a group at a time. Use an integer.", - "GDRIVE_LIMIT": "To limit the size of Google Drive folder/file link for leech, zip, and unzip. The default unit is GB. Int", - "USER_TASKS_LIMIT": "The maximum limit on tasks for each user. Int", - "FSUB_IDS": "Fill in the chat_id (-100xxxxxx) of groups/channels you want to force subscribe. Separate them by space. Int\n\nNote: Bot should be added in the filled chat_id as admin.", - "BOT_TOKEN": "The Telegram Bot Token that you got from @BotFather.", - "CMD_SUFFIX": "Commands index number. This number will be added at the end of all commands.", - "DATABASE_URL": "Your Mongo Database URL (Connection string). Follow this Generate Database to generate the database. Data will be saved in the database: auth and sudo users, user settings including thumbnails for each user.\n\nNOTE: You can always edit all settings saved in the database from the official site -> (Browse collections).", - "DEFAULT_UPLOAD": 'Whether "rc" to upload to RCLONE_PATH or "gd" to upload to GDRIVE_ID. Default is "gd".', - "LEECH_DUMP_ID": "Chat ID where leeched files would be uploaded. Int. NOTE: Only available for superGroup/channel. Add -100 before the channel/superGroup ID. In short, don't add bot ID or your ID!", - "MIRROR_LOG_ID": "Chat ID where mirror files would be sent. Int. NOTE: Only available for superGroup/channel. Add -100 before the channel/superGroup ID. In short, don't add bot ID or your ID! For multiple IDs, separate them by space.", - "EXTENSION_FILTER": "File extensions that won't be uploaded/cloned. 
Separate them by space.", - "GDRIVE_ID": "This is the Folder/TeamDrive ID of Google Drive or root to which you want to upload all the mirrors using google-api-python-client.", - "INDEX_URL": "Refer to https://gitlab.com/ParveenBhadooOfficial/Google-Drive-Index.", - "SHOW_MEDIAINFO": "Add a button to show MediaInfo in leeched files. Bool", - "TOKEN_TIMEOUT": "Token timeout for each group member in seconds. Int", - "MEDIA_GROUP": "View uploaded split file parts in media group. Default is False.", - "MEGA_EMAIL": "Email used to sign in on mega.nz for using a premium account. Str", - "MEGA_PASSWORD": "Password for mega.nz account. Str", - "OWNER_ID": "The Telegram User ID (not username) of the owner of the bot.", - "QUEUE_ALL": "Number of parallel tasks for downloads and uploads. For example, if 20 tasks are added and QUEUE_ALL is 8, then the sum of uploading and downloading tasks is 8 and the rest are in the queue. Int. NOTE: If you want to fill QUEUE_DOWNLOAD or QUEUE_UPLOAD, then the QUEUE_ALL value must be greater than or equal to the largest one and less than or equal to the sum of QUEUE_UPLOAD and QUEUE_DOWNLOAD.", - "QUEUE_DOWNLOAD": "Number of all parallel downloading tasks. Int", - "QUEUE_UPLOAD": "Number of all parallel uploading tasks. Int", - "RCLONE_FLAGS": "key:value|key|key|key:value. Check here all RcloneFlags.", - "RCLONE_PATH": "Default rclone path to which you want to upload all the mirrors using rclone.", - "SEARCH_API_LINK": "Search API app link. Get your API from deploying this repository. Supported sites: 1337x, Piratebay, Nyaasi, Torlock, Torrent Galaxy, Zooqle, Kickass, Bitsearch, MagnetDL, Libgen, YTS, Limetorrent, TorrentFunk, Glodls, TorrentProject, and YourBittorrent.", - "SEARCH_LIMIT": "Search limit for the search API, limit for each site and not overall result limit. Default is zero (default API limit for each site).", - "STOP_DUPLICATE": "Bot will check file/folder name in Drive in case of uploading to GDRIVE_ID. If it's present in Drive, then downloading or cloning will be stopped. (NOTE: Item will be checked using name and not hash, so this feature is not perfect yet). Default is False.", - "TELEGRAM_API": "This is to authenticate your Telegram account for downloading Telegram files. You can get this from https://my.telegram.org.", - "TELEGRAM_HASH": "This is to authenticate your Telegram account for downloading Telegram files. You can get this from https://my.telegram.org.", - "TORRENT_TIMEOUT": "Timeout for dead torrents downloading with qBittorrent and Aria2c in seconds. Int", - "UPSTREAM_REPO": "Your GitHub repository link. If your repo is private, add https://username:{githubtoken}@github.com/{username}/{reponame} format. Get the token from GitHub settings. So you can update your bot from the filled repository on each restart.", - "UPSTREAM_BRANCH": "Upstream branch for updates. Default is main.", - "SET_COMMANDS": "Set bot commands automatically. Bool", - "USE_SERVICE_ACCOUNTS": "Whether to use Service Accounts or not, with google-api-python-client. For this to work see Using Service Accounts section below. Default is False", - "USER_SESSION_STRING": "To download/upload from your Telegram account. To generate a session string, use this command python3 generate_string_session.py after mounting the repo folder for sure.\n\nNOTE: You can't use the bot with private messages. Use it with superGroup.", - "YT_DLP_OPTIONS": 'Default yt-dlp options. Check all possible options HERE or use this script to convert CLI arguments to API options. 
Format: key:value|key:value|key:value. Add ^ before an integer or float, some numbers must be numeric and some strings. \nExample: "format:bv*+mergeall[vcodec=none]|nocheckcertificate:True".', -} - -uset_display_dict = { - "rcc": [ - "RClone is a command-line program to sync files and directories to and from different cloud storage providers like GDrive, OneDrive...", - "Send rclone.conf. Timeout: 60 sec", - ], - "prefix": [ - "Filename Prefix is the front part attached to the filename of the leech files.", - "Send filename prefix. Timeout: 60 sec", - ], - "suffix": [ - "Filename Suffix is the end part attached to the filename of the leech files.", - "Send filename suffix. Timeout: 60 sec", - ], - "remname": [ - "Filename Remname is a combination of regex patterns used for removing or manipulating the filename of the leech files.", - "Send filename remname. Timeout: 60 sec", - ], - "metadata": [ - "Metadata will change MKV video files including all audio, streams, and subtitle titles.", - "Send metadata title. Timeout: 60 sec", - ], - "attachment": [ - "Attachment url, it will added in mkv as thumbnail or cover photo, whetever you say.", - "Send raw photo url, example from imgbb.com . Timeout: 60 sec", - ], - "lcaption": [ - "Leech Caption is the custom caption on the leech files uploaded by the bot.", - "Send leech caption. You can add HTML tags. Timeout: 60 sec", - ], - "ldump": [ - "Leech Files User Dump for personal use as a storage.", - "Send leech dump channel ID. Timeout: 60 sec", - ], - "thumb": [ - "Custom thumbnail to appear on the leeched files uploaded by the bot.", - "Send a photo to save it as a custom thumbnail. Timeout: 60 sec", - ], - "yt_opt": [ - "YT-DLP Options are the custom quality settings for the extraction of videos from yt-dlp supported sites.", - 'Send YT-DLP options. Timeout: 60 sec\nFormat: key:value|key:value|key:value.\nExample: format:bv*+mergeall[vcodec=none]|nocheckcertificate:True\nCheck all yt-dlp API options from this file or use this script to convert CLI arguments to API options.', - ], - "user_tds": [ - f'UserTD helps to upload files via the bot to your custom drive destination through global SA mail.\n\nSA Mail: {SA if (SA := GROUPS_EMAIL) else "Not Specified"}', - "Send User TD details for use while mirroring/cloning.\nFormat:\nname drive_id/link index (optional)\n\nNOTE:\n1. You must add our SA mail to your drive with write permission.\n2. Names can have spaces.\n3. 
Drive ID must be valid for acceptance.\n\nTimeout: 60 sec.", - ], -} diff --git a/bot/helper/ext_utils/links_utils.py b/bot/helper/ext_utils/links_utils.py new file mode 100644 index 000000000..a01f4cdae --- /dev/null +++ b/bot/helper/ext_utils/links_utils.py @@ -0,0 +1,57 @@ +from re import match as re_match + + +def is_magnet(url: str): + return bool(re_match(r"magnet:\?xt=urn:(btih|btmh):[a-zA-Z0-9]*\s*", url)) + + +def is_url(url: str): + return bool( + re_match( + r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$", + url, + ), + ) + + +def is_gdrive_link(url: str): + return "drive.google.com" in url or "drive.usercontent.google.com" in url + + +def is_telegram_link(url: str): + return url.startswith(("https://t.me/", "tg://openmessage?user_id=")) + + +def is_share_link(url: str): + return bool( + re_match( + r"https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+", + url, + ), + ) + + +def is_rclone_path(path: str): + return bool( + re_match( + r"^(mrcc:)?(?!(magnet:|mtp:|sa:|tp:))(?![- ])[a-zA-Z0-9_\. -]+(? 1 or audios > 1 + return False + + +async def get_media_info(path): + try: + result = await cmd_exec( + [ + "ffprobe", + "-hide_banner", + "-loglevel", + "error", + "-print_format", + "json", + "-show_format", + path, + ], + ) + except Exception as e: + LOGGER.error(f"Get Media Info: {e}. Mostly File not found! - File: {path}") + return 0, None, None + if result[0] and result[2] == 0: + fields = eval(result[0]).get("format") + if fields is None: + LOGGER.error(f"get_media_info: {result}") + return 0, None, None + duration = round(float(fields.get("duration", 0))) + tags = fields.get("tags", {}) + artist = tags.get("artist") or tags.get("ARTIST") or tags.get("Artist") + title = tags.get("title") or tags.get("TITLE") or tags.get("Title") + return duration, artist, title + return 0, None, None + + +async def get_document_type(path): + is_video, is_audio, is_image = False, False, False + if path.endswith(tuple(ARCH_EXT)) or re_search( + r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$", + path, + ): + return is_video, is_audio, is_image + mime_type = await sync_to_async(get_mime_type, path) + if mime_type.startswith("image"): + return False, False, True + try: + result = await cmd_exec( + [ + "ffprobe", + "-hide_banner", + "-loglevel", + "error", + "-print_format", + "json", + "-show_streams", + path, + ], + ) + if result[1] and mime_type.startswith("video"): + is_video = True + except Exception as e: + LOGGER.error( + f"Get Document Type: {e}. Mostly File not found! 
- File: {path}",
+        )
+    if mime_type.startswith("audio"):
+        return False, True, False
+    if not mime_type.startswith("video") and not mime_type.endswith(
+        "octet-stream",
+    ):
+        return is_video, is_audio, is_image
+    if mime_type.startswith("video"):
+        is_video = True
+        return is_video, is_audio, is_image
+    if result[0] and result[2] == 0:
+        fields = eval(result[0]).get("streams")
+        if fields is None:
+            LOGGER.error(f"get_document_type: {result}")
+            return is_video, is_audio, is_image
+        is_video = False
+        for stream in fields:
+            if stream.get("codec_type") == "video":
+                is_video = True
+            elif stream.get("codec_type") == "audio":
+                is_audio = True
+    return is_video, is_audio, is_image
+
+
+async def take_ss(video_file, ss_nb):
+    duration = (await get_media_info(video_file))[0]
+    if duration != 0:
+        dirpath, name = video_file.rsplit("/", 1)
+        name, _ = ospath.splitext(name)
+        dirpath = f"{dirpath}/{name}_ss"
+        await makedirs(dirpath, exist_ok=True)
+        interval = duration // (ss_nb + 1)
+        cap_time = interval
+        cmds = []
+        for i in range(ss_nb):
+            output = f"{dirpath}/SS.{name}_{i:02}.png"
+            cmd = [
+                "xtra",
+                "-hide_banner",
+                "-loglevel",
+                "error",
+                "-ss",
+                f"{cap_time}",
+                "-i",
+                video_file,
+                "-q:v",
+                "1",
+                "-frames:v",
+                "1",
+                "-threads",
+                f"{max(1, cpu_count() // 2)}",
+                output,
+            ]
+            cap_time += interval
+            cmds.append(cmd_exec(cmd))
+        try:
+            results = await wait_for(gather(*cmds), timeout=60)
+            if results[0][2] != 0:
+                LOGGER.error(
+                    f"Error while creating screenshots from video. Path: {video_file}. stderr: {results[0][1]}",
+                )
+                await rmtree(dirpath, ignore_errors=True)
+                return False
+        except Exception:
+            LOGGER.error(
+                f"Error while creating screenshots from video. Path: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!",
+            )
+            await rmtree(dirpath, ignore_errors=True)
+            return False
+        return dirpath
+    LOGGER.error("take_ss: Can't get the duration of video")
+    return False
+
+
+async def get_audio_thumbnail(audio_file):
+    output_dir = f"{Config.DOWNLOAD_DIR}Thumbnails"
+    await makedirs(output_dir, exist_ok=True)
+    output = ospath.join(output_dir, f"{time()}.jpg")
+    cmd = [
+        "xtra",
+        "-hide_banner",
+        "-loglevel",
+        "error",
+        "-i",
+        audio_file,
+        "-an",
+        "-vcodec",
+        "copy",
+        "-threads",
+        f"{max(1, cpu_count() // 2)}",
+        output,
+    ]
+    try:
+        _, err, code = await wait_for(cmd_exec(cmd), timeout=60)
+        if code != 0 or not await aiopath.exists(output):
+            LOGGER.error(
+                f"Error while extracting thumbnail from audio. Name: {audio_file} stderr: {err}",
+            )
+            return None
+    except Exception:
+        LOGGER.error(
+            f"Error while extracting thumbnail from audio. Name: {audio_file}. Error: Timeout some issues with ffmpeg with specific arch!",
+        )
+        return None
+    return output
+
+
+async def get_video_thumbnail(video_file, duration):
+    output_dir = f"{Config.DOWNLOAD_DIR}Thumbnails"
+    await makedirs(output_dir, exist_ok=True)
+    output = ospath.join(output_dir, f"{time()}.jpg")
+    if duration is None:
+        duration = (await get_media_info(video_file))[0]
+    if duration == 0:
+        duration = 3
+    duration = duration // 2
+    cmd = [
+        "xtra",
+        "-hide_banner",
+        "-loglevel",
+        "error",
+        "-ss",
+        f"{duration}",
+        "-i",
+        video_file,
+        "-vf",
+        "thumbnail",
+        "-q:v",
+        "1",
+        "-frames:v",
+        "1",
+        "-threads",
+        f"{max(1, cpu_count() // 2)}",
+        output,
+    ]
+    try:
+        _, err, code = await wait_for(cmd_exec(cmd), timeout=60)
+        if code != 0 or not await aiopath.exists(output):
+            LOGGER.error(
+                f"Error while extracting thumbnail from video. 
Name: {video_file} stderr: {err}", + ) + return None + except Exception: + LOGGER.error( + f"Error while extracting thumbnail from video. Name: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!", + ) + return None + return output + + +async def get_multiple_frames_thumbnail(video_file, layout, keep_screenshots): + ss_nb = layout.split("x") + ss_nb = int(ss_nb[0]) * int(ss_nb[1]) + dirpath = await take_ss(video_file, ss_nb) + if not dirpath: + return None + output_dir = f"{Config.DOWNLOAD_DIR}Thumbnails" + await makedirs(output_dir, exist_ok=True) + output = ospath.join(output_dir, f"{time()}.jpg") + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-pattern_type", + "glob", + "-i", + f"{escape(dirpath)}/*.png", + "-vf", + f"tile={layout}, thumbnail", + "-q:v", + "1", + "-frames:v", + "1", + "-f", + "mjpeg", + "-threads", + f"{max(1, cpu_count() // 2)}", + output, + ] + try: + _, err, code = await wait_for(cmd_exec(cmd), timeout=60) + if code != 0 or not await aiopath.exists(output): + LOGGER.error( + f"Error while combining thumbnails for video. Name: {video_file} stderr: {err}", + ) + return None + except Exception: + LOGGER.error( + f"Error while combining thumbnails from video. Name: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!", + ) + return None + finally: + if not keep_screenshots: + await rmtree(dirpath, ignore_errors=True) + return output + + +def is_mkv(file): + return file.lower().endswith(".mkv") + + +class FFMpeg: + def __init__(self, listener): + self._listener = listener + self._processed_bytes = 0 + self._last_processed_bytes = 0 + self._processed_time = 0 + self._last_processed_time = 0 + self._speed_raw = 0 + self._progress_raw = 0 + self._total_time = 0 + self._eta_raw = 0 + self._time_rate = 0.1 + self._start_time = 0 + + @property + def processed_bytes(self): + return self._processed_bytes + + @property + def speed_raw(self): + return self._speed_raw + + @property + def progress_raw(self): + return self._progress_raw + + @property + def eta_raw(self): + return self._eta_raw + + def clear(self): + self._start_time = time() + self._processed_bytes = 0 + self._processed_time = 0 + self._speed_raw = 0 + self._progress_raw = 0 + self._eta_raw = 0 + self._time_rate = 0.1 + self._last_processed_time = 0 + self._last_processed_bytes = 0 + + async def _ffmpeg_progress(self): + while not ( + self._listener.subproc.returncode is not None + or self._listener.is_cancelled + or self._listener.subproc.stdout.at_eof() + ): + try: + line = await wait_for(self._listener.subproc.stdout.readline(), 60) + except Exception: + break + line = line.decode().strip() + if not line: + break + if "=" in line: + key, value = line.split("=", 1) + if value != "N/A": + if key == "total_size": + self._processed_bytes = ( + int(value) + self._last_processed_bytes + ) + self._speed_raw = self._processed_bytes / ( + time() - self._start_time + ) + elif key == "speed": + self._time_rate = max(0.1, float(value.strip("x"))) + elif key == "out_time": + self._processed_time = ( + time_to_seconds(value) + self._last_processed_time + ) + try: + self._progress_raw = ( + self._processed_time / self._total_time * 100 + ) + self._eta_raw = ( + self._total_time - self._processed_time + ) / self._time_rate + except Exception: + self._progress_raw = 0 + self._eta_raw = 0 + await sleep(0.05) + + async def ffmpeg_cmds(self, ffmpeg, f_path): + self.clear() + self._total_time = (await get_media_info(f_path))[0] + base_name, ext = ospath.splitext(f_path) + 
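+        # Find every "mltb" placeholder among the ffmpeg args; each one is an
+        # output template to be expanded against this input's base name. A
+        # template with its own extension keeps it; otherwise the input's
+        # extension is reused, and a numbered "ffmpeg{index}." prefix avoids
+        # clobbering the input file when the names would collide.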
dir, base_name = base_name.rsplit("/", 1) + indices = [ + index + for index, item in enumerate(ffmpeg) + if item.startswith("mltb") or item == "mltb" + ] + outputs = [] + for index in indices: + output_file = ffmpeg[index] + if output_file != "mltb" and output_file.startswith("mltb"): + bo, oext = ospath.splitext(output_file) + if oext: + if ext == oext: + prefix = f"ffmpeg{index}." if bo == "mltb" else "" + else: + prefix = "" + ext = "" + else: + prefix = "" + else: + prefix = f"ffmpeg{index}." + output = f"{dir}/{prefix}{output_file.replace('mltb', base_name)}{ext}" + outputs.append(output) + ffmpeg[index] = output + if self._listener.is_cancelled: + return False + self._listener.subproc = await create_subprocess_exec( + *ffmpeg, + stdout=PIPE, + stderr=PIPE, + ) + await self._ffmpeg_progress() + _, stderr = await self._listener.subproc.communicate() + code = self._listener.subproc.returncode + if self._listener.is_cancelled: + return False + if code == 0: + return outputs + if code == -9: + self._listener.is_cancelled = True + return False + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error( + f"{stderr}. Something went wrong while running ffmpeg cmd, mostly file requires different/specific arguments. Path: {f_path}", + ) + for op in outputs: + if await aiopath.exists(op): + await remove(op) + return False + + async def metadata_watermark_cmds(self, ffmpeg, f_path): + self.clear() + self._total_time = (await get_media_info(f_path))[0] + if self._listener.is_cancelled: + return False + self._listener.subproc = await create_subprocess_exec( + *ffmpeg, + stdout=PIPE, + stderr=PIPE, + ) + await self._ffmpeg_progress() + _, stderr = await self._listener.subproc.communicate() + code = self._listener.subproc.returncode + if self._listener.is_cancelled: + return False + if code == 0: + return True + if code == -9: + self._listener.is_cancelled = True + return False + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error( + f"{stderr}. Something went wrong while running ffmpeg cmd, mostly file requires different/specific arguments. 
Path: {f_path}",
+        )
+        return False
+
+    async def convert_video(self, video_file, ext, retry=False):
+        self.clear()
+        self._total_time = (await get_media_info(video_file))[0]
+        base_name = ospath.splitext(video_file)[0]
+        output = f"{base_name}.{ext}"
+        if retry:
+            cmd = [
+                "xtra",
+                "-hide_banner",
+                "-loglevel",
+                "error",
+                "-progress",
+                "pipe:1",
+                "-i",
+                video_file,
+                "-map",
+                "0",
+                "-c:v",
+                "libx264",
+                "-c:a",
+                "aac",
+                "-threads",
+                f"{max(1, cpu_count() // 2)}",
+                output,
+            ]
+            if ext == "mp4":
+                cmd[14:14] = ["-c:s", "mov_text"]
+            elif ext == "mkv":
+                cmd[14:14] = ["-c:s", "ass"]
+            else:
+                cmd[14:14] = ["-c:s", "copy"]
+        else:
+            cmd = [
+                "xtra",
+                "-hide_banner",
+                "-loglevel",
+                "error",
+                "-progress",
+                "pipe:1",
+                "-i",
+                video_file,
+                "-map",
+                "0",
+                "-c",
+                "copy",
+                "-threads",
+                f"{max(1, cpu_count() // 2)}",
+                output,
+            ]
+        if self._listener.is_cancelled:
+            return False
+        self._listener.subproc = await create_subprocess_exec(
+            *cmd,
+            stdout=PIPE,
+            stderr=PIPE,
+        )
+        await self._ffmpeg_progress()
+        _, stderr = await self._listener.subproc.communicate()
+        code = self._listener.subproc.returncode
+        if self._listener.is_cancelled:
+            return False
+        if code == 0:
+            return output
+        if code == -9:
+            self._listener.is_cancelled = True
+            return False
+        if await aiopath.exists(output):
+            await remove(output)
+        if not retry:
+            return await self.convert_video(video_file, ext, True)
+        try:
+            stderr = stderr.decode().strip()
+        except Exception:
+            stderr = "Unable to decode the error!"
+        LOGGER.error(
+            f"{stderr}. Something went wrong while converting video, mostly the file needs a specific codec. Path: {video_file}",
+        )
+        return False
+
+    async def convert_audio(self, audio_file, ext):
+        self.clear()
+        self._total_time = (await get_media_info(audio_file))[0]
+        base_name = ospath.splitext(audio_file)[0]
+        output = f"{base_name}.{ext}"
+        cmd = [
+            "xtra",
+            "-hide_banner",
+            "-loglevel",
+            "error",
+            "-progress",
+            "pipe:1",
+            "-i",
+            audio_file,
+            "-threads",
+            f"{max(1, cpu_count() // 2)}",
+            output,
+        ]
+        if self._listener.is_cancelled:
+            return False
+        self._listener.subproc = await create_subprocess_exec(
+            *cmd,
+            stdout=PIPE,
+            stderr=PIPE,
+        )
+        await self._ffmpeg_progress()
+        _, stderr = await self._listener.subproc.communicate()
+        code = self._listener.subproc.returncode
+        if self._listener.is_cancelled:
+            return False
+        if code == 0:
+            return output
+        if code == -9:
+            self._listener.is_cancelled = True
+            return False
+        try:
+            stderr = stderr.decode().strip()
+        except Exception:
+            stderr = "Unable to decode the error!"
+        LOGGER.error(
+            f"{stderr}. Something went wrong while converting audio, mostly the file needs a specific codec. 
Path: {audio_file}", + ) + if await aiopath.exists(output): + await remove(output) + return False + + async def sample_video(self, video_file, sample_duration, part_duration): + self.clear() + self._total_time = sample_duration + dir, name = video_file.rsplit("/", 1) + output_file = f"{dir}/SAMPLE.{name}" + segments = [(0, part_duration)] + duration = (await get_media_info(video_file))[0] + remaining_duration = duration - (part_duration * 2) + parts = (sample_duration - (part_duration * 2)) // part_duration + time_interval = remaining_duration // parts + next_segment = time_interval + for _ in range(parts): + segments.append((next_segment, next_segment + part_duration)) + next_segment += time_interval + segments.append((duration - part_duration, duration)) + + filter_complex = "" + for i, (start, end) in enumerate(segments): + filter_complex += ( + f"[0:v]trim=start={start}:end={end},setpts=PTS-STARTPTS[v{i}]; " + ) + filter_complex += ( + f"[0:a]atrim=start={start}:end={end},asetpts=PTS-STARTPTS[a{i}]; " + ) + + for i in range(len(segments)): + filter_complex += f"[v{i}][a{i}]" + + filter_complex += f"concat=n={len(segments)}:v=1:a=1[vout][aout]" + + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-progress", + "pipe:1", + "-i", + video_file, + "-filter_complex", + filter_complex, + "-map", + "[vout]", + "-map", + "[aout]", + "-c:v", + "libx264", + "-c:a", + "aac", + "-threads", + f"{max(1, cpu_count() // 2)}", + output_file, + ] + + if self._listener.is_cancelled: + return False + self._listener.subproc = await create_subprocess_exec( + *cmd, + stdout=PIPE, + stderr=PIPE, + ) + await self._ffmpeg_progress() + _, stderr = await self._listener.subproc.communicate() + code = self._listener.subproc.returncode + if self._listener.is_cancelled: + return False + if code == -9: + self._listener.is_cancelled = True + return False + if code == 0: + return output_file + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" + LOGGER.error( + f"{stderr}. Something went wrong while creating sample video, mostly file is corrupted. Path: {video_file}", + ) + if await aiopath.exists(output_file): + await remove(output_file) + return False + + async def split(self, f_path, file_, parts, split_size): + self.clear() + multi_streams = True + self._total_time = duration = (await get_media_info(f_path))[0] + base_name, extension = ospath.splitext(file_) + split_size -= 3000000 + start_time = 0 + i = 1 + while i <= parts or start_time < duration - 4: + out_path = f_path.replace(file_, f"{base_name}.part{i:03}{extension}") + cmd = [ + "xtra", + "-hide_banner", + "-loglevel", + "error", + "-progress", + "pipe:1", + "-ss", + str(start_time), + "-i", + f_path, + "-fs", + str(split_size), + "-map", + "0", + "-map_chapters", + "-1", + "-async", + "1", + "-strict", + "-2", + "-c", + "copy", + "-threads", + f"{max(1, cpu_count() // 2)}", + out_path, + ] + if not multi_streams: + del cmd[12] + del cmd[12] + if self._listener.is_cancelled: + return False + self._listener.subproc = await create_subprocess_exec( + *cmd, + stdout=PIPE, + stderr=PIPE, + ) + await self._ffmpeg_progress() + _, stderr = await self._listener.subproc.communicate() + code = self._listener.subproc.returncode + if self._listener.is_cancelled: + return False + if code == -9: + self._listener.is_cancelled = True + return False + if code != 0: + try: + stderr = stderr.decode().strip() + except Exception: + stderr = "Unable to decode the error!" 
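+                # Two fallbacks on a failed part (see below): first retry the
+                # whole split without "-map 0" (it breaks on some containers),
+                # then give up and let the caller upload the file unsplit.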
+                with contextlib.suppress(Exception):
+                    await remove(out_path)
+                if multi_streams:
+                    LOGGER.warning(
+                        f"{stderr}. Retrying without map, -map 0 not working in all situations. Path: {f_path}",
+                    )
+                    multi_streams = False
+                    continue
+                LOGGER.warning(
+                    f"{stderr}. Unable to split this video; if its size is less than {self._listener.max_split_size} it will be uploaded as is. Path: {f_path}",
+                )
+                return False
+            out_size = await aiopath.getsize(out_path)
+            if out_size > self._listener.max_split_size:
+                split_size -= (out_size - self._listener.max_split_size) + 5000000
+                LOGGER.warning(
+                    f"Part size is {out_size}. Trying again with lower split size! Path: {f_path}",
+                )
+                await remove(out_path)
+                continue
+            lpd = (await get_media_info(out_path))[0]
+            if lpd == 0:
+                LOGGER.error(
+                    f"Something went wrong while splitting, mostly file is corrupted. Path: {f_path}",
+                )
+                break
+            if duration == lpd:
+                LOGGER.warning(
+                    f"This file has been split with the default stream and audio, so you will only see one part smaller than the original because it doesn't have all streams and audios. This happens mostly with MKV videos. Path: {f_path}",
+                )
+                break
+            if lpd <= 3:
+                await remove(out_path)
+                break
+            self._last_processed_time += lpd
+            self._last_processed_bytes += out_size
+            start_time += lpd - 3
+            i += 1
+
+        return True
diff --git a/bot/helper/ext_utils/shorteners.py b/bot/helper/ext_utils/shorteners.py
deleted file mode 100644
index 12692c6b7..000000000
--- a/bot/helper/ext_utils/shorteners.py
+++ /dev/null
@@ -1,83 +0,0 @@
-from time import sleep
-from base64 import b64encode
-from random import choice, random, randrange
-from urllib.parse import quote
-
-from urllib3 import disable_warnings
-from cloudscraper import create_scraper
-
-from bot import LOGGER, shorteners_list
-
-
-def short_url(longurl, attempt=0):
-    if not shorteners_list:
-        return longurl
-    if attempt >= 4:
-        return longurl
-    i = 0 if len(shorteners_list) == 1 else randrange(len(shorteners_list))
-    _shorten_dict = shorteners_list[i]
-    _shortener = _shorten_dict["domain"]
-    _shortener_api = _shorten_dict["api_key"]
-    cget = create_scraper().request
-    disable_warnings()
-    try:
-        if "shorte.st" in _shortener:
-            headers = {"public-api-token": _shortener_api}
-            data = {"urlToShorten": quote(longurl)}
-            return cget(
-                "PUT",
-                "https://api.shorte.st/v1/data/url",
-                headers=headers,
-                data=data,
-            ).json()["shortenedUrl"]
-        if "linkvertise" in _shortener:
-            url = quote(b64encode(longurl.encode("utf-8")))
-            linkvertise = [
-                f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
-                f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
-                f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
-                f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
-            ]
-            return choice(linkvertise)
-        if "bitly.com" in _shortener:
-            headers = {"Authorization": f"Bearer {_shortener_api}"}
-            return cget(
-                "POST",
-                "https://api-ssl.bit.ly/v4/shorten",
-                json={"long_url": longurl},
-                headers=headers,
-            ).json()["link"]
-        if "ouo.io" in _shortener:
-            return cget(
-                "GET",
-                f"http://ouo.io/api/{_shortener_api}?s={longurl}",
-                verify=False,
-            ).text
-        if "cutt.ly" in _shortener:
-            return cget(
-                "GET",
-                f"http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}",
-            ).json()["url"]["shortLink"]
-        res = cget(
-            "GET",
-            f"https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}",
-        ).json()
-        shorted = res["shortenedUrl"]
-        if not shorted:
-            shrtco_res = 
cget( - "GET", f"https://api.shrtco.de/v2/shorten?url={quote(longurl)}" - ).json() - shrtco_link = shrtco_res["result"]["full_short_link"] - res = cget( - "GET", - f"https://{_shortener}/api?api={_shortener_api}&url={shrtco_link}", - ).json() - shorted = res["shortenedUrl"] - if not shorted: - shorted = longurl - return shorted - except Exception as e: - LOGGER.error(e) - sleep(1) - attempt += 1 - return short_url(longurl, attempt) diff --git a/bot/helper/ext_utils/status_utils.py b/bot/helper/ext_utils/status_utils.py new file mode 100644 index 000000000..3d939b16c --- /dev/null +++ b/bot/helper/ext_utils/status_utils.py @@ -0,0 +1,264 @@ +import contextlib +from asyncio import iscoroutinefunction +from html import escape +from time import time + +from psutil import cpu_percent, disk_usage, virtual_memory + +from bot import bot_start_time, status_dict, task_dict, task_dict_lock +from bot.core.config_manager import Config +from bot.helper.telegram_helper.button_build import ButtonMaker + +from .bot_utils import sync_to_async + +SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB"] + + +class MirrorStatus: + STATUS_UPLOAD = "Upload" + STATUS_DOWNLOAD = "Download" + STATUS_CLONE = "Clone" + STATUS_QUEUEDL = "QueueDl" + STATUS_QUEUEUP = "QueueUp" + STATUS_PAUSED = "Pause" + STATUS_ARCHIVE = "Archive" + STATUS_EXTRACT = "Extract" + STATUS_SPLIT = "Split" + STATUS_CHECK = "CheckUp" + STATUS_SEED = "Seed" + STATUS_SAMVID = "SamVid" + STATUS_CONVERT = "Convert" + STATUS_FFMPEG = "FFmpeg" + STATUS_METADATA = "Metadata" + STATUS_WATERMARK = "Watermark" + + +STATUSES = { + "ALL": "All", + "DL": MirrorStatus.STATUS_DOWNLOAD, + "UP": MirrorStatus.STATUS_UPLOAD, + "QD": MirrorStatus.STATUS_QUEUEDL, + "QU": MirrorStatus.STATUS_QUEUEUP, + "AR": MirrorStatus.STATUS_ARCHIVE, + "EX": MirrorStatus.STATUS_EXTRACT, + "SD": MirrorStatus.STATUS_SEED, + "CL": MirrorStatus.STATUS_CLONE, + "CM": MirrorStatus.STATUS_CONVERT, + "SP": MirrorStatus.STATUS_SPLIT, + "SV": MirrorStatus.STATUS_SAMVID, + "FF": MirrorStatus.STATUS_FFMPEG, + "PA": MirrorStatus.STATUS_PAUSED, + "CK": MirrorStatus.STATUS_CHECK, +} + + +async def get_task_by_gid(gid: str): + async with task_dict_lock: + for task in task_dict.values(): + if hasattr(task, "seeding"): + await sync_to_async(task.update) + if task.gid().startswith(gid): + return task + return None + + +def get_specific_tasks(status, user_id): + if status == "All": + if user_id: + return [ + tk for tk in task_dict.values() if tk.listener.user_id == user_id + ] + return list(task_dict.values()) + if user_id: + return [ + tk + for tk in task_dict.values() + if tk.listener.user_id == user_id + and ( + ((st := tk.status()) and st == status) + or ( + status == MirrorStatus.STATUS_DOWNLOAD + and st not in STATUSES.values() + ) + ) + ] + return [ + tk + for tk in task_dict.values() + if ((st := tk.status()) and st == status) + or (status == MirrorStatus.STATUS_DOWNLOAD and st not in STATUSES.values()) + ] + + +async def get_all_tasks(req_status: str, user_id): + async with task_dict_lock: + return await sync_to_async(get_specific_tasks, req_status, user_id) + + +def get_readable_file_size(size_in_bytes): + if not size_in_bytes: + return "0B" + + index = 0 + while size_in_bytes >= 1024 and index < len(SIZE_UNITS) - 1: + size_in_bytes /= 1024 + index += 1 + + return f"{size_in_bytes:.2f}{SIZE_UNITS[index]}" + + +def get_readable_time(seconds, full_time=False): + periods = [ + ("millennium", 31536000000), + ("century", 3153600000), + ("decade", 315360000), + ("year", 31536000), + ("month", 2592000), 
+ ("week", 604800), + ("day", 86400), + ("hour", 3600), + ("minute", 60), + ("second", 1), + ] + result = "" + for period_name, period_seconds in periods: + if seconds >= period_seconds: + period_value, seconds = divmod(seconds, period_seconds) + plural_suffix = "s" if period_value > 1 else "" + result += f"{int(period_value)} {period_name}{plural_suffix} " + if not full_time: + break + return result.strip() + + +def time_to_seconds(time_duration): + try: + parts = time_duration.split(":") + if len(parts) == 3: + hours, minutes, seconds = map(float, parts) + elif len(parts) == 2: + hours = 0 + minutes, seconds = map(float, parts) + elif len(parts) == 1: + hours = 0 + minutes = 0 + seconds = float(parts[0]) + else: + return 0 + return hours * 3600 + minutes * 60 + seconds + except Exception: + return 0 + + +def speed_string_to_bytes(size_text: str): + size = 0 + size_text = size_text.lower() + if "k" in size_text: + size += float(size_text.split("k")[0]) * 1024 + elif "m" in size_text: + size += float(size_text.split("m")[0]) * 1048576 + elif "g" in size_text: + size += float(size_text.split("g")[0]) * 1073741824 + elif "t" in size_text: + size += float(size_text.split("t")[0]) * 1099511627776 + elif "b" in size_text: + size += float(size_text.split("b")[0]) + return size + + +def get_progress_bar_string(pct): + if isinstance(pct, str): + pct = float(pct.strip("%")) + p = min(max(pct, 0), 100) + c_full = int((p + 5) // 10) + p_str = "●" * c_full + p_str += "○" * (10 - c_full) + return p_str + + +async def get_readable_message(sid, is_user, page_no=1, status="All", page_step=1): + msg = "" + button = None + + tasks = await sync_to_async(get_specific_tasks, status, sid if is_user else None) + + STATUS_LIMIT = 4 + tasks_no = len(tasks) + pages = (max(tasks_no, 1) + STATUS_LIMIT - 1) // STATUS_LIMIT + if page_no > pages: + page_no = (page_no - 1) % pages + 1 + status_dict[sid]["page_no"] = page_no + elif page_no < 1: + page_no = pages - (abs(page_no) % pages) + status_dict[sid]["page_no"] = page_no + start_position = (page_no - 1) * STATUS_LIMIT + + for index, task in enumerate( + tasks[start_position : STATUS_LIMIT + start_position], + start=1, + ): + tstatus = await sync_to_async(task.status) if status == "All" else status + if task.listener.is_super_chat: + msg += f"{index + start_position}.{tstatus}: " + else: + msg += f"{index + start_position}.{tstatus}: " + msg += f"{escape(f'{task.name()}')}" + if task.listener.subname: + msg += f"\n{task.listener.subname}" + if ( + tstatus not in [MirrorStatus.STATUS_SEED, MirrorStatus.STATUS_QUEUEUP] + and task.listener.progress + ): + progress = ( + await task.progress() + if iscoroutinefunction(task.progress) + else task.progress() + ) + msg += f"\n{get_progress_bar_string(progress)} {progress}" + if task.listener.subname: + subsize = f"/{get_readable_file_size(task.listener.subsize)}" + ac = len(task.listener.files_to_proceed) + count = f"({task.listener.proceed_count}/{ac or '?'})" + else: + subsize = "" + count = "" + msg += f"\nProcessed: {task.processed_bytes()}{subsize} {count}" + msg += f"\nSize: {task.size()}" + msg += f"\nSpeed: {task.speed()}" + msg += f"\nETA: {task.eta()}" + if hasattr(task, "seeders_num"): + with contextlib.suppress(Exception): + msg += f"\nSeeders: {task.seeders_num()} | Leechers: {task.leechers_num()}" + elif tstatus == MirrorStatus.STATUS_SEED: + msg += f"\nSize: {task.size()}" + msg += f"\nSpeed: {task.seed_speed()}" + msg += f" | Uploaded: {task.uploaded_bytes()}" + msg += f"\nRatio: {task.ratio()}" + msg += f" | 
+
+
+async def get_readable_message(sid, is_user, page_no=1, status="All", page_step=1):
+    msg = ""
+    button = None
+
+    tasks = await sync_to_async(get_specific_tasks, status, sid if is_user else None)
+
+    STATUS_LIMIT = 4
+    tasks_no = len(tasks)
+    pages = (max(tasks_no, 1) + STATUS_LIMIT - 1) // STATUS_LIMIT
+    if page_no > pages:
+        page_no = (page_no - 1) % pages + 1
+        status_dict[sid]["page_no"] = page_no
+    elif page_no < 1:
+        page_no = pages - (abs(page_no) % pages)
+        status_dict[sid]["page_no"] = page_no
+    start_position = (page_no - 1) * STATUS_LIMIT
+
+    for index, task in enumerate(
+        tasks[start_position : STATUS_LIMIT + start_position],
+        start=1,
+    ):
+        tstatus = await sync_to_async(task.status) if status == "All" else status
+        if task.listener.is_super_chat:
+            msg += f"<b>{index + start_position}.<a href='{task.listener.message.link}'>{tstatus}</a>: </b>"
+        else:
+            msg += f"<b>{index + start_position}.{tstatus}: </b>"
+        msg += f"<code>{escape(f'{task.name()}')}</code>"
+        if task.listener.subname:
+            msg += f"\n<i>{task.listener.subname}</i>"
+        if (
+            tstatus not in [MirrorStatus.STATUS_SEED, MirrorStatus.STATUS_QUEUEUP]
+            and task.listener.progress
+        ):
+            progress = (
+                await task.progress()
+                if iscoroutinefunction(task.progress)
+                else task.progress()
+            )
+            msg += f"\n{get_progress_bar_string(progress)} {progress}"
+            if task.listener.subname:
+                subsize = f"/{get_readable_file_size(task.listener.subsize)}"
+                ac = len(task.listener.files_to_proceed)
+                count = f"({task.listener.proceed_count}/{ac or '?'})"
+            else:
+                subsize = ""
+                count = ""
+            msg += f"\nProcessed: {task.processed_bytes()}{subsize} {count}"
+            msg += f"\nSize: {task.size()}"
+            msg += f"\nSpeed: {task.speed()}"
+            msg += f"\nETA: {task.eta()}"
+            if hasattr(task, "seeders_num"):
+                with contextlib.suppress(Exception):
+                    msg += f"\nSeeders: {task.seeders_num()} | Leechers: {task.leechers_num()}"
+        elif tstatus == MirrorStatus.STATUS_SEED:
+            msg += f"\nSize: {task.size()}"
+            msg += f"\nSpeed: {task.seed_speed()}"
+            msg += f" | Uploaded: {task.uploaded_bytes()}"
+            msg += f"\nRatio: {task.ratio()}"
+            msg += f" | Time: {task.seeding_time()}"
+        else:
+            msg += f"\nSize: {task.size()}"
+        msg += f"\n/stop_{task.gid()}\n\n"
+
+    if len(msg) == 0:
+        if status == "All":
+            return None, None
+        msg = f"No Active {status} Tasks!\n\n"
+    buttons = ButtonMaker()
+    if not is_user:
+        buttons.data_button("📜", f"status {sid} ov", position="header")
+    if len(tasks) > STATUS_LIMIT:
+        msg += f"Page: {page_no}/{pages} | Tasks: {tasks_no} | Step: {page_step}\n"
+        buttons.data_button("<<", f"status {sid} pre", position="header")
+        buttons.data_button(">>", f"status {sid} nex", position="header")
+        if tasks_no > 30:
+            for i in [1, 2, 4, 6, 8, 10, 15]:
+                buttons.data_button(i, f"status {sid} ps {i}", position="footer")
+    if status != "All" or tasks_no > 20:
+        for label, status_value in list(STATUSES.items()):
+            if status_value != status:
+                buttons.data_button(label, f"status {sid} st {status_value}")
+    buttons.data_button("♻️", f"status {sid} ref", position="header")
+    button = buttons.build_menu(8)
+    msg += f"CPU: {cpu_percent()}% | FREE: {get_readable_file_size(disk_usage(Config.DOWNLOAD_DIR).free)}"
+    msg += f"\nRAM: {virtual_memory().percent}% | UPTIME: {get_readable_time(time() - bot_start_time)}"
+    return msg, button
diff --git a/bot/helper/ext_utils/task_manager.py b/bot/helper/ext_utils/task_manager.py
index 9192c2391..2d67b19b5 100644
--- a/bot/helper/ext_utils/task_manager.py
+++ b/bot/helper/ext_utils/task_manager.py
@@ -2,89 +2,111 @@
 from bot import (
     LOGGER,
-    OWNER_ID,
-    queued_dl,
-    queued_up,
-    user_data,
-    config_dict,
-    download_dict,
     non_queued_dl,
     non_queued_up,
     queue_dict_lock,
+    queued_dl,
+    queued_up,
 )
-from bot.helper.ext_utils.bot_utils import (
-    sync_to_async,
-    get_user_tasks,
-    checking_access,
-    get_telegraph_list,
-    get_readable_file_size,
-)
-from bot.helper.ext_utils.files_utils import get_base_name, check_storage_threshold
-from bot.helper.telegram_helper.message_utils import BotPm_check, isAdmin, forcesub
-from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper
+from bot.core.config_manager import Config
+from bot.helper.mirror_leech_utils.gdrive_utils.search import GoogleDriveSearch
+
+from .bot_utils import get_telegraph_list, sync_to_async
+from .files_utils import get_base_name
+from .links_utils import is_gdrive_id
 
 
-async def stop_duplicate_check(name, listener):
+async def stop_duplicate_check(listener):
     if (
-        not config_dict["STOP_DUPLICATE"]
+        isinstance(listener.up_dest, int)
         or listener.is_leech
-        or listener.upPath != "gd"
         or listener.select
+        or not is_gdrive_id(listener.up_dest)
+        or (listener.up_dest.startswith("mtp:") and listener.stop_duplicate)
+        or not listener.stop_duplicate
+        or listener.same_dir
     ):
         return False, None
+
+    name = listener.name
     LOGGER.info(f"Checking File/Folder if already in Drive: {name}")
+
     if listener.compress:
-        name = f"{name}.zip"
+        name = f"{name}.7z"
     elif listener.extract:
         try:
            name = get_base_name(name)
        except Exception:
            name = None
+
    if name is not None:
        telegraph_content, contents_no = await sync_to_async(
-            GoogleDriveHelper().drive_list, name, stopDup=True
+            GoogleDriveSearch(stop_dup=True, no_multi=listener.is_clone).drive_list,
+            name,
+            listener.up_dest,
+            listener.user_id,
        )
        if telegraph_content:
            msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:"
            button = await get_telegraph_list(telegraph_content)
            return msg, button
+
    return False, None


-async def is_queued(uid):
-    all_limit = config_dict["QUEUE_ALL"]
-    dl_limit = config_dict["QUEUE_DOWNLOAD"]
+async def 
check_running_tasks(listener, state="dl"): + all_limit = Config.QUEUE_ALL + state_limit = Config.QUEUE_DOWNLOAD if state == "dl" else Config.QUEUE_UPLOAD event = None - added_to_queue = False - if all_limit or dl_limit: - async with queue_dict_lock: - dl = len(non_queued_dl) - up = len(non_queued_up) - if ( + is_over_limit = False + async with queue_dict_lock: + if state == "up" and listener.mid in non_queued_dl: + non_queued_dl.remove(listener.mid) + if ( + (all_limit or state_limit) + and not listener.force_run + and not (listener.force_upload and state == "up") + and not (listener.force_download and state == "dl") + ): + dl_count = len(non_queued_dl) + up_count = len(non_queued_up) + t_count = dl_count if state == "dl" else up_count + is_over_limit = ( all_limit - and dl + up >= all_limit - and (not dl_limit or dl >= dl_limit) - ) or (dl_limit and dl >= dl_limit): - added_to_queue = True + and dl_count + up_count >= all_limit + and (not state_limit or t_count >= state_limit) + ) or (state_limit and t_count >= state_limit) + if is_over_limit: event = Event() - queued_dl[uid] = event - return added_to_queue, event + if state == "dl": + queued_dl[listener.mid] = event + else: + queued_up[listener.mid] = event + if not is_over_limit: + if state == "up": + non_queued_up.add(listener.mid) + else: + non_queued_dl.add(listener.mid) + return is_over_limit, event -def start_dl_from_queued(uid): - queued_dl[uid].set() - del queued_dl[uid] +async def start_dl_from_queued(mid: int): + queued_dl[mid].set() + del queued_dl[mid] + non_queued_dl.add(mid) -def start_up_from_queued(uid): - queued_up[uid].set() - del queued_up[uid] + +async def start_up_from_queued(mid: int): + queued_up[mid].set() + del queued_up[mid] + non_queued_up.add(mid) async def start_from_queued(): - if all_limit := config_dict["QUEUE_ALL"]: - dl_limit = config_dict["QUEUE_DOWNLOAD"] - up_limit = config_dict["QUEUE_UPLOAD"] + if all_limit := Config.QUEUE_ALL: + dl_limit = Config.QUEUE_DOWNLOAD + up_limit = Config.QUEUE_UPLOAD async with queue_dict_lock: dl = len(non_queued_dl) up = len(non_queued_up) @@ -92,168 +114,44 @@ async def start_from_queued(): if all_ < all_limit: f_tasks = all_limit - all_ if queued_up and (not up_limit or up < up_limit): - for index, uid in enumerate(list(queued_up.keys()), start=1): - f_tasks = all_limit - all_ - start_up_from_queued(uid) + for index, mid in enumerate(list(queued_up.keys()), start=1): + await start_up_from_queued(mid) f_tasks -= 1 if f_tasks == 0 or (up_limit and index >= up_limit - up): break if queued_dl and (not dl_limit or dl < dl_limit) and f_tasks != 0: - for index, uid in enumerate(list(queued_dl.keys()), start=1): - start_dl_from_queued(uid) + for index, mid in enumerate(list(queued_dl.keys()), start=1): + await start_dl_from_queued(mid) if (dl_limit and index >= dl_limit - dl) or index == f_tasks: break return - if up_limit := config_dict["QUEUE_UPLOAD"]: + if up_limit := Config.QUEUE_UPLOAD: async with queue_dict_lock: up = len(non_queued_up) if queued_up and up < up_limit: f_tasks = up_limit - up - for index, uid in enumerate(list(queued_up.keys()), start=1): - start_up_from_queued(uid) + for index, mid in enumerate(list(queued_up.keys()), start=1): + await start_up_from_queued(mid) if index == f_tasks: break else: async with queue_dict_lock: if queued_up: - for uid in list(queued_up.keys()): - start_up_from_queued(uid) + for mid in list(queued_up.keys()): + await start_up_from_queued(mid) - if dl_limit := config_dict["QUEUE_DOWNLOAD"]: + if dl_limit := 
Config.QUEUE_DOWNLOAD: async with queue_dict_lock: dl = len(non_queued_dl) if queued_dl and dl < dl_limit: f_tasks = dl_limit - dl - for index, uid in enumerate(list(queued_dl.keys()), start=1): - start_dl_from_queued(uid) + for index, mid in enumerate(list(queued_dl.keys()), start=1): + await start_dl_from_queued(mid) if index == f_tasks: break else: async with queue_dict_lock: if queued_dl: - for uid in list(queued_dl.keys()): - start_dl_from_queued(uid) - - -async def limit_checker( - size, - listener, - is_torrent=False, - is_mega=False, - is_drive_link=False, - is_ytdlp=False, - is_playlist=None, -): - LOGGER.info("Checking limit") - user_id = listener.message.from_user.id - if ( - user_id == OWNER_ID - or user_id in user_data - and user_data[user_id].get("is_sudo") - ): - return None - if await isAdmin(listener.message): - return None - limit_exceeded = "" - if listener.is_clone: - if clone_limit := config_dict["CLONE_LIMIT"]: - limit = clone_limit * 1024**3 - if size > limit: - limit_exceeded = f"Clone limit is {get_readable_file_size(limit)}." - elif is_mega: - if mega_limit := config_dict["MEGA_LIMIT"]: - limit = mega_limit * 1024**3 - if size > limit: - limit_exceeded = f"Mega limit is {get_readable_file_size(limit)}" - elif is_drive_link: - if gd_limit := config_dict["GDRIVE_LIMIT"]: - limit = gd_limit * 1024**3 - if size > limit: - limit_exceeded = ( - f"Google drive limit is {get_readable_file_size(limit)}" - ) - elif is_ytdlp: - if ytdlp_limit := config_dict["YTDLP_LIMIT"]: - limit = ytdlp_limit * 1024**3 - if size > limit: - limit_exceeded = f"Ytdlp limit is {get_readable_file_size(limit)}" - if ( - is_playlist != 0 - and (playlist_limit := config_dict["PLAYLIST_LIMIT"]) - and is_playlist > playlist_limit - ): - limit_exceeded = f"Playlist limit is {PLAYLIST_LIMIT}" - elif is_torrent: - if torrent_limit := config_dict["TORRENT_LIMIT"]: - limit = torrent_limit * 1024**3 - if size > limit: - limit_exceeded = f"Torrent limit is {get_readable_file_size(limit)}" - elif direct_limit := config_dict["DIRECT_LIMIT"]: - limit = direct_limit * 1024**3 - if size > limit: - limit_exceeded = f"Direct limit is {get_readable_file_size(limit)}" - if not limit_exceeded: - if (leech_limit := config_dict["LEECH_LIMIT"]) and listener.is_leech: - limit = leech_limit * 1024**3 - if size > limit: - limit_exceeded = f"Leech limit is {get_readable_file_size(limit)}" - if not listener.is_clone: - arch = any([listener.compress, listener.extract]) - limit = 3 * 1024**3 - acpt = await sync_to_async(check_storage_threshold, size, limit, arch) - if not acpt: - limit_exceeded = "You must leave 3GB free storage." - if limit_exceeded: - if size: - return f"{limit_exceeded}.\nYour file or folder size is {get_readable_file_size(size)}." - if is_playlist != 0: - return f"{limit_exceeded}.\nYour playlist has {is_playlist} files." 
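# A minimal, self-contained sketch of the slot arithmetic that the new
# check_running_tasks() applies above. The plain-int counters stand in for
# the bot's shared non_queued_dl/non_queued_up sets; only the over-limit
# rule itself mirrors the patch, and a limit of 0 means "unlimited".
def is_over_limit(state, dl_count, up_count, all_limit=0, state_limit=0):
    t_count = dl_count if state == "dl" else up_count
    return bool(
        (
            all_limit
            and dl_count + up_count >= all_limit
            and (not state_limit or t_count >= state_limit)
        )
        or (state_limit and t_count >= state_limit)
    )

# With QUEUE_ALL=5 and QUEUE_DOWNLOAD=3, a fourth download queues even though
# the global pool still has a free slot: the per-state limit wins first.
assert is_over_limit("dl", dl_count=3, up_count=1, all_limit=5, state_limit=3)
assert not is_over_limit("dl", dl_count=2, up_count=1, all_limit=5, state_limit=3)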
- return None - return None - - -async def task_utils(message): - msg = [] - button = None - user_id = message.from_user.id - token = config_dict["TOKEN_TIMEOUT"] - admin = await isAdmin(message) - if message.chat.type != message.chat.type.BOT: - if ids := config_dict["FSUB_IDS"]: - _msg, button = await forcesub(message, ids, button) - if _msg: - msg.append(_msg) - if not token or ( - token - and ( - admin - or user_id == OWNER_ID - or (user_id in user_data and user_data[user_id].get("is_sudo")) - ) - ): - _msg, button = await BotPm_check(message, button) - if _msg: - msg.append(_msg) - if ( - user_id == OWNER_ID - or user_id in user_data - and user_data[user_id].get("is_sudo") - ): - return msg, button - if admin: - return msg, button - token_msg, button = await checking_access(message.from_user.id, button) - if token_msg is not None: - msg.append(token_msg) - if (bmax_tasks := config_dict["BOT_MAX_TASKS"]) and len( - download_dict - ) >= bmax_tasks: - msg.append( - f"Bot Max Tasks limit exceeded.\nBot max tasks limit is {bmax_tasks}.\nPlease wait for the completion of other tasks." - ) - if (maxtask := config_dict["USER_MAX_TASKS"]) and await get_user_tasks( - message.from_user.id, maxtask - ): - msg.append(f"Your tasks limit exceeded for {maxtask} tasks") - return msg, button + for mid in list(queued_dl.keys()): + await start_dl_from_queued(mid) diff --git a/bot/helper/ext_utils/telegraph_helper.py b/bot/helper/ext_utils/telegraph_helper.py index 9a1ae5fcf..274dd1773 100644 --- a/bot/helper/ext_utils/telegraph_helper.py +++ b/bot/helper/ext_utils/telegraph_helper.py @@ -4,53 +4,53 @@ from telegraph.aio import Telegraph from telegraph.exceptions import RetryAfterError -from bot import LOGGER, bot_loop +from bot import LOGGER class TelegraphHelper: - def __init__(self): - self.telegraph = Telegraph(domain="graph.org") - self.short_name = token_hex(4) - self.access_token = None - self.author_name = "Aeon" - self.author_url = "https://t.me/ProjectAeon" + def __init__(self, author_name=None, author_url=None): + self._telegraph = Telegraph(domain="graph.org") + self._author_name = author_name + self._author_url = author_url async def create_account(self): - await self.telegraph.create_account( - short_name=self.short_name, - author_name=self.author_name, - author_url=self.author_url, - ) - self.access_token = self.telegraph.get_access_token() LOGGER.info("Creating Telegraph Account") + try: + await self._telegraph.create_account( + short_name=token_hex(4), + author_name=self._author_name, + author_url=self._author_url, + ) + except Exception as e: + LOGGER.error(f"Failed to create Telegraph Account: {e}") async def create_page(self, title, content): try: - return await self.telegraph.create_page( + return await self._telegraph.create_page( title=title, - author_name=self.author_name, - author_url=self.author_url, + author_name=self._author_name, + author_url=self._author_url, html_content=content, ) except RetryAfterError as st: LOGGER.warning( - f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds." + f"Telegraph Flood control exceeded. 
I will sleep for {st.retry_after} seconds.", ) await sleep(st.retry_after) return await self.create_page(title, content) async def edit_page(self, path, title, content): try: - return await self.telegraph.edit_page( + return await self._telegraph.edit_page( path=path, title=title, - author_name=self.author_name, - author_url=self.author_url, + author_name=self._author_name, + author_url=self._author_url, html_content=content, ) except RetryAfterError as st: LOGGER.warning( - f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds." + f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.", ) await sleep(st.retry_after) return await self.edit_page(path, title, content) @@ -73,9 +73,15 @@ async def edit_telegraph(self, path, telegraph_content): content += f' | Next' nxt_page += 1 await self.edit_page( - path=path[prev_page], title="Torrent Search", content=content + path=path[prev_page], + title="Mirror-leech-bot Torrent Search", + content=content, ) -telegraph = TelegraphHelper() -bot_loop.run_until_complete(telegraph.create_account()) +telegraph = TelegraphHelper( + "Mirror-Leech-Telegram-Bot", + "https://github.com/anasty17/mirror-leech-telegram-bot", +) + +print(__name__) diff --git a/bot/helper/listeners/aria2_listener.py b/bot/helper/listeners/aria2_listener.py index 2187ee5ad..876d9e9cd 100644 --- a/bot/helper/listeners/aria2_listener.py +++ b/bot/helper/listeners/aria2_listener.py @@ -1,129 +1,65 @@ import contextlib -from time import time from asyncio import sleep +from time import time from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove +from aiofiles.os import remove -from bot import LOGGER, aria2, config_dict, download_dict, download_dict_lock +from bot import LOGGER, aria2, intervals, task_dict, task_dict_lock +from bot.core.config_manager import Config from bot.helper.ext_utils.bot_utils import ( - new_thread, - sync_to_async, - get_task_by_gid, - get_telegraph_list, bt_selection_buttons, + loop_thread, + sync_to_async, ) -from bot.helper.ext_utils.files_utils import get_base_name, clean_unwanted -from bot.helper.ext_utils.task_manager import limit_checker +from bot.helper.ext_utils.files_utils import clean_unwanted +from bot.helper.ext_utils.status_utils import get_task_by_gid +from bot.helper.ext_utils.task_manager import stop_duplicate_check +from bot.helper.mirror_leech_utils.status_utils.aria2_status import Aria2Status from bot.helper.telegram_helper.message_utils import ( - delete_links, - send_message, delete_message, - update_all_messages, + send_message, + update_status_message, ) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper -from bot.helper.mirror_leech_utils.status_utils.aria2_status import Aria2Status -@new_thread -async def __on_download_started(api, gid): +@loop_thread +async def _on_download_started(api, gid): download = await sync_to_async(api.get_download, gid) if download.options.follow_torrent == "false": return if download.is_metadata: - LOGGER.info(f"on_download_started: {gid} METADATA") + LOGGER.info(f"onDownloadStarted: {gid} METADATA") await sleep(1) - if dl := await get_task_by_gid(gid): - listener = dl.listener() - if listener.select: + if task := await get_task_by_gid(gid): + task.listener.is_torrent = True + if task.listener.select: metamsg = "Downloading Metadata, wait then you can select files. Use torrent file to avoid this wait." 
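# The metadata branch here reduces to "notify the user, then poll until aria2
# either drops the magnet or spawns the real download". A stripped-down sketch
# of that polling shape; poll() and on_resolved() are illustrative stand-ins,
# not names from this patch.
from asyncio import run, sleep

async def wait_for_metadata(poll, on_resolved, interval=0.5):
    while True:
        state = poll()  # e.g. {"removed": False, "followed_by_ids": []}
        if state["removed"] or state["followed_by_ids"]:
            on_resolved(state)
            return
        await sleep(interval)

# Usage: resolves on the second poll, when aria2 reports a follow-up gid.
states = iter(
    [
        {"removed": False, "followed_by_ids": []},
        {"removed": False, "followed_by_ids": ["gid2"]},
    ]
)
run(wait_for_metadata(lambda: next(states), print))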
- meta = await send_message(listener.message, metamsg) + meta = await send_message(task.listener.message, metamsg) while True: await sleep(0.5) if download.is_removed or download.followed_by_ids: await delete_message(meta) break - download = download.live + await sync_to_async(download.update) return - LOGGER.info(f"Download Started: {download.name} - Gid: {gid}") - dl = None - if config_dict["STOP_DUPLICATE"]: - await sleep(1) - if dl is None: - dl = await get_task_by_gid(gid) - if dl: - if not hasattr(dl, "listener"): - LOGGER.warning( - f"on_download_start: {gid}. STOP_DUPLICATE didn't pass since download completed earlier!" - ) - return - listener = dl.listener() - if ( - not listener.is_leech - and not listener.select - and listener.upPath == "gd" - ): - download = await sync_to_async(api.get_download, gid) - if not download.is_torrent: - await sleep(3) - download = download.live - LOGGER.info("Checking File/Folder if already in Drive...") - name = download.name - if listener.compress: - name = f"{name}.zip" - elif listener.extract: - try: - name = get_base_name(name) - except Exception: - name = None - if name is not None: - telegraph_content, contents_no = await sync_to_async( - GoogleDriveHelper().drive_list, name, True - ) - if telegraph_content: - msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:" - button = await get_telegraph_list(telegraph_content) - await listener.onDownloadError(msg, button) - await sync_to_async( - api.remove, [download], force=True, files=True - ) - await delete_links(listener.message) - return + LOGGER.info(f"onDownloadStarted: {download.name} - Gid: {gid}") await sleep(1) - if dl is None: - dl = await get_task_by_gid(gid) - if dl is not None: - if not hasattr(dl, "listener"): - LOGGER.warning( - f"on_download_start: {gid}. at Download limit didn't pass since download completed earlier!" 
- ) - return - listener = dl.listener() + + await sleep(2) + if task := await get_task_by_gid(gid): download = await sync_to_async(api.get_download, gid) - download = download.live - if download.total_length == 0: - start_time = time() - while time() - start_time <= 15: - await sleep(0.5) - download = await sync_to_async(api.get_download, gid) - download = download.live - if download.followed_by_ids: - download = await sync_to_async( - api.get_download, download.followed_by_ids[0] - ) - if download.total_length > 0: - break - size = download.total_length - if limit_exceeded := await limit_checker( - size, listener, download.is_torrent - ): - await listener.onDownloadError(limit_exceeded) + await sync_to_async(download.update) + task.listener.name = download.name + msg, button = await stop_duplicate_check(task.listener) + if msg: + await task.listener.on_download_error(msg, button) await sync_to_async(api.remove, [download], force=True, files=True) - await delete_links(listener.message) + return -@new_thread -async def __on_download_complete(api, gid): +@loop_thread +async def _on_download_complete(api, gid): try: download = await sync_to_async(api.get_download, gid) except Exception: @@ -133,103 +69,112 @@ async def __on_download_complete(api, gid): if download.followed_by_ids: new_gid = download.followed_by_ids[0] LOGGER.info(f"Gid changed from {gid} to {new_gid}") - if dl := await get_task_by_gid(new_gid): - listener = dl.listener() - if config_dict["BASE_URL"] and listener.select: - if not dl.queued: + if task := await get_task_by_gid(new_gid): + task.listener.is_torrent = True + if Config.BASE_URL and task.listener.select: + if not task.queued: await sync_to_async(api.client.force_pause, new_gid) - s_buttons = bt_selection_buttons(new_gid) + SBUTTONS = bt_selection_buttons(new_gid) msg = "Your download paused. Choose files then press Done Selecting button to start downloading." 
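# When a magnet's metadata completes, aria2 retires the old gid and continues
# the real transfer under download.followed_by_ids[0]; the listener therefore
# re-resolves the task by the new gid (get_task_by_gid(new_gid) above). A toy
# illustration of why the task mapping has to follow the gid; rekey_task and
# the dict are illustrative, not names from this patch.
def rekey_task(tasks, old_gid, new_gid):
    # Move the entry so later notifications, which carry new_gid, still hit it.
    if old_gid in tasks:
        tasks[new_gid] = tasks.pop(old_gid)
    return tasks

tasks = {"abc123": "task-object"}
print(rekey_task(tasks, "abc123", "def456"))  # {'def456': 'task-object'}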
- await send_message(listener.message, msg, s_buttons) + await send_message(task.listener.message, msg, SBUTTONS) elif download.is_torrent: - if ( - (dl := await get_task_by_gid(gid)) - and hasattr(dl, "listener") - and dl.seeding - ): - LOGGER.info(f"Cancelling Seed: {download.name} on_download_complete") - listener = dl.listener() - await listener.onUploadError( - f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}" - ) - await sync_to_async(api.remove, [download], force=True, files=True) + if task := await get_task_by_gid(gid): + task.listener.is_torrent = True + if hasattr(task, "seeding") and task.seeding: + LOGGER.info(f"Cancelling Seed: {download.name} onDownloadComplete") + await task.listener.on_upload_error( + f"Seeding stopped with Ratio: {task.ratio()} and Time: {task.seeding_time()}", + ) + await sync_to_async(api.remove, [download], force=True, files=True) else: - LOGGER.info(f"on_download_complete: {download.name} - Gid: {gid}") - if dl := await get_task_by_gid(gid): - listener = dl.listener() - await listener.on_download_complete() + LOGGER.info(f"onDownloadComplete: {download.name} - Gid: {gid}") + if task := await get_task_by_gid(gid): + await task.listener.on_download_complete() + if intervals["stopAll"]: + return await sync_to_async(api.remove, [download], force=True, files=True) -@new_thread -async def __on_bt_dl_complete(api, gid): +@loop_thread +async def _on_bt_download_complete(api, gid): seed_start_time = time() await sleep(1) download = await sync_to_async(api.get_download, gid) - if download.options.follow_torrent == "false": - return LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}") - if dl := await get_task_by_gid(gid): - listener = dl.listener() - if listener.select: + if task := await get_task_by_gid(gid): + task.listener.is_torrent = True + if task.listener.select: res = download.files for file_o in res: f_path = file_o.path if not file_o.selected and await aiopath.exists(f_path): with contextlib.suppress(Exception): - await aioremove(f_path) + await remove(f_path) await clean_unwanted(download.dir) - if listener.seed: + if task.listener.seed: try: await sync_to_async( - api.set_options, {"max-upload-limit": "0"}, [download] + api.set_options, + {"max-upload-limit": "0"}, + [download], ) except Exception as e: LOGGER.error( - f"{e} You are not able to seed because you added global option seed-time=0 without adding specific seed_time for this torrent GID: {gid}" + f"{e} You are not able to seed because you added global option seed-time=0 without adding specific seed_time for this torrent GID: {gid}", ) else: try: await sync_to_async(api.client.force_pause, gid) except Exception as e: LOGGER.error(f"{e} GID: {gid}") - await listener.on_download_complete() - download = download.live - if listener.seed: - if download.is_complete: - if dl := await get_task_by_gid(gid): - LOGGER.info(f"Cancelling Seed: {download.name}") - await listener.onUploadError( - f"Seeding stopped with Ratio: {dl.ratio()} and Time: {dl.seeding_time()}" - ) + await task.listener.on_download_complete() + if intervals["stopAll"]: + return + await sync_to_async(download.update) + if ( + task.listener.seed + and download.is_complete + and await get_task_by_gid(gid) + ): + LOGGER.info(f"Cancelling Seed: {download.name}") + await task.listener.on_upload_error( + f"Seeding stopped with Ratio: {task.ratio()} and Time: {task.seeding_time()}", + ) + await sync_to_async(api.remove, [download], force=True, files=True) + elif ( + task.listener.seed + and 
download.is_complete + and not await get_task_by_gid(gid) + ): + pass + elif task.listener.seed and not task.listener.is_cancelled: + async with task_dict_lock: + if task.listener.mid not in task_dict: await sync_to_async( - api.remove, [download], force=True, files=True + api.remove, + [download], + force=True, + files=True, ) - else: - async with download_dict_lock: - if listener.uid not in download_dict: - await sync_to_async( - api.remove, [download], force=True, files=True - ) - return - download_dict[listener.uid] = Aria2Status(gid, listener, True) - download_dict[listener.uid].start_time = seed_start_time - LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}") - await update_all_messages() + return + task_dict[task.listener.mid] = Aria2Status(task.listener, gid, True) + task_dict[task.listener.mid].start_time = seed_start_time + LOGGER.info(f"Seeding started: {download.name} - Gid: {gid}") + await update_status_message(task.listener.message.chat.id) else: await sync_to_async(api.remove, [download], force=True, files=True) -@new_thread -async def __on_download_stopped(_, gid): - await sleep(6) - if dl := await get_task_by_gid(gid): - listener = dl.listener() - await listener.onDownloadError("Dead torrent!") +@loop_thread +async def _on_download_stopped(_, gid): + await sleep(4) + if task := await get_task_by_gid(gid): + await task.listener.on_download_error("Dead torrent!") -@new_thread -async def __on_download_error(api, gid): +@loop_thread +async def _on_download_error(api, gid): + await sleep(1) LOGGER.info(f"onDownloadError: {gid}") error = "None" try: @@ -240,18 +185,17 @@ async def __on_download_error(api, gid): LOGGER.info(f"Download Error: {error}") except Exception: pass - if dl := await get_task_by_gid(gid): - listener = dl.listener() - await listener.onDownloadError(error) + if task := await get_task_by_gid(gid): + await task.listener.on_download_error(error) def start_aria2_listener(): aria2.listen_to_notifications( threaded=False, - on_download_start=__on_download_started, - on_download_error=__on_download_error, - on_download_stop=__on_download_stopped, - on_download_complete=__on_download_complete, - on_bt_download_complete=__on_bt_dl_complete, + on_download_start=_on_download_started, + on_download_error=_on_download_error, + on_download_stop=_on_download_stopped, + on_download_complete=_on_download_complete, + on_bt_download_complete=_on_bt_download_complete, timeout=60, ) diff --git a/bot/helper/listeners/direct_listener.py b/bot/helper/listeners/direct_listener.py index 7e0aaae9e..34ac060b7 100644 --- a/bot/helper/listeners/direct_listener.py +++ b/bot/helper/listeners/direct_listener.py @@ -5,78 +5,79 @@ class DirectListener: - def __init__(self, foldername, total_size, path, listener, a2c_opt): - self.__path = path - self.__listener = listener - self.__is_cancelled = False - self.__a2c_opt = a2c_opt - self.task = None - self.name = foldername - self.total_size = total_size - self.proc_bytes = 0 - self.failed = 0 + def __init__(self, path, listener, a2c_opt): + self.listener = listener + self._path = path + self._a2c_opt = a2c_opt + self._proc_bytes = 0 + self._failed = 0 + self.download_task = None + self.name = self.listener.name @property def processed_bytes(self): - if self.task: - return self.proc_bytes + self.task.completed_length - return self.proc_bytes + if self.download_task: + return self._proc_bytes + self.download_task.completed_length + return self._proc_bytes @property def speed(self): - return self.task.download_speed if self.task else 0 + 
return self.download_task.download_speed if self.download_task else 0 def download(self, contents): self.is_downloading = True for content in contents: - if self.__is_cancelled: + if self.listener.is_cancelled: break if content["path"]: - self.__a2c_opt["dir"] = f"{self.__path}/{content['path']}" + self._a2c_opt["dir"] = f"{self._path}/{content['path']}" else: - self.__a2c_opt["dir"] = self.__path + self._a2c_opt["dir"] = self._path filename = content["filename"] - self.__a2c_opt["out"] = filename + self._a2c_opt["out"] = filename try: - self.task = aria2.add_uris( - [content["url"]], self.__a2c_opt, position=0 + self.download_task = aria2.add_uris( + [content["url"]], + self._a2c_opt, + position=0, ) except Exception as e: - self.failed += 1 + self._failed += 1 LOGGER.error(f"Unable to download {filename} due to: {e}") continue - self.task = self.task.live + self.download_task = self.download_task.live while True: - if self.__is_cancelled: - if self.task: - self.task.remove(True, True) + if self.listener.is_cancelled: + if self.download_task: + self.download_task.remove(True, True) break - self.task = self.task.live - if error_message := self.task.error_message: - self.failed += 1 + self.download_task = self.download_task.live + if error_message := self.download_task.error_message: + self._failed += 1 LOGGER.error( - f"Unable to download {self.task.name} due to: {error_message}" + f"Unable to download {self.download_task.name} due to: {error_message}", ) - self.task.remove(True, True) + self.download_task.remove(True, True) break - if self.task.is_complete: - self.proc_bytes += self.task.total_length - self.task.remove(True) + if self.download_task.is_complete: + self._proc_bytes += self.download_task.total_length + self.download_task.remove(True) break sleep(1) - self.task = None - if self.__is_cancelled: + self.download_task = None + if self.listener.is_cancelled: return - if self.failed == len(contents): + if self._failed == len(contents): async_to_sync( - self.__listener.onDownloadError, "All files are failed to download!" 
+ self.listener.on_download_error, + "All files are failed to download!", ) return - async_to_sync(self.__listener.on_download_complete) + async_to_sync(self.listener.on_download_complete) - async def cancel_download(self): - self.__is_cancelled = True - LOGGER.info(f"Cancelling Download: {self.name}") - await self.__listener.onDownloadError("Download Cancelled by User!") - if self.task: - await sync_to_async(self.task.remove, force=True, files=True) + async def cancel_task(self): + self.listener.is_cancelled = True + LOGGER.info(f"Cancelling Download: {self.listener.name}") + await self.listener.on_download_error("Download Cancelled by User!") + if self.download_task: + await sync_to_async(self.download_task.remove, force=True, files=True) diff --git a/bot/helper/listeners/mega_listener.py b/bot/helper/listeners/mega_listener.py new file mode 100644 index 000000000..da7d86dff --- /dev/null +++ b/bot/helper/listeners/mega_listener.py @@ -0,0 +1,140 @@ +from threading import Event + +from mega import MegaApi, MegaError, MegaListener, MegaRequest, MegaTransfer + +from bot import LOGGER +from bot.helper.ext_utils.bot_utils import async_to_sync, sync_to_async + + +class AsyncExecutor: + def __init__(self): + self.continue_event = Event() + + def do(self, function, args): + self.continue_event.clear() + function(*args) + self.continue_event.wait() + + +async def mega_login(executor, api, email, password): + if email and password: + await sync_to_async( + executor.do, + api.login, + (email, password), + ) + + +async def mega_logout(executor, api, folder_api=None): + await sync_to_async( + executor.do, + api.logout, + (), + ) + if folder_api: + await sync_to_async( + executor.do, + folder_api.logout, + (), + ) + + +class MegaAppListener(MegaListener): + _NO_EVENT_ON = ( + MegaRequest.TYPE_LOGIN, + MegaRequest.TYPE_FETCH_NODES, + ) + + def __init__(self, continue_event: Event, listener): + super().__init__() + self.continue_event = continue_event + self.node = None + self.public_node = None + self.listener = listener + self.is_cancelled = False + self.error = None + self._bytes_transferred = 0 + self._speed = 0 + self._name = "" + + @property + def speed(self): + return self._speed + + @property + def downloaded_bytes(self): + return self._bytes_transferred + + def onRequestFinish(self, api, request, error): # noqa: N802 + if str(error).lower() != "no error": + self.error = error.copy() + if str(self.error).casefold() != "not found": + LOGGER.error(f"Mega onRequestFinishError: {self.error}") + self.continue_event.set() + return + + request_type = request.getType() + + if request_type == MegaRequest.TYPE_LOGIN: + api.fetchNodes() + elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE: + self.public_node = request.getPublicMegaNode() + self._name = self.public_node.getName() + elif request_type == MegaRequest.TYPE_FETCH_NODES: + LOGGER.info("Fetching Root Node.") + self.node = api.getRootNode() + self._name = self.node.getName() + LOGGER.info(f"Node Name: {self.node.getName()}") + + if request_type not in self._NO_EVENT_ON or ( + self.node and "cloud drive" not in self._name.lower() + ): + self.continue_event.set() + + def onRequestTemporaryError(self, _, __, error: MegaError): # noqa: N802 + LOGGER.error(f"Mega Request error in {error}") + if not self.is_cancelled: + self.is_cancelled = True + async_to_sync( + self.listener.on_download_error, + f"RequestTempError: {error.toString()}", + ) + self.error = error.toString() + self.continue_event.set() + + def onTransferUpdate(self, api: MegaApi, 
transfer: MegaTransfer): # noqa: N802 + if self.is_cancelled: + api.cancelTransfer(transfer, None) + self.continue_event.set() + return + self._speed = transfer.getSpeed() + self._bytes_transferred = transfer.getTransferredBytes() + + def onTransferFinish(self, _: MegaApi, transfer: MegaTransfer, __): # noqa: N802 + try: + if self.is_cancelled: + self.continue_event.set() + elif transfer.isFinished() and ( + transfer.isFolderTransfer() or transfer.getFileName() == self._name + ): + async_to_sync(self.listener.on_download_complete) + self.continue_event.set() + except Exception as e: + LOGGER.error(e) + + def onTransferTemporaryError(self, _, transfer, error): # noqa: N802 + LOGGER.error( + f"Mega download error in file {transfer.getFileName()}: {error}", + ) + if transfer.getState() in [1, 4]: + return + self.error = ( + f"TransferTempError: {error.toString()} ({transfer.getFileName()})" + ) + if not self.is_cancelled: + self.is_cancelled = True + self.continue_event.set() + + async def cancel_task(self): + self.is_cancelled = True + await self.listener.on_download_error("Download Canceled by user") diff --git a/bot/helper/listeners/qbit_listener.py b/bot/helper/listeners/qbit_listener.py index 79952ae3e..adbb06c6a 100644 --- a/bot/helper/listeners/qbit_listener.py +++ b/bot/helper/listeners/qbit_listener.py @@ -1,159 +1,157 @@ -from time import time +import contextlib from asyncio import sleep +from time import time + +from aiofiles.os import path as aiopath +from aiofiles.os import remove from bot import ( LOGGER, - QbInterval, - QbTorrents, - bot_loop, - config_dict, - xnox_client, - download_dict, + intervals, qb_listener_lock, - download_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - new_task, - sync_to_async, - get_task_by_gid, - get_readable_time, + qb_torrents, + task_dict, + task_dict_lock, + xnox_client, ) +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async from bot.helper.ext_utils.files_utils import clean_unwanted -from bot.helper.ext_utils.task_manager import limit_checker, stop_duplicate_check -from bot.helper.telegram_helper.message_utils import update_all_messages +from bot.helper.ext_utils.status_utils import get_readable_time, get_task_by_gid +from bot.helper.ext_utils.task_manager import stop_duplicate_check from bot.helper.mirror_leech_utils.status_utils.qbit_status import QbittorrentStatus +from bot.helper.telegram_helper.message_utils import update_status_message -async def __remove_torrent(hash_, tag): +async def _remove_torrent(hash_, tag): await sync_to_async( - xnox_client.torrents_delete, torrent_hashes=hash_, delete_files=True + xnox_client.torrents_delete, + torrent_hashes=hash_, + delete_files=True, ) async with qb_listener_lock: - if tag in QbTorrents: - del QbTorrents[tag] + if tag in qb_torrents: + del qb_torrents[tag] await sync_to_async(xnox_client.torrents_delete_tags, tags=tag) @new_task -async def __on_download_error(err, tor, button=None): +async def _on_download_error(err, tor, button=None): LOGGER.info(f"Cancelling Download: {tor.name}") ext_hash = tor.hash - download = await get_task_by_gid(ext_hash[:8]) - listener = download.listener() - await listener.onDownloadError(err, button) - await sync_to_async(xnox_client.torrents_pause, torrent_hashes=ext_hash) + if task := await get_task_by_gid(ext_hash[:12]): + await task.listener.on_download_error(err, button) + await sync_to_async(xnox_client.torrents_stop, torrent_hashes=ext_hash) await sleep(0.3) - await 
__remove_torrent(ext_hash, tor.tags) + await _remove_torrent(ext_hash, tor.tags) @new_task -async def __on_seed_finish(tor): +async def _on_seed_finish(tor): ext_hash = tor.hash LOGGER.info(f"Cancelling Seed: {tor.name}") - download = await get_task_by_gid(ext_hash[:8]) - if not hasattr(download, "seeders_num"): - return - listener = download.listener() - msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time, True)}" - await listener.onUploadError(msg) - await __remove_torrent(ext_hash, tor.tags) + if task := await get_task_by_gid(ext_hash[:12]): + msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {get_readable_time(tor.seeding_time)}" + await task.listener.on_upload_error(msg) + await _remove_torrent(ext_hash, tor.tags) @new_task -async def __stop_duplicate(tor): - download = await get_task_by_gid(tor.hash[:8]) - if not hasattr(download, "listener"): - return - listener = download.listener() - name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0] - msg, button = await stop_duplicate_check(name, listener) - if msg: - __on_download_error(msg, tor, button) +async def _stop_duplicate(tor): + if ( + task := await get_task_by_gid(tor.hash[:12]) + ) and task.listener.stop_duplicate: + task.listener.name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[0] + msg, button = await stop_duplicate_check(task.listener) + if msg: + _on_download_error(msg, tor, button) @new_task -async def __size_checked(tor): - download = await get_task_by_gid(tor.hash[:8]) - if hasattr(download, "listener"): - listener = download.listener() - size = tor.size - if limit_exceeded := await limit_checker(size, listener, True): - await __on_download_error(limit_exceeded, tor) - - -@new_task -async def __on_download_complete(tor): +async def _on_download_complete(tor): ext_hash = tor.hash tag = tor.tags - await sleep(2) - download = await get_task_by_gid(ext_hash[:8]) - listener = download.listener() - if not listener.seed: - await sync_to_async(xnox_client.torrents_pause, torrent_hashes=ext_hash) - if listener.select: - await clean_unwanted(listener.dir) - await listener.on_download_complete() - if listener.seed: - async with download_dict_lock: - if listener.uid in download_dict: - removed = False - download_dict[listener.uid] = QbittorrentStatus(listener, True) - else: - removed = True - if removed: - await __remove_torrent(ext_hash, tag) + if task := await get_task_by_gid(ext_hash[:12]): + if not task.listener.seed: + await sync_to_async( + xnox_client.torrents_stop, + torrent_hashes=ext_hash, + ) + if task.listener.select: + await clean_unwanted(task.listener.dir) + path = tor.content_path.rsplit("/", 1)[0] + res = await sync_to_async( + xnox_client.torrents_files, + torrent_hash=ext_hash, + ) + for f in res: + if f.priority == 0 and await aiopath.exists(f"{path}/{f.name}"): + with contextlib.suppress(Exception): + await remove(f"{path}/{f.name}") + await task.listener.on_download_complete() + if intervals["stopAll"]: return - async with qb_listener_lock: - if tag in QbTorrents: - QbTorrents[tag]["seeding"] = True - else: + if task.listener.seed and not task.listener.is_cancelled: + async with task_dict_lock: + if task.listener.mid in task_dict: + removed = False + task_dict[task.listener.mid] = QbittorrentStatus( + task.listener, + True, + ) + else: + removed = True + if removed: + await _remove_torrent(ext_hash, tag) return - await update_all_messages() - LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}") + async with 
qb_listener_lock: + if tag in qb_torrents: + qb_torrents[tag]["seeding"] = True + else: + return + await update_status_message(task.listener.message.chat.id) + LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}") + else: + await _remove_torrent(ext_hash, tag) else: - await __remove_torrent(ext_hash, tag) + await _remove_torrent(ext_hash, tag) -async def __qb_listener(): +@new_task +async def _qb_listener(): while True: async with qb_listener_lock: try: - if len(await sync_to_async(xnox_client.torrents_info)) == 0: - QbInterval.clear() + torrents = await sync_to_async(xnox_client.torrents_info) + if len(torrents) == 0: + intervals["qb"] = "" break - for tor_info in await sync_to_async(xnox_client.torrents_info): + for tor_info in torrents: tag = tor_info.tags - if tag not in QbTorrents: + if tag not in qb_torrents: continue state = tor_info.state if state == "metaDL": - TORRENT_TIMEOUT = config_dict["TORRENT_TIMEOUT"] - QbTorrents[tag]["stalled_time"] = time() + qb_torrents[tag]["stalled_time"] = time() if ( - TORRENT_TIMEOUT - and time() - tor_info.added_on >= TORRENT_TIMEOUT + Config.TORRENT_TIMEOUT + and time() - qb_torrents[tag]["start_time"] + >= Config.TORRENT_TIMEOUT ): - __on_download_error("Dead Torrent!", tor_info) + await _on_download_error("Dead Torrent!", tor_info) else: await sync_to_async( xnox_client.torrents_reannounce, torrent_hashes=tor_info.hash, ) elif state == "downloading": - QbTorrents[tag]["stalled_time"] = time() - if ( - config_dict["STOP_DUPLICATE"] - and not QbTorrents[tag]["stop_dup_check"] - ): - QbTorrents[tag]["stop_dup_check"] = True - __stop_duplicate(tor_info) - if not QbTorrents[tag]["size_checked"]: - QbTorrents[tag]["size_checked"] = True - __size_checked(tor_info) + qb_torrents[tag]["stalled_time"] = time() + if not qb_torrents[tag]["stop_dup_check"]: + qb_torrents[tag]["stop_dup_check"] = True + await _stop_duplicate(tor_info) elif state == "stalledDL": - TORRENT_TIMEOUT = config_dict["TORRENT_TIMEOUT"] if ( - not QbTorrents[tag]["rechecked"] + not qb_torrents[tag]["rechecked"] and 0.99989999999999999 < tor_info.progress < 1 ): msg = f"Force recheck - Name: {tor_info.name} Hash: " @@ -164,13 +162,13 @@ async def __qb_listener(): xnox_client.torrents_recheck, torrent_hashes=tor_info.hash, ) - QbTorrents[tag]["rechecked"] = True + qb_torrents[tag]["rechecked"] = True elif ( - TORRENT_TIMEOUT - and time() - QbTorrents[tag]["stalled_time"] - >= TORRENT_TIMEOUT + Config.TORRENT_TIMEOUT + and time() - qb_torrents[tag]["stalled_time"] + >= Config.TORRENT_TIMEOUT ): - __on_download_error("Dead Torrent!", tor_info) + await _on_download_error("Dead Torrent!", tor_info) else: await sync_to_async( xnox_client.torrents_reannounce, @@ -182,23 +180,25 @@ async def __qb_listener(): torrent_hashes=tor_info.hash, ) elif state == "error": - __on_download_error( - "No enough space for this torrent on device", tor_info + await _on_download_error( + "No enough space for this torrent on device", + tor_info, ) elif ( - tor_info.completion_on != 0 - and not QbTorrents[tag]["uploaded"] + tor_info.completion_on != -1 + and not qb_torrents[tag]["uploaded"] and state not in ["checkingUP", "checkingDL", "checkingResumeData"] ): - QbTorrents[tag]["uploaded"] = True - __on_download_complete(tor_info) + qb_torrents[tag]["uploaded"] = True + await _on_download_complete(tor_info) elif ( - state in ["pausedUP", "pausedDL"] - and QbTorrents[tag]["seeding"] + state in ["stoppedUP", "stoppedDL"] + and qb_torrents[tag]["seeding"] ): - QbTorrents[tag]["seeding"] = False - 
__on_seed_finish(tor_info) + qb_torrents[tag]["seeding"] = False + await _on_seed_finish(tor_info) + await sleep(0.5) except Exception as e: LOGGER.error(str(e)) await sleep(3) @@ -206,14 +206,13 @@ async def __qb_listener(): async def on_download_start(tag): async with qb_listener_lock: - QbTorrents[tag] = { + qb_torrents[tag] = { + "start_time": time(), "stalled_time": time(), "stop_dup_check": False, "rechecked": False, "uploaded": False, "seeding": False, - "size_checked": False, } - if not QbInterval: - periodic = bot_loop.create_task(__qb_listener()) - QbInterval.append(periodic) + if not intervals["qb"]: + intervals["qb"] = await _qb_listener() diff --git a/bot/helper/listeners/task_listener.py b/bot/helper/listeners/task_listener.py new file mode 100644 index 000000000..92f809f71 --- /dev/null +++ b/bot/helper/listeners/task_listener.py @@ -0,0 +1,558 @@ +from asyncio import create_task, gather, sleep +from html import escape + +from aiofiles.os import listdir, makedirs, remove +from aiofiles.os import path as aiopath +from aioshutil import move +from requests import utils as rutils + +from bot import ( + LOGGER, + aria2, + intervals, + non_queued_dl, + non_queued_up, + queue_dict_lock, + queued_dl, + queued_up, + same_directory_lock, + task_dict, + task_dict_lock, +) +from bot.core.config_manager import Config +from bot.helper.common import TaskConfig +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.files_utils import ( + clean_download, + clean_target, + create_recursive_symlink, + get_path_size, + join_files, +) +from bot.helper.ext_utils.links_utils import is_gdrive_id +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.ext_utils.task_manager import check_running_tasks, start_from_queued +from bot.helper.mirror_leech_utils.gdrive_utils.upload import GoogleDriveUpload +from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper +from bot.helper.mirror_leech_utils.status_utils.gdrive_status import ( + GoogleDriveStatus, +) +from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus +from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus +from bot.helper.mirror_leech_utils.status_utils.telegram_status import TelegramStatus +from bot.helper.mirror_leech_utils.telegram_uploader import TelegramUploader +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + delete_status, + five_minute_del, + send_message, + update_status_message, +) + + +class TaskListener(TaskConfig): + def __init__(self): + super().__init__() + + async def clean(self): + try: + if st := intervals["status"]: + for intvl in list(st.values()): + intvl.cancel() + intervals["status"].clear() + await gather(sync_to_async(aria2.purge), delete_status()) + except Exception: + pass + + async def remove_from_same_dir(self): + async with task_dict_lock: + if ( + self.folder_name + and self.same_dir + and self.mid in self.same_dir[self.folder_name]["tasks"] + ): + self.same_dir[self.folder_name]["tasks"].remove(self.mid) + self.same_dir[self.folder_name]["total"] -= 1 + + async def on_download_start(self): + if ( + self.is_super_chat + and Config.INCOMPLETE_TASK_NOTIFIER + and Config.DATABASE_URL + ): + await database.add_incomplete_task( + self.message.chat.id, + self.message.link, + self.tag, + ) + + async def on_download_complete(self): + await sleep(2) + 
multi_links = False + if ( + self.folder_name + and self.same_dir + and self.mid in self.same_dir[self.folder_name]["tasks"] + ): + async with same_directory_lock: + while True: + async with task_dict_lock: + if self.mid not in self.same_dir[self.folder_name]["tasks"]: + return + if self.mid in self.same_dir[self.folder_name]["tasks"] and ( + self.same_dir[self.folder_name]["total"] <= 1 + or len(self.same_dir[self.folder_name]["tasks"]) > 1 + ): + if self.same_dir[self.folder_name]["total"] > 1: + self.same_dir[self.folder_name]["tasks"].remove( + self.mid, + ) + self.same_dir[self.folder_name]["total"] -= 1 + spath = f"{self.dir}{self.folder_name}" + des_id = next( + iter(self.same_dir[self.folder_name]["tasks"]), + ) + des_path = f"{Config.DOWNLOAD_DIR}{des_id}{self.folder_name}" + await makedirs(des_path, exist_ok=True) + LOGGER.info( + f"Moving files from {self.mid} to {des_id}", + ) + for item in await listdir(spath): + if item.endswith((".aria2", ".!qB")): + continue + item_path = ( + f"{self.dir}{self.folder_name}/{item}" + ) + if item in await listdir(des_path): + await move( + item_path, + f"{des_path}/{self.mid}-{item}", + ) + else: + await move(item_path, f"{des_path}/{item}") + multi_links = True + break + await sleep(1) + async with task_dict_lock: + download = task_dict[self.mid] + self.name = download.name() + gid = download.gid() + LOGGER.info(f"Download completed: {self.name}") + + if not (self.is_torrent or self.is_qbit): + self.seed = False + + if multi_links: + self.seed = False + await self.on_upload_error( + f"{self.name} Downloaded!\n\nWaiting for other tasks to finish...", + ) + return + + if self.folder_name: + self.name = self.folder_name.strip("/") + + if not await aiopath.exists(f"{self.dir}/{self.name}"): + try: + files = await listdir(self.dir) + self.name = files[-1] + if self.name == "yt-dlp-thumb": + self.name = files[0] + except Exception as e: + await self.on_upload_error(str(e)) + return + + dl_path = f"{self.dir}/{self.name}" + self.size = await get_path_size(dl_path) + self.is_file = await aiopath.isfile(dl_path) + if self.seed: + self.up_dir = f"{self.dir}10000" + up_path = f"{self.up_dir}/{self.name}" + await create_recursive_symlink(self.dir, self.up_dir) + LOGGER.info(f"Shortcut created: {dl_path} -> {up_path}") + else: + up_path = dl_path + if not Config.QUEUE_ALL: + async with queue_dict_lock: + if self.mid in non_queued_dl: + non_queued_dl.remove(self.mid) + await start_from_queued() + + if self.join and not self.is_file: + await join_files(up_path) + + if self.extract: + up_path = await self.proceed_extract(up_path, gid) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + if self.watermark: + up_path = await self.proceed_watermark( + up_path, + gid, + ) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + if self.metadata: + up_path = await self.proceed_metadata( + up_path, + gid, + ) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + self.subname = "" + self.subsize 
= 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + if self.ffmpeg_cmds: + up_path = await self.proceed_ffmpeg( + up_path, + gid, + ) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + if self.name_sub: + up_path = await self.substitute(up_path) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + self.name = up_path.rsplit("/", 1)[1] + + if self.screen_shots: + up_path = await self.generate_screenshots(up_path) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + if self.convert_audio or self.convert_video: + up_path = await self.convert_media( + up_path, + gid, + ) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + if self.sample_video: + up_path = await self.generate_sample_video( + up_path, + gid, + ) + if self.is_cancelled: + return + self.is_file = await aiopath.isfile(up_path) + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + if self.compress: + up_path = await self.proceed_compress( + up_path, + gid, + ) + self.is_file = await aiopath.isfile(up_path) + if self.is_cancelled: + return + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + up_dir, self.name = up_path.rsplit("/", 1) + self.size = await get_path_size(up_dir) + + if self.is_leech and not self.compress: + await self.proceed_split( + up_path, + gid, + ) + if self.is_cancelled: + return + self.subname = "" + self.subsize = 0 + self.files_to_proceed = [] + self.proceed_count = 0 + self.progress = True + + add_to_queue, event = await check_running_tasks(self, "up") + await start_from_queued() + if add_to_queue: + LOGGER.info(f"Added to Queue/Upload: {self.name}") + async with task_dict_lock: + task_dict[self.mid] = QueueStatus(self, gid, "Up") + await event.wait() + if self.is_cancelled: + return + LOGGER.info(f"Start from Queued/Upload: {self.name}") + + self.size = await get_path_size(up_dir) + + if self.is_leech: + LOGGER.info(f"Leech Name: {self.name}") + tg = TelegramUploader(self, up_dir) + async with task_dict_lock: + task_dict[self.mid] = TelegramStatus(self, tg, gid, "up") + await gather( + update_status_message(self.message.chat.id), + tg.upload(), + ) + elif is_gdrive_id(self.up_dest): + LOGGER.info(f"Gdrive Upload Name: {self.name}") + drive = GoogleDriveUpload(self, up_path) + async with task_dict_lock: + task_dict[self.mid] = GoogleDriveStatus(self, drive, gid, "up") + await gather( + update_status_message(self.message.chat.id), + sync_to_async(drive.upload), + ) + else: + LOGGER.info(f"Rclone Upload Name: {self.name}") + RCTransfer = RcloneTransferHelper(self) + async with task_dict_lock: + task_dict[self.mid] = RcloneStatus(self, RCTransfer, gid, "up") + await gather( + update_status_message(self.message.chat.id), + RCTransfer.upload(up_path), + ) + + 
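# on_download_complete() above ends by routing the finished path to one of
# three uploaders: Telegram for leech tasks, Google Drive when up_dest looks
# like a Drive ID, rclone otherwise. A compact sketch of that dispatch rule;
# choose_uploader and looks_like_gdrive_id are illustrative stand-ins (the
# patch uses links_utils.is_gdrive_id for the real check).
def choose_uploader(is_leech, up_dest):
    def looks_like_gdrive_id(dest):
        # Crude stand-in: Drive folder IDs are long opaque tokens, or "root".
        return dest == "root" or (":" not in dest and len(dest) > 20)
    if is_leech:
        return "telegram"
    if looks_like_gdrive_id(up_dest):
        return "gdrive"
    return "rclone"

print(choose_uploader(True, ""))                # telegram
print(choose_uploader(False, "root"))           # gdrive
print(choose_uploader(False, "remote:folder"))  # rclone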
async def on_upload_complete( + self, + link, + files, + folders, + mime_type, + rclone_path="", + dir_id="", + ): + if ( + self.is_super_chat + and Config.INCOMPLETE_TASK_NOTIFIER + and Config.DATABASE_URL + ): + await database.rm_complete_task(self.message.link) + msg = f"Name: {escape(self.name)}\n\nSize: {get_readable_file_size(self.size)}" + done_msg = f"{self.tag}\nYour task is complete\nPlease check your inbox." + LOGGER.info(f"Task Done: {self.name}") + if self.is_leech: + msg += f"\nTotal Files: {folders}" + if mime_type != 0: + msg += f"\nCorrupted Files: {mime_type}" + msg += f"\ncc: {self.tag}\n\n" + if not files: + await send_message(self.message, msg) + else: + fmsg = "" + for index, (url, name) in enumerate(files.items(), start=1): + fmsg += f"{index}. {name}\n" + if len(fmsg.encode() + msg.encode()) > 4000: + await send_message( + self.user_id, + f"{msg}
{fmsg}",
+                        )
+                        if Config.LOG_CHAT_ID:
+                            await send_message(
+                                Config.LOG_CHAT_ID,
+                                f"{msg}{fmsg}",
+                            )
+                        await sleep(1)
+                        fmsg = ""
+                if fmsg != "":
+                    await send_message(
+                        self.user_id,
+                        f"{msg}{fmsg}",
+                    )
+                    if Config.LOG_CHAT_ID:
+                        await send_message(
+                            Config.LOG_CHAT_ID,
+                            f"{msg}{fmsg}
", + ) + await send_message(self.message, done_msg) + else: + msg += f"\n\nType: {mime_type}" + if mime_type == "Folder": + msg += f"\nSubFolders: {folders}" + msg += f"\nFiles: {files}" + if link or ( + rclone_path and Config.RCLONE_SERVE_URL and not self.private_link + ): + buttons = ButtonMaker() + if link: + buttons.url_button("☁️ Cloud Link", link) + else: + msg += f"\n\nPath: {rclone_path}" + if rclone_path and Config.RCLONE_SERVE_URL and not self.private_link: + remote, rpath = rclone_path.split(":", 1) + url_path = rutils.quote(f"{rpath}") + share_url = f"{Config.RCLONE_SERVE_URL}/{remote}/{url_path}" + if mime_type == "Folder": + share_url += "/" + buttons.url_button("🔗 Rclone Link", share_url) + if not rclone_path and dir_id: + INDEX_URL = "" + if self.private_link: + INDEX_URL = self.user_dict.get("index_url", "") or "" + elif Config.INDEX_URL: + INDEX_URL = Config.INDEX_URL + if INDEX_URL: + share_url = f"{INDEX_URL}findpath?id={dir_id}" + buttons.url_button("⚡ Index Link", share_url) + if mime_type.startswith(("image", "video", "audio")): + share_urls = f"{INDEX_URL}findpath?id={dir_id}&view=true" + buttons.url_button("🌐 View Link", share_urls) + button = buttons.build_menu(2) + else: + msg += f"\n\nPath: {rclone_path}" + button = None + msg += f"\n\ncc: {self.tag}" + await send_message(self.user_id, msg, button) + if Config.LOG_CHAT_ID: + await send_message(Config.LOG_CHAT_ID, msg, button) + await send_message(self.message, done_msg) + if self.seed: + await clean_target(self.up_dir) + async with queue_dict_lock: + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + await start_from_queued() + return + await clean_download(self.dir) + async with task_dict_lock: + if self.mid in task_dict: + del task_dict[self.mid] + count = len(task_dict) + if count == 0: + await self.clean() + else: + await update_status_message(self.message.chat.id) + + async with queue_dict_lock: + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + + await start_from_queued() + + async def on_download_error(self, error, button=None): + async with task_dict_lock: + if self.mid in task_dict: + del task_dict[self.mid] + count = len(task_dict) + await self.remove_from_same_dir() + msg = f"{self.tag} Download: {escape(str(error))}" + x = await send_message(self.message, msg, button) + create_task(five_minute_del(x)) # noqa: RUF006 + if count == 0: + await self.clean() + else: + await update_status_message(self.message.chat.id) + + if ( + self.is_super_chat + and Config.INCOMPLETE_TASK_NOTIFIER + and Config.DATABASE_URL + ): + await database.rm_complete_task(self.message.link) + + async with queue_dict_lock: + if self.mid in queued_dl: + queued_dl[self.mid].set() + del queued_dl[self.mid] + if self.mid in queued_up: + queued_up[self.mid].set() + del queued_up[self.mid] + if self.mid in non_queued_dl: + non_queued_dl.remove(self.mid) + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + + await start_from_queued() + await sleep(3) + await clean_download(self.dir) + if self.up_dir: + await clean_download(self.up_dir) + if self.thumb and await aiopath.exists(self.thumb): + await remove(self.thumb) + + async def on_upload_error(self, error): + async with task_dict_lock: + if self.mid in task_dict: + del task_dict[self.mid] + count = len(task_dict) + x = await send_message(self.message, f"{self.tag} {escape(str(error))}") + create_task(five_minute_del(x)) # noqa: RUF006 + if count == 0: + await self.clean() + else: + await update_status_message(self.message.chat.id) + + if ( + 
self.is_super_chat + and Config.INCOMPLETE_TASK_NOTIFIER + and Config.DATABASE_URL + ): + await database.rm_complete_task(self.message.link) + + async with queue_dict_lock: + if self.mid in queued_dl: + queued_dl[self.mid].set() + del queued_dl[self.mid] + if self.mid in queued_up: + queued_up[self.mid].set() + del queued_up[self.mid] + if self.mid in non_queued_dl: + non_queued_dl.remove(self.mid) + if self.mid in non_queued_up: + non_queued_up.remove(self.mid) + + await start_from_queued() + await sleep(3) + await clean_download(self.dir) + if self.up_dir: + await clean_download(self.up_dir) + if self.thumb and await aiopath.exists(self.thumb): + await remove(self.thumb) diff --git a/bot/helper/listeners/tasks_listener.py b/bot/helper/listeners/tasks_listener.py deleted file mode 100644 index 7ceadfbcb..000000000 --- a/bot/helper/listeners/tasks_listener.py +++ /dev/null @@ -1,677 +0,0 @@ -from os import path as ospath -from os import walk -from html import escape -from time import time -from asyncio import Event, sleep, create_subprocess_exec - -from requests import utils as rutils -from aioshutil import move -from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove -from aiofiles.os import listdir, makedirs -from pyrogram.enums import ChatType - -from bot import ( - LOGGER, - MAX_SPLIT_SIZE, - GLOBAL_EXTENSION_FILTER, - Interval, - aria2, - queued_dl, - queued_up, - config_dict, - download_dict, - non_queued_dl, - non_queued_up, - queue_dict_lock, - download_dict_lock, - status_reply_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - extra_btns, - sync_to_async, - get_readable_time, - get_readable_file_size, -) -from bot.helper.ext_utils.exceptions import ExtractionArchiveError -from bot.helper.ext_utils.files_utils import ( - is_archive, - join_files, - split_file, - clean_target, - process_file, - get_base_name, - get_path_size, - clean_download, - is_archive_split, - is_first_archive_split, -) -from bot.helper.ext_utils.task_manager import start_from_queued -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - delete_links, - edit_message, - send_message, - sendCustomMsg, - delete_message, - five_minute_del, - sendMultiMessage, - delete_all_messages, - update_all_messages, -) -from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper -from bot.helper.mirror_leech_utils.status_utils.zip_status import ZipStatus -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper -from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus -from bot.helper.mirror_leech_utils.status_utils.split_status import SplitStatus -from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus -from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus -from bot.helper.mirror_leech_utils.status_utils.extract_status import ExtractStatus -from bot.helper.mirror_leech_utils.upload_utils.telegramEngine import TgUploader -from bot.helper.mirror_leech_utils.status_utils.telegram_status import TelegramStatus - - -class MirrorLeechListener: - def __init__( - self, - message, - compress=False, - extract=False, - is_qbit=False, - is_leech=False, - tag=None, - select=False, - seed=False, - same_dir=None, - rc_flags=None, - upPath=None, - is_clone=False, - join=False, - is_ytdlp=False, - drive_id=None, - index_link=None, - attachment=None, - files_utils={}, - ): - if same_dir is None: - same_dir = 
{} - self.message = message - self.uid = message.id - self.extract = extract - self.compress = compress - self.is_qbit = is_qbit - self.is_leech = is_leech - self.is_clone = is_clone - self.is_ytdlp = is_ytdlp - self.tag = tag - self.seed = seed - self.newDir = "" - self.dir = f"/usr/src/app/downloads/{self.uid}" - self.select = select - self.isSuperGroup = message.chat.type in [ - ChatType.SUPERGROUP, - ChatType.CHANNEL, - ] - self.isPrivate = message.chat.type == ChatType.BOT - self.suproc = None - self.same_dir = same_dir - self.rc_flags = rc_flags - self.upPath = upPath - self.join = join - self.linkslogmsg = None - self.botpmmsg = None - self.drive_id = drive_id - self.index_link = index_link - self.files_utils = files_utils - self.attachment = attachment - - async def clean(self): - try: - async with status_reply_dict_lock: - if Interval: - Interval[0].cancel() - Interval.clear() - await sync_to_async(aria2.purge) - await delete_all_messages() - except Exception: - pass - - async def on_download_start(self): - if config_dict["LEECH_LOG_ID"]: - msg = "Task Started\n\n" - msg += f"• Task by: {self.tag}\n" - msg += f"• User ID: {self.message.from_user.id}" - self.linkslogmsg = await sendCustomMsg(config_dict["LEECH_LOG_ID"], msg) - self.botpmmsg = await sendCustomMsg( - self.message.from_user.id, "Task started" - ) - - async def on_download_complete(self): - multi_links = False - while True: - if self.same_dir: - if ( - self.same_dir["total"] in [1, 0] - or self.same_dir["total"] > 1 - and len(self.same_dir["tasks"]) > 1 - ): - break - else: - break - await sleep(0.2) - async with download_dict_lock: - if self.same_dir and self.same_dir["total"] > 1: - self.same_dir["tasks"].remove(self.uid) - self.same_dir["total"] -= 1 - folder_name = self.same_dir["name"] - spath = f"{self.dir}/{folder_name}" - des_path = f"/usr/src/app/downloads/{next(iter(self.same_dir['tasks']))}/{folder_name}" - await makedirs(des_path, exist_ok=True) - for item in await listdir(spath): - if item.endswith((".aria2", ".!qB")): - continue - item_path = f"{self.dir}/{folder_name}/{item}" - if item in await listdir(des_path): - await move(item_path, f"{des_path}/{self.uid}-{item}") - else: - await move(item_path, f"{des_path}/{item}") - multi_links = True - download = download_dict[self.uid] - name = str(download.name()).replace("/", "") - gid = download.gid() - LOGGER.info(f"Download completed: {name}") - if multi_links: - await self.onUploadError( - "Downloaded! Starting other part of the Task..." 
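
The deleted listener's same_dir handling merges several per-task download folders into the surviving task's folder, renaming on collision and skipping downloader control files. A simplified synchronous sketch of that move logic (the helper name and paths are illustrative; the diff uses aioshutil.move and aiofiles.os):

    from pathlib import Path
    from shutil import move

    def merge_into(src_dir: str, dest_dir: str, uid: int) -> None:
        dest = Path(dest_dir)
        dest.mkdir(parents=True, exist_ok=True)
        for item in Path(src_dir).iterdir():
            if item.name.endswith((".aria2", ".!qB")):
                continue  # skip aria2/qBittorrent incomplete-download markers
            target = dest / item.name
            if target.exists():
                target = dest / f"{uid}-{item.name}"  # avoid overwriting a same-named part
            move(str(item), str(target))
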
- ) - return - if ( - name == "None" - or self.is_qbit - or not await aiopath.exists(f"{self.dir}/{name}") - ): - try: - files = await listdir(self.dir) - except Exception as e: - await self.onUploadError(str(e)) - return - name = files[-1] - if name == "yt-dlp-thumb": - name = files[0] - - dl_path = f"{self.dir}/{name}" - up_path = "" - size = await get_path_size(dl_path) - async with queue_dict_lock: - if self.uid in non_queued_dl: - non_queued_dl.remove(self.uid) - await start_from_queued() - - if self.join and await aiopath.isdir(dl_path): - await join_files(dl_path) - - if self.extract: - pswd = self.extract if isinstance(self.extract, str) else "" - try: - if await aiopath.isfile(dl_path): - up_path = get_base_name(dl_path) - LOGGER.info(f"Extracting: {name}") - async with download_dict_lock: - download_dict[self.uid] = ExtractStatus(name, size, gid, self) - if await aiopath.isdir(dl_path): - if self.seed: - self.newDir = f"{self.dir}10000" - up_path = f"{self.newDir}/{name}" - else: - up_path = dl_path - for dirpath, _, files in await sync_to_async( - walk, dl_path, topdown=False - ): - for file_ in files: - if ( - is_first_archive_split(file_) - or is_archive(file_) - and not file_.endswith(".rar") - ): - f_path = ospath.join(dirpath, file_) - t_path = ( - dirpath.replace(self.dir, self.newDir) - if self.seed - else dirpath - ) - cmd = [ - "7z", - "x", - f"-p{pswd}", - f_path, - f"-o{t_path}", - "-aot", - "-xr!@PaxHeader", - ] - if not pswd: - del cmd[2] - if ( - self.suproc == "cancelled" - or self.suproc is not None - and self.suproc.returncode == -9 - ): - return - self.suproc = await create_subprocess_exec(*cmd) - code = await self.suproc.wait() - if code == -9: - return - if code != 0: - LOGGER.error("Unable to extract archive splits!") - if ( - not self.seed - and self.suproc is not None - and self.suproc.returncode == 0 - ): - for file_ in files: - if is_archive_split(file_) or is_archive(file_): - del_path = ospath.join(dirpath, file_) - try: - await aioremove(del_path) - except Exception: - return - else: - if self.seed: - self.newDir = f"{self.dir}10000" - up_path = up_path.replace(self.dir, self.newDir) - cmd = [ - "7z", - "x", - f"-p{pswd}", - dl_path, - f"-o{up_path}", - "-aot", - "-xr!@PaxHeader", - ] - if not pswd: - del cmd[2] - if self.suproc == "cancelled": - return - self.suproc = await create_subprocess_exec(*cmd) - code = await self.suproc.wait() - if code == -9: - return - if code == 0: - LOGGER.info(f"Extracted Path: {up_path}") - if not self.seed: - try: - await aioremove(dl_path) - except Exception: - return - else: - LOGGER.error("Unable to extract archive! 
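
Note in the extraction loop above that 7z is launched only on the first volume of a multi-part archive; 7z then consumes the remaining volumes itself, and the loop deletes all parts afterwards. A sketch of the kind of filename test such helpers perform (the regex here is illustrative; the real is_first_archive_split lives in files_utils):

    from re import search

    def is_first_archive_split(name: str) -> bool:
        # Matches name.part1.rar, name.zip.001, name.7z.001, name.z01, name.r01 ...
        return bool(search(r"(\.|_)(part0*1\.rar|zip\.0*1|7z\.0*1|z0*1|r0*1)$", name.lower()))
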
Uploading anyway") - self.newDir = "" - up_path = dl_path - except ExtractionArchiveError: - LOGGER.info("Not any valid archive, uploading file as it is.") - self.newDir = "" - up_path = dl_path - - if self.compress: - pswd = self.compress if isinstance(self.compress, str) else "" - if up_path: - dl_path = up_path - up_path = f"{up_path}.zip" - elif self.seed and self.is_leech: - self.newDir = f"{self.dir}10000" - up_path = f"{self.newDir}/{name}.zip" - else: - up_path = f"{dl_path}.zip" - async with download_dict_lock: - download_dict[self.uid] = ZipStatus(name, size, gid, self) - LEECH_SPLIT_SIZE = MAX_SPLIT_SIZE - cmd = [ - "7z", - f"-v{LEECH_SPLIT_SIZE}b", - "a", - "-mx=0", - f"-p{pswd}", - up_path, - dl_path, - ] - for ext in GLOBAL_EXTENSION_FILTER: - ex_ext = f"-xr!*.{ext}" - cmd.append(ex_ext) - if self.is_leech and int(size) > LEECH_SPLIT_SIZE: - if not pswd: - del cmd[4] - LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}.0*") - else: - del cmd[1] - if not pswd: - del cmd[3] - LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}") - if self.suproc == "cancelled": - return - self.suproc = await create_subprocess_exec(*cmd) - code = await self.suproc.wait() - if code == -9: - return - if not self.seed: - await clean_target(dl_path) - - if not self.compress and not self.extract: - up_path = dl_path - - up_dir, up_name = up_path.rsplit("/", 1) - size = await get_path_size(up_dir) - if self.is_leech: - m_size = [] - o_files = [] - if not self.compress: - checked = False - LEECH_SPLIT_SIZE = MAX_SPLIT_SIZE - for dirpath, _, files in await sync_to_async( - walk, up_dir, topdown=False - ): - for file_ in files: - f_path = ospath.join(dirpath, file_) - f_size = await aiopath.getsize(f_path) - if f_size > LEECH_SPLIT_SIZE: - if not checked: - checked = True - async with download_dict_lock: - download_dict[self.uid] = SplitStatus( - up_name, size, gid, self - ) - LOGGER.info(f"Splitting: {up_name}") - res = await split_file( - f_path, - f_size, - file_, - dirpath, - LEECH_SPLIT_SIZE, - self, - ) - if not res: - return - if res == "errored": - if f_size <= MAX_SPLIT_SIZE: - continue - try: - await aioremove(f_path) - except Exception: - return - elif not self.seed or self.newDir: - try: - await aioremove(f_path) - except Exception: - return - else: - m_size.append(f_size) - o_files.append(file_) - - up_limit = config_dict["QUEUE_UPLOAD"] - all_limit = config_dict["QUEUE_ALL"] - added_to_queue = False - async with queue_dict_lock: - dl = len(non_queued_dl) - up = len(non_queued_up) - if ( - all_limit - and dl + up >= all_limit - and (not up_limit or up >= up_limit) - ) or (up_limit and up >= up_limit): - added_to_queue = True - LOGGER.info(f"Added to Queue/Upload: {name}") - event = Event() - queued_up[self.uid] = event - if added_to_queue: - async with download_dict_lock: - download_dict[self.uid] = QueueStatus(name, size, gid, self, "Up") - await event.wait() - async with download_dict_lock: - if self.uid not in download_dict: - return - LOGGER.info(f"Start from Queued/Upload: {name}") - async with queue_dict_lock: - non_queued_up.add(self.uid) - if self.is_leech: - size = await get_path_size(up_dir) - for s in m_size: - size = size - s - LOGGER.info(f"Leech Name: {up_name}") - tg = TgUploader(up_name, up_dir, self) - tg_upload_status = TelegramStatus(tg, size, self.message, gid, "up") - async with download_dict_lock: - download_dict[self.uid] = tg_upload_status - await update_all_messages() - await tg.upload(o_files, m_size, size) - elif self.upPath == "gd": - size = 
await get_path_size(up_path) - LOGGER.info(f"Upload Name: {up_name}") - drive = GoogleDriveHelper(up_name, up_dir, self) - upload_status = GdriveStatus(drive, size, self.message, gid, "up") - async with download_dict_lock: - download_dict[self.uid] = upload_status - await update_all_messages() - await sync_to_async(drive.upload, up_name, size, self.drive_id) - else: - size = await get_path_size(up_path) - LOGGER.info(f"Upload Name: {up_name} via RClone") - RCTransfer = RcloneTransferHelper(self, up_name) - async with download_dict_lock: - download_dict[self.uid] = RcloneStatus( - RCTransfer, self.message, gid, "up" - ) - await update_all_messages() - await RCTransfer.upload(up_path, size) - - async def onUploadComplete( - self, link, size, files, folders, mime_type, name, rclonePath="" - ): - user_id = self.message.from_user.id - name, _ = await process_file(name, user_id, is_mirror=not self.is_leech) - msg = f"{escape(name)}\n\n" - msg += f"
• Size: {get_readable_file_size(size)}\n" - msg += f"• Elapsed: {get_readable_time(time() - self.message.date.timestamp())}\n" - LOGGER.info(f"Task Done: {name}") - buttons = ButtonMaker() - inboxButton = ButtonMaker() - inboxButton.callback("View in inbox", f"aeon {user_id} private", "header") - inboxButton = extra_btns(inboxButton) - if self.is_leech: - if folders > 1: - msg += f"• Total files: {folders}\n" - if mime_type != 0: - msg += f"• Corrupted files: {mime_type}\n" - msg += f"• User ID: {self.message.from_user.id}\n" - msg += f"• By: {self.tag}
\n\n" - if not files: - if self.isPrivate: - msg += ( - "Files have not been sent for an unspecified reason" - ) - await send_message(self.message, msg) - else: - attachmsg = True - fmsg, totalmsg = "\n\n", "" - lmsg = "Files have been sent. Access them via the provided links." - for index, (dlink, name) in enumerate(files.items(), start=1): - fmsg += f"{index}. {name}\n" - totalmsg = (msg + lmsg + fmsg) if attachmsg else fmsg - if len(totalmsg.encode()) > 3900: - if self.linkslogmsg: - await edit_message(self.linkslogmsg, totalmsg) - await send_message(self.botpmmsg, totalmsg) - self.linkslogmsg = await send_message( - self.linkslogmsg, "Fetching Details..." - ) - attachmsg = False - await sleep(1) - fmsg = "\n\n" - if fmsg != "\n\n" and self.linkslogmsg: - await send_message(self.linkslogmsg, msg + lmsg + fmsg) - await delete_message(self.linkslogmsg) - await send_message(self.botpmmsg, msg + lmsg + fmsg) - await delete_message(self.botpmmsg) - if self.isSuperGroup: - await send_message( - self.message, - f"{msg}Files has been sent to your inbox", - inboxButton.column(1), - ) - else: - await delete_message(self.botpmmsg) - if self.seed: - if self.newDir: - await clean_target(self.newDir) - async with queue_dict_lock: - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - await start_from_queued() - return - else: - if mime_type == "Folder": - msg += f"• Total files: {files}\n" - if link: - buttons.url("Cloud link", link) - INDEX_URL = ( - self.index_link if self.drive_id else config_dict["INDEX_URL"] - ) - if not rclonePath and INDEX_URL: - url_path = rutils.quote(f"{name}") - share_url = f"{INDEX_URL}/{url_path}" - if mime_type == "Folder": - share_url += "/" - buttons.url("Index link", share_url) - buttons = extra_btns(buttons) - button = buttons.column(2) - elif rclonePath: - msg += f"• Path: {rclonePath}\n" - button = None - buttons = extra_btns(buttons) - button = buttons.column(2) - msg += f"• User ID: {self.message.from_user.id}\n" - msg += f"• By: {self.tag}
\n\n" - - if config_dict["MIRROR_LOG_ID"]: - await sendMultiMessage(config_dict["MIRROR_LOG_ID"], msg, button) - if self.linkslogmsg: - await delete_message(self.linkslogmsg) - await send_message(self.botpmmsg, msg, button, "Random") - await delete_message(self.botpmmsg) - if self.isSuperGroup: - await send_message( - self.message, - f"{msg} Links has been sent to your inbox", - inboxButton.column(1), - ) - else: - await delete_message(self.botpmmsg) - if self.seed: - if self.newDir: - await clean_target(self.newDir) - elif self.compress: - await clean_target(f"{self.dir}/{name}") - async with queue_dict_lock: - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - await start_from_queued() - return - - await clean_download(self.dir) - async with download_dict_lock: - if self.uid in download_dict: - del download_dict[self.uid] - count = len(download_dict) - if count == 0: - await self.clean() - else: - await update_all_messages() - - async with queue_dict_lock: - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - - await start_from_queued() - await delete_links(self.message) - - async def onDownloadError(self, error, button=None): - async with download_dict_lock: - if self.uid in download_dict: - del download_dict[self.uid] - count = len(download_dict) - if self.same_dir and self.uid in self.same_dir["tasks"]: - self.same_dir["tasks"].remove(self.uid) - self.same_dir["total"] -= 1 - msg = f"Hey, {self.tag}!\n" - msg += "Your download has been stopped!\n\n" - msg += f"
Reason: {escape(error)}\n" - msg += f"Elapsed: {get_readable_time(time() - self.message.date.timestamp())}
" - x = await send_message(self.message, msg, button) - await delete_links(self.message) - if self.botpmmsg: - await delete_message(self.botpmmsg) - if self.linkslogmsg: - await delete_message(self.linkslogmsg) - if count == 0: - await self.clean() - else: - await update_all_messages() - if self.isSuperGroup and self.botpmmsg: - await send_message(self.botpmmsg, msg, button) - await five_minute_del(x) - - async with queue_dict_lock: - if self.uid in queued_dl: - queued_dl[self.uid].set() - del queued_dl[self.uid] - if self.uid in queued_up: - queued_up[self.uid].set() - del queued_up[self.uid] - if self.uid in non_queued_dl: - non_queued_dl.remove(self.uid) - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - - await start_from_queued() - await sleep(3) - await clean_download(self.dir) - if self.newDir: - await clean_download(self.newDir) - - async def onUploadError(self, error): - async with download_dict_lock: - if self.uid in download_dict: - del download_dict[self.uid] - count = len(download_dict) - msg = f"Hey, {self.tag}!\n" - msg += "Your upload has been stopped!\n\n" - msg += f"
Reason: {escape(error)}\n" - msg += f"Elapsed: {get_readable_time(time() - self.message.date.timestamp())}
" - x = await send_message(self.message, msg) - if self.linkslogmsg: - await delete_message(self.linkslogmsg) - await delete_links(self.message) - if self.botpmmsg: - await delete_message(self.botpmmsg) - if count == 0: - await self.clean() - else: - await update_all_messages() - if self.isSuperGroup and self.botpmmsg: - await send_message(self.botpmmsg, msg) - await five_minute_del(x) - - async with queue_dict_lock: - if self.uid in queued_dl: - queued_dl[self.uid].set() - del queued_dl[self.uid] - if self.uid in queued_up: - queued_up[self.uid].set() - del queued_up[self.uid] - if self.uid in non_queued_dl: - non_queued_dl.remove(self.uid) - if self.uid in non_queued_up: - non_queued_up.remove(self.uid) - - await start_from_queued() - await sleep(3) - await clean_download(self.dir) - if self.newDir: - await clean_download(self.newDir) diff --git a/bot/helper/mirror_leech_utils/__init__.py b/bot/helper/mirror_leech_utils/__init__.py index 8b1378917..e69de29bb 100644 --- a/bot/helper/mirror_leech_utils/__init__.py +++ b/bot/helper/mirror_leech_utils/__init__.py @@ -1 +0,0 @@ - diff --git a/bot/helper/mirror_leech_utils/download_utils/aria2_download.py b/bot/helper/mirror_leech_utils/download_utils/aria2_download.py index b6236b559..01467de54 100644 --- a/bot/helper/mirror_leech_utils/download_utils/aria2_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/aria2_download.py @@ -1,97 +1,86 @@ from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove +from aiofiles.os import remove -from bot import ( - LOGGER, - aria2, - config_dict, - aria2_options, - aria2c_global, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) -from bot.helper.ext_utils.bot_utils import sync_to_async, bt_selection_buttons -from bot.helper.ext_utils.task_manager import is_queued -from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage +from bot import LOGGER, aria2, task_dict, task_dict_lock +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import bt_selection_buttons, sync_to_async +from bot.helper.ext_utils.task_manager import check_running_tasks from bot.helper.mirror_leech_utils.status_utils.aria2_status import Aria2Status +from bot.helper.telegram_helper.message_utils import ( + send_message, + send_status_message, +) -async def add_aria2c_download( - link, path, listener, filename, header, ratio, seed_time -): - a2c_opt = {**aria2_options} - [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options] - a2c_opt["dir"] = path - if filename: - a2c_opt["out"] = filename +async def add_aria2c_download(listener, dpath, header, ratio, seed_time): + a2c_opt = {"dir": dpath} + if listener.name: + a2c_opt["out"] = listener.name if header: a2c_opt["header"] = header if ratio: a2c_opt["seed-ratio"] = ratio if seed_time: a2c_opt["seed-time"] = seed_time - if TORRENT_TIMEOUT := config_dict["TORRENT_TIMEOUT"]: + if TORRENT_TIMEOUT := Config.TORRENT_TIMEOUT: a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}" - added_to_queue, event = await is_queued(listener.uid) - if added_to_queue: - if link.startswith("magnet:"): + + add_to_queue, event = await check_running_tasks(listener) + if add_to_queue: + if listener.link.startswith("magnet:"): a2c_opt["pause-metadata"] = "true" else: a2c_opt["pause"] = "true" + try: - download = (await sync_to_async(aria2.add, link, a2c_opt))[0] + download = (await sync_to_async(aria2.add, listener.link, a2c_opt))[0] except Exception as e: LOGGER.info(f"Aria2c Download Error: 
{e}") - await send_message(listener.message, f"{e}") + await listener.on_download_error(f"{e}") return - if await aiopath.exists(link): - await aioremove(link) + if await aiopath.exists(listener.link): + await remove(listener.link) if download.error_message: error = str(download.error_message).replace("<", " ").replace(">", " ") LOGGER.info(f"Aria2c Download Error: {error}") - await send_message(listener.message, error) + await listener.on_download_error(error) return gid = download.gid name = download.name - async with download_dict_lock: - download_dict[listener.uid] = Aria2Status( - gid, listener, queued=added_to_queue - ) - if added_to_queue: + async with task_dict_lock: + task_dict[listener.mid] = Aria2Status(listener, gid, queued=add_to_queue) + if add_to_queue: LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}") - if not listener.select or not download.is_torrent: - await sendStatusMessage(listener.message) + if (not listener.select or not download.is_torrent) and listener.multi <= 1: + await send_status_message(listener.message) else: - async with queue_dict_lock: - non_queued_dl.add(listener.uid) LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}") await listener.on_download_start() - if not added_to_queue and (not listener.select or not config_dict["BASE_URL"]): - await sendStatusMessage(listener.message) + if ( + not add_to_queue + and (not listener.select or not Config.BASE_URL) + and listener.multi <= 1 + ): + await send_status_message(listener.message) elif listener.select and download.is_torrent and not download.is_metadata: - if not added_to_queue: + if not add_to_queue: await sync_to_async(aria2.client.force_pause, gid) - s_buttons = bt_selection_buttons(gid) + SBUTTONS = bt_selection_buttons(gid) msg = "Your download paused. Choose files then press Done Selecting button to start downloading." - await send_message(listener.message, msg, s_buttons) + await send_message(listener.message, msg, SBUTTONS) - if added_to_queue: + if add_to_queue: await event.wait() - - async with download_dict_lock: - if listener.uid not in download_dict: - return - download = download_dict[listener.uid] - download.queued = False - new_gid = download.gid() + if listener.is_cancelled: + return + async with task_dict_lock: + task = task_dict[listener.mid] + task.queued = False + await sync_to_async(task.update) + new_gid = task.gid() await sync_to_async(aria2.client.unpause, new_gid) LOGGER.info(f"Start Queued Download from Aria2c: {name}. 
Gid: {gid}") - - async with queue_dict_lock: - non_queued_dl.add(listener.uid) diff --git a/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py b/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py index 5c1649e7c..289cd89a2 100644 --- a/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py +++ b/bot/helper/mirror_leech_utils/download_utils/direct_downloader.py @@ -1,93 +1,60 @@ from secrets import token_hex -from bot import ( - LOGGER, - aria2_options, - aria2c_global, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) +from bot import LOGGER, task_dict, task_dict_lock from bot.helper.ext_utils.bot_utils import sync_to_async -from bot.helper.aeon_utils.nsfw_check import is_nsfw_data from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) from bot.helper.listeners.direct_listener import DirectListener -from bot.helper.telegram_helper.message_utils import ( - delete_links, - send_message, - one_minute_del, - sendStatusMessage, -) -from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_leech_utils.status_utils.direct_status import DirectStatus +from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus +from bot.helper.telegram_helper.message_utils import send_status_message -async def add_direct_download(details, path, listener, foldername): +async def add_direct_download(listener, path): + details = listener.link if not (contents := details.get("contents")): - await send_message(listener.message, "There is nothing to download!") - return - size = details["total_size"] - if not foldername: - foldername = details["title"] - if is_nsfw_data(details): - await listener.onDownloadError("NSFW detected") + await listener.on_download_error("There is nothing to download!") return - path = f"{path}/{foldername}" - msg, button = await stop_duplicate_check(foldername, listener) + listener.size = details["total_size"] + + if not listener.name: + listener.name = details["title"] + path = f"{path}/{listener.name}" + + msg, button = await stop_duplicate_check(listener) if msg: - msg = await send_message(listener.message, msg, button) - await delete_links(listener.message) - await one_minute_del(msg) - return - if limit_exceeded := await limit_checker(size, listener): - LOGGER.info(f"Limit Exceeded: {foldername} | {size}") - msg = await send_message(listener.message, limit_exceeded) - await delete_links(listener.message) - await one_minute_del(msg) + await listener.on_download_error(msg, button) return gid = token_hex(4) - added_to_queue, event = await is_queued(listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {foldername}") - async with download_dict_lock: - download_dict[listener.uid] = QueueStatus( - foldername, size, gid, listener, "dl" - ) + add_to_queue, event = await check_running_tasks(listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {listener.name}") + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "dl") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await send_status_message(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False + if listener.is_cancelled: + return - a2c_opt = {**aria2_options} - [a2c_opt.pop(k) for k in aria2c_global if k in 
aria2_options] + a2c_opt = {"follow-torrent": "false", "follow-metalink": "false"} if header := details.get("header"): a2c_opt["header"] = header - a2c_opt["follow-torrent"] = "false" - a2c_opt["follow-metalink"] = "false" - directListener = DirectListener(foldername, size, path, listener, a2c_opt) - async with download_dict_lock: - download_dict[listener.uid] = DirectStatus(directListener, gid, listener) + directListener = DirectListener(path, listener, a2c_opt) - async with queue_dict_lock: - non_queued_dl.add(listener.uid) + async with task_dict_lock: + task_dict[listener.mid] = DirectStatus(listener, directListener, gid) - if from_queue: - LOGGER.info(f"Start Queued Download from Direct Download: {foldername}") + if add_to_queue: + LOGGER.info(f"Start Queued Download from Direct Download: {listener.name}") else: - LOGGER.info(f"Download from Direct Download: {foldername}") + LOGGER.info(f"Download from Direct Download: {listener.name}") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await send_status_message(listener.message) - await delete_links(listener.message) await sync_to_async(directListener.download, contents) diff --git a/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py b/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py index d1c049ae1..c79aa48c1 100644 --- a/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py +++ b/bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py @@ -1,126 +1,207 @@ -from os import path -from re import match, search, findall +# ruff: noqa +from base64 import b64decode +from hashlib import sha256 +from http.cookiejar import MozillaCookieJar from json import loads +from os import path as ospath +from re import findall, match, search from time import sleep -from uuid import uuid4 -from hashlib import sha256 from urllib.parse import parse_qs, urlparse +from uuid import uuid4 -from bs4 import BeautifulSoup -from requests import Session, get, post -from requests import session as req_session -from lxml.etree import HTML from cloudscraper import create_scraper +from lxml.etree import HTML +from requests import RequestException, Session, get, post from requests.adapters import HTTPAdapter from urllib3.util.retry import Retry -from bot import config_dict -from bot.helper.ext_utils.bot_utils import text_to_bytes -from bot.helper.ext_utils.exceptions import DirectDownloadLinkError -from bot.helper.ext_utils.help_strings import PASSWORD_ERROR_MESSAGE +from bot.core.config_manager import Config +from bot.helper.ext_utils.exceptions import DirectDownloadLinkException +from bot.helper.ext_utils.help_messages import PASSWORD_ERROR_MESSAGE +from bot.helper.ext_utils.links_utils import is_share_link +from bot.helper.ext_utils.status_utils import speed_string_to_bytes -_caches = {} user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:122.0) Gecko/20100101 Firefox/122.0" -domain_dict = { - "mediafire": ["mediafire.com"], - "osdn": ["osdn.net"], - "github": ["github.com"], - "hxfile": ["hxfile.co"], - "onedrive": ["1drv.ms"], - "pixeldrain": ["pixeldrain.com"], - "racaty": ["racaty"], - "fichier": ["1fichier.com"], - "solidfiles": ["solidfiles.com"], - "krakenfiles": ["krakenfiles.com"], - "uploadee": ["upload.ee"], - "gofile": ["gofile.io"], - "send_cm": ["send.cm"], - "easyupload": ["easyupload.io"], - "hubdrive": ["hubdrive"], - "streamvid": ["streamvid.net"], - "shrdsk": ["shrdsk.me"], - "streamhub": ["streamhub.ink"], - "appflix": 
["appdrive", "gdflix"], - "akmfiles": ["akmfiles.com", "akmfls.xyz"], - "doods": [ - "dood.watch", - "doodstream.com", - "dood.to", - "dood.so", - "dood.cx", - "dood.la", - "dood.ws", - "dood.sh", - "doodstream.co", - "dood.pm", - "dood.wf", - "dood.re", - "dood.video", - "dooood.com", - "dood.yt", - "doods.yt", - "dood.stream", - "doods.pro", - "ds2play.com", - ], - "streamtape": [ - "streamtape.com", - "streamtape.co", - "streamtape.cc", - "streamtape.to", - "streamtape.net", - "streamta.pe", - "streamtape.xyz", - ], - "wetransfer": ["wetransfer.com", "we.tl"], - "terabox": [ - "terabox.com", - "nephobox.com", - "4funbox.com", - "mirrobox.com", - "momerybox.com", - "teraboxapp.com", - "1024tera.com", - "terabox.app", - "gibibox.com", - "goaibox.com", - "terasharelink.com", - "teraboxlink.com", - "freeterabox.com", - "1024terabox.com", - "teraboxshare.com", - ], - "filewish": [ - "filelions.co", - "filelions.site", - "filelions.live", - "filelions.lol", - "filelions.to", - "cabecabean.lol", - "filelions.online", - "embedwish.com", - "streamwish.com", - "kitabmarkaz.xyz", - "wishfast.top", - "streamwish.to", - ], - "linkBox": ["linkbox.to", "lbx.to", "telbx.net", "teltobx.net"], - "filepress": ["filepress"], - "pcloud": ["u.pcloud.link"], -} def direct_link_generator(link): + """direct links generator""" domain = urlparse(link).hostname if not domain: - raise DirectDownloadLinkError("ERROR: Invalid URL") - if "youtube.com" in domain or "youtu.be" in domain: - raise DirectDownloadLinkError("ERROR: Use ytdl cmds for Youtube links") - for func_name, domain_list in domain_dict.items(): - if any(x in domain for x in domain_list): - func = globals().get(func_name) - return func(link) - raise DirectDownloadLinkError(f"No Direct link function found for {link}") + raise DirectDownloadLinkException("ERROR: Invalid URL") + if "yadi.sk" in link or "disk.yandex." 
in link: + return yandex_disk(link) + if "buzzheavier.com" in domain: + return buzzheavier(link) + if "mediafire.com" in domain: + return mediafire(link) + if "osdn.net" in domain: + return osdn(link) + if "github.com" in domain: + return github(link) + if "hxfile.co" in domain: + return hxfile(link) + if "1drv.ms" in domain: + return onedrive(link) + if "pixeldrain.com" in domain: + return pixeldrain(link) + if "racaty" in domain: + return racaty(link) + if "1fichier.com" in domain: + return fichier(link) + if "solidfiles.com" in domain: + return solidfiles(link) + if "krakenfiles.com" in domain: + return krakenfiles(link) + if "upload.ee" in domain: + return uploadee(link) + if "gofile.io" in domain: + return gofile(link) + if "send.cm" in domain: + return send_cm(link) + if "tmpsend.com" in domain: + return tmpsend(link) + if "easyupload.io" in domain: + return easyupload(link) + if "streamvid.net" in domain: + return streamvid(link) + if "shrdsk.me" in domain: + return shrdsk(link) + if "u.pcloud.link" in domain: + return pcloud(link) + if "qiwi.gg" in domain: + return qiwi(link) + if "mp4upload.com" in domain: + return mp4upload(link) + if "berkasdrive.com" in domain: + return berkasdrive(link) + if any(x in domain for x in ["akmfiles.com", "akmfls.xyz"]): + return akmfiles(link) + if any( + x in domain + for x in [ + "dood.watch", + "doodstream.com", + "dood.to", + "dood.so", + "dood.cx", + "dood.la", + "dood.ws", + "dood.sh", + "doodstream.co", + "dood.pm", + "dood.wf", + "dood.re", + "dood.video", + "dooood.com", + "dood.yt", + "doods.yt", + "dood.stream", + "doods.pro", + "ds2play.com", + "d0o0d.com", + "ds2video.com", + "do0od.com", + "d000d.com", + ] + ): + return doods(link) + if any( + x in domain + for x in [ + "streamtape.com", + "streamtape.co", + "streamtape.cc", + "streamtape.to", + "streamtape.net", + "streamta.pe", + "streamtape.xyz", + ] + ): + return streamtape(link) + if any(x in domain for x in ["wetransfer.com", "we.tl"]): + return wetransfer(link) + if any( + x in domain + for x in [ + "terabox.com", + "nephobox.com", + "4funbox.com", + "mirrobox.com", + "momerybox.com", + "teraboxapp.com", + "1024tera.com", + "terabox.app", + "gibibox.com", + "goaibox.com", + "terasharelink.com", + "teraboxlink.com", + "freeterabox.com", + "1024terabox.com", + "teraboxshare.com", + ] + ): + return terabox(link) + if any( + x in domain + for x in [ + "filelions.co", + "filelions.site", + "filelions.live", + "filelions.to", + "mycloudz.cc", + "cabecabean.lol", + "filelions.online", + "embedwish.com", + "kitabmarkaz.xyz", + "wishfast.top", + "streamwish.to", + "kissmovies.net", + ] + ): + return filelions_and_streamwish(link) + if any(x in domain for x in ["streamhub.ink", "streamhub.to"]): + return streamhub(link) + if any( + x in domain + for x in [ + "linkbox.to", + "lbx.to", + "teltobx.net", + "telbx.net", + ] + ): + return linkBox(link) + if is_share_link(link): + if "gdtot" in domain: + return gdtot(link) + if "filepress" in domain: + return filepress(link) + return sharer_scraper(link) + if any( + x in domain + for x in [ + "anonfiles.com", + "zippyshare.com", + "letsupload.io", + "hotfile.io", + "bayfiles.com", + "megaupload.nz", + "letsupload.cc", + "filechan.org", + "myfile.is", + "vshare.is", + "rapidshare.nu", + "lolabits.se", + "openload.cc", + "share-online.is", + "upvid.cc", + "uptobox.com", + "uptobox.fr", + ] + ): + raise DirectDownloadLinkException(f"ERROR: R.I.P {domain}") + raise DirectDownloadLinkException(f"No Direct link function found for {link}") def 
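
The rewrite above replaces the old domain_dict-plus-globals() dispatch with one explicit if-chain. If the table style were kept, it could stay lookup-based without the globals() indirection; a sketch under that assumption (the table is heavily abbreviated, and the handlers referenced are this module's own functions):

    from urllib.parse import urlparse

    HANDLERS = {
        ("mediafire.com",): mediafire,
        ("pixeldrain.com",): pixeldrain,
        ("streamtape.com", "streamta.pe"): streamtape,
        # ... one entry per handler family ...
    }

    def dispatch(link: str):
        host = urlparse(link).hostname or ""
        for needles, func in HANDLERS.items():
            if any(n in host for n in needles):
                return func(link)
        raise DirectDownloadLinkException(f"No Direct link function found for {link}")
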
get_captcha_token(session, params): @@ -139,30 +220,104 @@ def get_captcha_token(session, params): return None +def buzzheavier(url): + """ + Generate a direct download link for buzzheavier URLs. + @param url: URL from buzzheavier + @return: Direct download link + """ + session = Session() + + # Normalize the URL before appending the download path + url = url.strip() + if "/download" not in url: + url += "/download" + session.headers.update( + { + "referer": url.split("/download")[0], + "hx-current-url": url.split("/download")[0], + "hx-request": "true", + "priority": "u=1, i", + } + ) + try: + response = session.get(url) + d_url = response.headers.get("Hx-Redirect") + + if not d_url: + raise DirectDownloadLinkException("ERROR: Failed to fetch direct link.") + parsed_url = urlparse(url) + direct_url = f"{parsed_url.scheme}://{parsed_url.netloc}{d_url}" + return direct_url + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e}") from e + finally: + session.close() + + def mediafire(url, session=None): if "/folder/" in url: return mediafireFolder(url) + if "::" in url: + _password = url.split("::")[-1] + url = url.split("::")[-2] + else: + _password = "" if final_link := findall( - r"https?:\/\/download\d+\.mediafire\.com\/\S+\/\S+\/\S+", url + r"https?:\/\/download\d+\.mediafire\.com\/\S+\/\S+\/\S+", + url, ): return final_link[0] + + def _repair_download(url, session): + try: + html = HTML(session.get(url).text) + if new_link := html.xpath('//a[@id="continue-btn"]/@href'): + return mediafire(f"https://mediafire.com/{new_link[0]}") + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + if session is None: - session = Session() + session = create_scraper() parsed_url = urlparse(url) url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}" try: html = HTML(session.get(url).text) except Exception as e: session.close() - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if error := html.xpath('//p[@class="notranslate"]/text()'): session.close() - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - if not (final_link := html.xpath("//a[@id='downloadButton']/@href")): - session.close() - raise DirectDownloadLinkError("ERROR: No links found in this page Try Again") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + if html.xpath("//div[@class='passwordPrompt']"): + if not _password: + session.close() + raise DirectDownloadLinkException( + f"ERROR: {PASSWORD_ERROR_MESSAGE}".format(url), + ) + try: + html = HTML(session.post(url, data={"downloadp": _password}).text) + except Exception as e: + session.close() + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + if html.xpath("//div[@class='passwordPrompt']"): + session.close() + raise DirectDownloadLinkException("ERROR: Wrong password.") + if not (final_link := html.xpath('//a[@aria-label="Download file"]/@href')): + if repair_link := html.xpath("//a[@class='retry']/@href"): + return _repair_download(repair_link[0], session) + raise DirectDownloadLinkException( + "ERROR: No links found in this page. Try again.", + ) if final_link[0].startswith("//"): - return mediafire(f"https://{final_link[0][2:]}", session) + final_url = f"https://{final_link[0][2:]}" + if _password: + final_url += f"::{_password}" + return mediafire(final_url, session) session.close() return final_link[0] @@ -172,105 +327,116 @@ def osdn(url): try: html = HTML(session.get(url).text) except Exception as e: 
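
Several handlers in this module (mediafire above, 1fichier below) accept an inline password appended to the link as "url::password". The convention in isolation (the helper name is illustrative; the diff open-codes the split):

    def split_inline_password(link: str) -> tuple[str, str]:
        # "https://host/file::secret" -> ("https://host/file", "secret")
        if "::" in link:
            base, _, pswd = link.rpartition("::")
            return base, pswd
        return link, ""
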
- raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if not (direct_link := html.xapth('//a[@class="mirror_link"]/@href')): - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") return f"https://osdn.net{direct_link[0]}" +def yandex_disk(url: str) -> str: + """Yandex.Disk direct link generator + Based on https://github.com/wldhx/yadisk-direct""" + try: + link = findall(r"\b(https?://(yadi\.sk|disk\.yandex\.(com|ru))\S+)", url)[0][ + 0 + ] + except IndexError: + return "No Yandex.Disk links found\n" + api = "https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key={}" + try: + return get(api.format(link)).json()["href"] + except KeyError as e: + raise DirectDownloadLinkException( + "ERROR: File not found/Download limit reached", + ) from e + + def github(url): + """GitHub direct links generator""" try: findall(r"\bhttps?://.*github\.com.*releases\S+", url)[0] - except IndexError: - raise DirectDownloadLinkError("No GitHub Releases links found") + except IndexError as e: + raise DirectDownloadLinkException("No GitHub Releases links found") from e with create_scraper() as session: _res = session.get(url, stream=True, allow_redirects=False) if "location" in _res.headers: return _res.headers["location"] - raise DirectDownloadLinkError("ERROR: Can't extract the link") + raise DirectDownloadLinkException("ERROR: Can't extract the link") def hxfile(url): - with create_scraper() as session: + if not ospath.isfile("hxfile.txt"): + raise DirectDownloadLinkException("ERROR: hxfile.txt (cookies) Not Found!") + try: + jar = MozillaCookieJar() + jar.load("hxfile.txt") + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + cookies = {cookie.name: cookie.value for cookie in jar} + with Session() as session: try: file_code = url.split("/")[-1] html = HTML( - session.post(url, data={"op": "download2", "id": file_code}).text + session.post( + url, + data={"op": "download2", "id": file_code}, + cookies=cookies, + ).text, ) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - if direct_link := html.xpath('//a[@class="btn btn-dow"]/@href'): - return direct_link[0] - raise DirectDownloadLinkError("ERROR: Direct download link not found") - - -def filepress(url): - with create_scraper() as session: - try: - url = session.get(url).url - raw = urlparse(url) - json_data = { - "id": raw.path.split("/")[-1], - "method": "publicDownlaod", - } - api = f"{raw.scheme}://{raw.hostname}/api/file/downlaod/" - res2 = session.post( - api, - headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, - json=json_data, - ).json() - json_data2 = { - "id": res2["data"], - "method": "publicUserDownlaod", - } - api2 = "https://new2.filepress.store/api/file/downlaod2/" - res = session.post( - api2, - headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, - json=json_data2, - ).json() - except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - if "data" not in res: - raise DirectDownloadLinkError(f'ERROR: {res["statusText"]}') - return f'https://drive.google.com/uc?id={res["data"]}&export=download' + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + if direct_link := html.xpath("//a[@class='btn btn-dow']/@href"): + header = f"Referer: {url}" + return direct_link[0], header + raise 
DirectDownloadLinkException("ERROR: Direct download link not found") def onedrive(link): + """Onedrive direct link generator + By https://github.com/junedkh""" with create_scraper() as session: try: link = session.get(link).url parsed_link = urlparse(link) link_data = parse_qs(parsed_link.query) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if not link_data: - raise DirectDownloadLinkError("ERROR: Unable to find link_data") + raise DirectDownloadLinkException("ERROR: Unable to find link_data") folder_id = link_data.get("resid") if not folder_id: - raise DirectDownloadLinkError("ERROR: folder id not found") + raise DirectDownloadLinkException("ERROR: folder id not found") folder_id = folder_id[0] authkey = link_data.get("authkey") if not authkey: - raise DirectDownloadLinkError("ERROR: authkey not found") + raise DirectDownloadLinkException("ERROR: authkey not found") authkey = authkey[0] boundary = uuid4() headers = {"content-type": f"multipart/form-data;boundary={boundary}"} data = f"--{boundary}\r\nContent-Disposition: form-data;name=data\r\nPrefer: Migration=EnableRedirect;FailOnMigratedFiles\r\nX-HTTP-Method-Override: GET\r\nContent-Type: application/json\r\n\r\n--{boundary}--" try: resp = session.get( - f'https://api.onedrive.com/v1.0/drives/{folder_id.split("!", 1)[0]}/items/{folder_id}?$select=id,@content.downloadUrl&ump=1&authKey={authkey}', + f"https://api.onedrive.com/v1.0/drives/{folder_id.split('!', 1)[0]}/items/{folder_id}?$select=id,@content.downloadUrl&ump=1&authKey={authkey}", headers=headers, data=data, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if "@content.downloadUrl" not in resp: - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") return resp["@content.downloadUrl"] def pixeldrain(url): + """Based on https://github.com/yash-dk/TorToolkit-Telegram""" url = url.strip("/ ") file_id = url.split("/")[-1] if url.split("/")[-2] == "l": @@ -283,10 +449,14 @@ def pixeldrain(url): try: resp = session.get(info_link).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if resp["success"]: return dl_link - raise DirectDownloadLinkError(f"ERROR: Cant't download due {resp['message']}.") + raise DirectDownloadLinkException( + f"ERROR: Cant't download due {resp['message']}.", + ) def streamtape(url): @@ -296,11 +466,14 @@ def streamtape(url): with Session() as session: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - if not (script := html.xpath("//script[contains(text(),'ideoooolink')]/text()")): - raise DirectDownloadLinkError("ERROR: requeries script not found") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + script = html.xpath( + "//script[contains(text(),'ideoooolink')]/text()", + ) or html.xpath("//script[contains(text(),'ideoolink')]/text()") + if not script: + raise DirectDownloadLinkException("ERROR: requeries script not found") if not (link := findall(r"(&expires\S+)'", script[0])): - raise DirectDownloadLinkError("ERROR: Download link not found") + raise DirectDownloadLinkException("ERROR: Download link 
not found") return f"https://streamtape.com/get_video?id={_id}{link[-1]}" @@ -311,17 +484,22 @@ def racaty(url): json_data = {"op": "download2", "id": url.split("/")[-1]} html = HTML(session.post(url, data=json_data).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if direct_link := html.xpath("//a[@id='uniqueExpirylink']/@href"): return direct_link[0] - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") def fichier(link): + """1Fichier direct link generator + Based on https://github.com/Maujar + """ regex = r"^([http:\/\/|https:\/\/]+)?.*1fichier\.com\/\?.+" gan = match(regex, link) if not gan: - raise DirectDownloadLinkError("ERROR: The link you entered is wrong!") + raise DirectDownloadLinkException("ERROR: The link you entered is wrong!") if "::" in link: pswd = link.split("::")[-1] url = link.split("::")[-2] @@ -336,68 +514,73 @@ def fichier(link): pw = {"pass": pswd} req = cget("post", url, data=pw) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if req.status_code == 404: - raise DirectDownloadLinkError( - "ERROR: File not found/The link you entered is wrong!" + raise DirectDownloadLinkException( + "ERROR: File not found/The link you entered is wrong!", ) html = HTML(req.text) if dl_url := html.xpath('//a[@class="ok btn-general btn-orange"]/@href'): return dl_url[0] if not (ct_warn := html.xpath('//div[@class="ct_warn"]')): - raise DirectDownloadLinkError( - "ERROR: Error trying to generate Direct Link from 1fichier!" + raise DirectDownloadLinkException( + "ERROR: Error trying to generate Direct Link from 1fichier!", ) if len(ct_warn) == 3: str_2 = ct_warn[-1].text if "you must wait" in str_2.lower(): if numbers := [int(word) for word in str_2.split() if word.isdigit()]: - raise DirectDownloadLinkError( - f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute." + raise DirectDownloadLinkException( + f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.", ) - raise DirectDownloadLinkError( - "ERROR: 1fichier is on a limit. Please wait a few minutes/hour." + raise DirectDownloadLinkException( + "ERROR: 1fichier is on a limit. Please wait a few minutes/hour.", ) if "protect access" in str_2.lower(): - raise DirectDownloadLinkError( - f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(link)}" + raise DirectDownloadLinkException( + f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(link)}", ) - raise DirectDownloadLinkError( - "ERROR: Failed to generate Direct Link from 1fichier!" + raise DirectDownloadLinkException( + "ERROR: Failed to generate Direct Link from 1fichier!", ) if len(ct_warn) == 4: str_1 = ct_warn[-2].text str_3 = ct_warn[-1].text if "you must wait" in str_1.lower(): if numbers := [int(word) for word in str_1.split() if word.isdigit()]: - raise DirectDownloadLinkError( - f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute." + raise DirectDownloadLinkException( + f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.", ) - raise DirectDownloadLinkError( - "ERROR: 1fichier is on a limit. Please wait a few minutes/hour." + raise DirectDownloadLinkException( + "ERROR: 1fichier is on a limit. 
Please wait a few minutes/hour.", ) if "bad password" in str_3.lower(): - raise DirectDownloadLinkError( - "ERROR: The password you entered is wrong!" + raise DirectDownloadLinkException( + "ERROR: The password you entered is wrong!", ) - raise DirectDownloadLinkError( - "ERROR: Error trying to generate Direct Link from 1fichier!" + raise DirectDownloadLinkException( + "ERROR: Error trying to generate Direct Link from 1fichier!", ) def solidfiles(url): + """Solidfiles direct link generator + Based on https://github.com/Xonshiz/SolidFiles-Downloader + By https://github.com/Jusidama18""" with create_scraper() as session: try: headers = { - "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36" + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36", } pageSource = session.get(url, headers=headers).text mainOptions = str( - search(r"viewerOptions\'\,\ (.*?)\)\;", pageSource).group(1) + search(r"viewerOptions\'\,\ (.*?)\)\;", pageSource).group(1), ) return loads(mainOptions)["downloadUrl"] except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e def krakenfiles(url): @@ -405,25 +588,29 @@ def krakenfiles(url): try: _res = session.get(url) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e html = HTML(_res.text) if post_url := html.xpath('//form[@id="dl-form"]/@action'): - post_url = f"https:{post_url[0]}" + post_url = f"https://krakenfiles.com{post_url[0]}" else: - raise DirectDownloadLinkError("ERROR: Unable to find post link.") + raise DirectDownloadLinkException("ERROR: Unable to find post link.") if token := html.xpath('//input[@id="dl-token"]/@value'): data = {"token": token[0]} else: - raise DirectDownloadLinkError("ERROR: Unable to find token for post.") + raise DirectDownloadLinkException( + "ERROR: Unable to find token for post.", + ) try: _json = session.post(post_url, data=data).json() except Exception as e: - raise DirectDownloadLinkError( - f"ERROR: {e.__class__.__name__} While send post request" - ) + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While send post request", + ) from e if _json["status"] != "ok": - raise DirectDownloadLinkError( - "ERROR: Unable to find download after post request" + raise DirectDownloadLinkException( + "ERROR: Unable to find download after post request", ) return _json["url"] @@ -433,10 +620,12 @@ def uploadee(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if link := html.xpath("//a[@id='d_l']/@href"): return link[0] - raise DirectDownloadLinkError("ERROR: Direct Link not found") + raise DirectDownloadLinkException("ERROR: Direct Link not found") def terabox(url, video_quality="HD Video", save_dir="HD_Video"): @@ -445,7 +634,7 @@ def terabox(url, video_quality="HD Video", save_dir="HD_Video"): pattern = r"/s/(\w+)|surl=(\w+)" if not search(pattern, url): - raise DirectDownloadLinkError("ERROR: Invalid terabox URL") + raise DirectDownloadLinkException("ERROR: Invalid terabox URL") netloc = urlparse(url).netloc terabox_url = url.replace(netloc, "1024tera.com") @@ 
-479,10 +668,12 @@ def terabox(url, video_quality="HD Video", save_dir="HD_Video"): if response.status_code == 200: break - except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") from e + except RequestException as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e else: - raise DirectDownloadLinkError("ERROR: Unable to fetch the JSON data") + raise DirectDownloadLinkException("ERROR: Unable to fetch the JSON data") data = response.json() details = {"contents": [], "title": "", "total_size": 0} @@ -490,19 +681,18 @@ for item in data["response"]: title = item["title"] resolutions = item.get("resolutions", {}) - links = resolutions.get(video_quality) - if links: + if zlink := resolutions.get(video_quality): details["contents"].append( { - "url": links, + "url": zlink, "filename": title, - "path": path.join(title, save_dir), - } + "path": ospath.join(title, save_dir), + }, ) details["title"] = title if not details["contents"]: - raise DirectDownloadLinkError("ERROR: No valid download links found") + raise DirectDownloadLinkException("ERROR: No valid download links found") if len(details["contents"]) == 1: return details["contents"][0]["url"] @@ -510,24 +700,102 @@ return details -def appflix(url): +def filepress(url): + with create_scraper() as session: + try: + url = session.get(url).url + raw = urlparse(url) + json_data = { + "id": raw.path.split("/")[-1], + "method": "publicDownlaod", + } + api = f"{raw.scheme}://{raw.hostname}/api/file/downlaod/" + res2 = session.post( + api, + headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, + json=json_data, + ).json() + json_data2 = { + "id": res2["data"], + "method": "publicUserDownlaod", + } + api2 = "https://new2.filepress.store/api/file/downlaod2/" + res = session.post( + api2, + headers={"Referer": f"{raw.scheme}://{raw.hostname}"}, + json=json_data2, + ).json() + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + if "data" not in res: + raise DirectDownloadLinkException(f"ERROR: {res['statusText']}") + return f"https://drive.google.com/uc?id={res['data']}&export=download" + + +def gdtot(url): + cget = create_scraper().request + try: + res = cget("GET", f"https://gdtot.pro/file/{url.split('/')[-1]}") + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + token_url = HTML(res.text).xpath( + "//a[contains(@class,'inline-flex items-center justify-center')]/@href", + ) + if not token_url: + try: + url = cget("GET", url).url + p_url = urlparse(url) + res = cget( + "GET", + f"{p_url.scheme}://{p_url.hostname}/ddl/{url.split('/')[-1]}", + ) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + if ( + drive_link := findall(r"myDl\('(.*?)'\)", res.text) + ) and "drive.google.com" in drive_link[0]: + return drive_link[0] + raise DirectDownloadLinkException( + "ERROR: Drive Link not found, Try in your browser", + ) + token_url = token_url[0] + try: + token_page = cget("GET", token_url) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} with {token_url}", + ) from e + path = findall(r'\("(.*?)"\)', token_page.text) + if not path: + raise DirectDownloadLinkException("ERROR: Cannot bypass this") + path = path[0] + raw = urlparse(token_url) + final_url = f"{raw.scheme}://{raw.hostname}{path}" + return sharer_scraper(final_url) + + +def sharer_scraper(url): cget = create_scraper().request try: url = cget("GET", url).url raw = urlparse(url) header = { - "useragent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10" + "useragent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10", } res = cget("GET", url, headers=header) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e key = findall(r'"key",\s+"(.*?)"', res.text) if not key: - raise DirectDownloadLinkError("ERROR: Key not found!") + raise DirectDownloadLinkException("ERROR: Key not found!") key = key[0] if not HTML(res.text).xpath("//button[@id='drc']"): - raise DirectDownloadLinkError( - "ERROR: This link don't have direct download button" + raise DirectDownloadLinkException( + "ERROR: This link doesn't have a direct download button", ) boundary = uuid4() headers = { @@ -544,25 +812,31 @@ ) try: res = cget( - "POST", url, cookies=res.cookies, headers=headers, data=data + "POST", + url, + cookies=res.cookies, + headers=headers, + data=data, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if "url" not in res: - raise DirectDownloadLinkError( - "ERROR: Drive Link not found, Try in your broswer" + raise DirectDownloadLinkException( + "ERROR: Drive Link not found, Try in your browser", ) if "drive.google.com" in res["url"]: return res["url"] try: res = cget("GET", res["url"]) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e if ( drive_link := HTML(res.text).xpath("//a[contains(@class,'btn')]/@href") ) and "drive.google.com" in drive_link[0]: return drive_link[0] - raise DirectDownloadLinkError("ERROR: Drive Link not found, Try in your broswer") + raise DirectDownloadLinkException( + "ERROR: Drive Link not found, Try in your browser", + ) def wetransfer(url): @@ -579,14 +853,16 @@ json=json_data, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if "direct_link" in res: return res["direct_link"] if "message" in res: - raise DirectDownloadLinkError(f"ERROR: {res['message']}") + raise DirectDownloadLinkException(f"ERROR: {res['message']}") if "error" in res: - raise DirectDownloadLinkError(f"ERROR: {res['error']}") - raise DirectDownloadLinkError("ERROR: cannot find direct link") + raise DirectDownloadLinkException(f"ERROR: {res['error']}") + raise DirectDownloadLinkException("ERROR: cannot find direct link") def akmfiles(url): @@ -594,26 +870,31 @@ try: html = HTML( session.post( - url, data={"op": "download2", "id": url.split("/")[-1]} - ).text + url, + data={"op": "download2", "id": url.split("/")[-1]}, + ).text, ) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if direct_link := html.xpath("//a[contains(@class,'btn btn-dow')]/@href"): return direct_link[0] - raise DirectDownloadLinkError("ERROR: 
Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") def shrdsk(url): with create_scraper() as session: try: _json = session.get( - f'https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split("/")[-1]}' + f"https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split('/')[-1]}", ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if "download_data" not in _json: - raise DirectDownloadLinkError("ERROR: Download data not found") + raise DirectDownloadLinkException("ERROR: Download data not found") try: _res = session.get( f"https://shrdsk.me/download/{_json['download_data']}", @@ -622,8 +903,10 @@ def shrdsk(url): if "Location" in _res.headers: return _res.headers["Location"] except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") - raise DirectDownloadLinkError("ERROR: cannot find direct link in headers") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + raise DirectDownloadLinkException("ERROR: cannot find direct link in headers") def linkBox(url: str): @@ -631,25 +914,28 @@ def linkBox(url: str): try: shareToken = parsed_url.path.split("/")[-1] except Exception: - raise DirectDownloadLinkError("ERROR: invalid URL") + raise DirectDownloadLinkException("ERROR: invalid URL") details = {"contents": [], "title": "", "total_size": 0} def __singleItem(session, itemId): try: _json = session.get( - "https://www.linkbox.to/api/file/detail", params={"itemId": itemId} + "https://www.linkbox.to/api/file/detail", + params={"itemId": itemId}, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e data = _json["data"] if not data: if "msg" in _json: - raise DirectDownloadLinkError(f"ERROR: {_json['msg']}") - raise DirectDownloadLinkError("ERROR: data not found") + raise DirectDownloadLinkException(f"ERROR: {_json['msg']}") + raise DirectDownloadLinkException("ERROR: data not found") itemInfo = data["itemInfo"] if not itemInfo: - raise DirectDownloadLinkError("ERROR: itemInfo not found") + raise DirectDownloadLinkException("ERROR: itemInfo not found") filename = itemInfo["name"] sub_type = itemInfo.get("sub_type") if sub_type and not filename.endswith(sub_type): @@ -676,17 +962,23 @@ def __fetch_links(session, _id=0, folderPath=""): } try: _json = session.get( - "https://www.linkbox.to/api/file/share_out_list", params=params + "https://www.linkbox.to/api/file/share_out_list", + params=params, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e data = _json["data"] if not data: if "msg" in _json: - raise DirectDownloadLinkError(f"ERROR: {_json['msg']}") - raise DirectDownloadLinkError("ERROR: data not found") - if data["shareType"] == "singleItem": - return __singleItem(session, data["itemId"]) + raise DirectDownloadLinkException(f"ERROR: {_json['msg']}") + raise DirectDownloadLinkException("ERROR: data not found") + try: + if data["shareType"] == "singleItem": + return __singleItem(session, data["itemId"]) + except Exception: + pass if not details["title"]: details["title"] = data["dirName"] contents = data["list"] @@ -695,9 +987,9 @@ def __fetch_links(session, _id=0, 
folderPath=""): for content in contents: if content["type"] == "dir" and "url" not in content: if not folderPath: - newFolderPath = path.join(details["title"], content["name"]) + newFolderPath = ospath.join(details["title"], content["name"]) else: - newFolderPath = path.join(folderPath, content["name"]) + newFolderPath = ospath.join(folderPath, content["name"]) if not details["title"]: details["title"] = content["name"] __fetch_links(session, content["id"], newFolderPath) @@ -706,11 +998,11 @@ def __fetch_links(session, _id=0, folderPath=""): folderPath = details["title"] filename = content["name"] if (sub_type := content.get("sub_type")) and not filename.endswith( - sub_type + sub_type, ): filename += f".{sub_type}" item = { - "path": path.join(folderPath), + "path": ospath.join(folderPath), "filename": filename, "url": content["url"], } @@ -725,7 +1017,7 @@ def __fetch_links(session, _id=0, folderPath=""): try: with Session() as session: __fetch_links(session) - except DirectDownloadLinkError as e: + except DirectDownloadLinkException as e: raise e return details @@ -740,7 +1032,7 @@ def gofile(url): _password = "" _id = url.split("/")[-1] except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") def __get_token(session): headers = { @@ -753,7 +1045,7 @@ def __get_token(session): try: __res = session.post(__url, headers=headers).json() if __res["status"] != "ok": - raise DirectDownloadLinkError("ERROR: Failed to get token.") + raise DirectDownloadLinkException("ERROR: Failed to get token.") return __res["data"]["token"] except Exception as e: raise e @@ -772,17 +1064,19 @@ def __fetch_links(session, _id, folderPath=""): try: _json = session.get(_url, headers=headers).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") if _json["status"] in "error-passwordRequired": - raise DirectDownloadLinkError( - f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}" + raise DirectDownloadLinkException( + f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}", ) if _json["status"] in "error-passwordWrong": - raise DirectDownloadLinkError("ERROR: This password is wrong !") + raise DirectDownloadLinkException("ERROR: This password is wrong !") if _json["status"] in "error-notFound": - raise DirectDownloadLinkError("ERROR: File not found on gofile's server") + raise DirectDownloadLinkException( + "ERROR: File not found on gofile's server", + ) if _json["status"] in "error-notPublic": - raise DirectDownloadLinkError("ERROR: This folder is not public") + raise DirectDownloadLinkException("ERROR: This folder is not public") data = _json["data"] @@ -795,15 +1089,15 @@ def __fetch_links(session, _id, folderPath=""): if not content["public"]: continue if not folderPath: - newFolderPath = path.join(details["title"], content["name"]) + newFolderPath = ospath.join(details["title"], content["name"]) else: - newFolderPath = path.join(folderPath, content["name"]) + newFolderPath = ospath.join(folderPath, content["name"]) __fetch_links(session, content["id"], newFolderPath) else: if not folderPath: folderPath = details["title"] item = { - "path": path.join(folderPath), + "path": ospath.join(folderPath), "filename": content["name"], "url": content["link"], } @@ -819,12 +1113,12 @@ def __fetch_links(session, _id, folderPath=""): try: token = __get_token(session) except Exception as e: - raise 
DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") details["header"] = f"Cookie: accountToken={token}" try: __fetch_links(session, _id) except Exception as e: - raise DirectDownloadLinkError(e) + raise DirectDownloadLinkException(e) if len(details["contents"]) == 1: return (details["contents"][0]["url"], details["header"]) @@ -832,19 +1126,24 @@ def __fetch_links(session, _id, folderPath=""): def mediafireFolder(url): + if "::" in url: + _password = url.split("::")[-1] + url = url.split("::")[-2] + else: + _password = "" try: raw = url.split("/", 4)[-1] folderkey = raw.split("/", 1)[0] folderkey = folderkey.split(",") except Exception: - raise DirectDownloadLinkError("ERROR: Could not parse ") + raise DirectDownloadLinkException("ERROR: Could not parse ") if len(folderkey) == 1: folderkey = folderkey[0] details = {"contents": [], "title": "", "total_size": 0, "header": ""} - session = req_session() + session = create_scraper() adapter = HTTPAdapter( - max_retries=Retry(total=10, read=10, connect=10, backoff_factor=0.3) + max_retries=Retry(total=10, read=10, connect=10, backoff_factor=0.3), ) session.mount("http://", adapter) session.mount("https://", adapter) @@ -868,8 +1167,8 @@ def __get_info(folderkey): }, ).json() except Exception as e: - raise DirectDownloadLinkError( - f"ERROR: {e.__class__.__name__} While getting info" + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While getting info", ) _res = _json["response"] if "folder_infos" in _res: @@ -877,24 +1176,51 @@ def __get_info(folderkey): elif "folder_info" in _res: folder_infos.append(_res["folder_info"]) elif "message" in _res: - raise DirectDownloadLinkError(f"ERROR: {_res['message']}") + raise DirectDownloadLinkException(f"ERROR: {_res['message']}") else: - raise DirectDownloadLinkError("ERROR: something went wrong!") + raise DirectDownloadLinkException("ERROR: something went wrong!") try: __get_info(folderkey) except Exception as e: - raise DirectDownloadLinkError(e) + raise DirectDownloadLinkException(e) details["title"] = folder_infos[0]["name"] def __scraper(url): + session = create_scraper() + parsed_url = urlparse(url) + url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}" + + def __repair_download(url): + try: + html = HTML(session.get(url).text) + if new_link := html.xpath('//a[@id="continue-btn"]/@href'): + return __scraper(f"https://mediafire.com/{new_link[0]}") + except Exception: + return None + try: html = HTML(session.get(url).text) except Exception: return None - if final_link := html.xpath("//a[@id='downloadButton']/@href"): + if html.xpath("//div[@class='passwordPrompt']"): + if not _password: + raise DirectDownloadLinkException( + f"ERROR: {PASSWORD_ERROR_MESSAGE}".format(url), + ) + try: + html = HTML(session.post(url, data={"downloadp": _password}).text) + except Exception: + return None + if html.xpath("//div[@class='passwordPrompt']"): + return None + if final_link := html.xpath('//a[@aria-label="Download file"]/@href'): + if final_link[0].startswith("//"): + return __scraper(f"https://{final_link[0][2:]}") return final_link[0] + if repair_link := html.xpath("//a[@class='retry']/@href"): + return __repair_download(repair_link[0]) return None def __get_content(folderKey, folderPath="", content_type="folders"): @@ -909,20 +1235,20 @@ def __get_content(folderKey, folderPath="", content_type="folders"): params=params, ).json() except Exception as e: - raise DirectDownloadLinkError( - f"ERROR: 
{e.__class__.__name__} While getting content" + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While getting content", ) _res = _json["response"] if "message" in _res: - raise DirectDownloadLinkError(f"ERROR: {_res['message']}") + raise DirectDownloadLinkException(f"ERROR: {_res['message']}") _folder_content = _res["folder_content"] if content_type == "folders": folders = _folder_content["folders"] for folder in folders: if folderPath: - newFolderPath = path.join(folderPath, folder["name"]) + newFolderPath = ospath.join(folderPath, folder["name"]) else: - newFolderPath = path.join(folder["name"]) + newFolderPath = ospath.join(folder["name"]) __get_content(folder["folderkey"], newFolderPath) __get_content(folderKey, folderPath, "files") else: @@ -934,7 +1260,7 @@ def __get_content(folderKey, folderPath="", content_type="folders"): item["filename"] = file["filename"] if not folderPath: folderPath = details["title"] - item["path"] = path.join(folderPath) + item["path"] = ospath.join(folderPath) item["url"] = _url if "size" in file: size = file["size"] @@ -947,7 +1273,7 @@ def __get_content(folderKey, folderPath="", content_type="folders"): for folder in folder_infos: __get_content(folder["folderkey"], folder["name"]) except Exception as e: - raise DirectDownloadLinkError(e) + raise DirectDownloadLinkException(e) finally: session.close() if len(details["contents"]) == 1: @@ -968,7 +1294,7 @@ def cf_bypass(url): return _json["solution"]["response"] except Exception as e: e - raise DirectDownloadLinkError("ERROR: Con't bypass cloudflare") + raise DirectDownloadLinkException("ERROR: Can't bypass cloudflare") def send_cm_file(url, file_id=None): @@ -983,11 +1309,13 @@ def send_cm_file(url, file_id=None): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if html.xpath("//input[@name='password']"): _passwordNeed = True if not (file_id := html.xpath("//input[@name='id']/@value")): - raise DirectDownloadLinkError("ERROR: file_id not found") + raise DirectDownloadLinkException("ERROR: file_id not found") try: data = {"op": "download2", "id": file_id} if _password and _passwordNeed: @@ -996,12 +1324,14 @@ def send_cm_file(url, file_id=None): if "Location" in _res.headers: return (_res.headers["Location"], "Referer: https://send.cm/") except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if _passwordNeed: - raise DirectDownloadLinkError( - f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}" + raise DirectDownloadLinkException( + f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}", ) - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") def send_cm(url): @@ -1030,12 +1360,16 @@ def __collectFolders(html): folders = [] folders_urls = html.xpath("//h6/a/@href") folders_names = html.xpath("//h6/a/text()") - for folders_url, folders_name in zip(folders_urls, folders_names): + for folders_url, folders_name in zip( + folders_urls, + folders_names, + strict=False, + ): folders.append( { "folder_link": folders_url.strip(), "folder_name": folders_name.strip(), - } + }, ) return folders @@ -1056,13 +1390,18 @@ def __getFiles(html): hrefs = html.xpath('//tr[@class="selectable"]//a/@href') file_names = 
html.xpath('//tr[@class="selectable"]//a/text()') sizes = html.xpath('//tr[@class="selectable"]//span/text()') - for href, file_name, size_text in zip(hrefs, file_names, sizes): + for href, file_name, size_text in zip( + hrefs, + file_names, + sizes, + strict=False, + ): files.append( { "file_id": href.split("/")[-1], "file_name": file_name.strip(), - "size": text_to_bytes(size_text.strip()), - } + "size": speed_string_to_bytes(size_text.strip()), + }, ) return files @@ -1070,7 +1409,7 @@ def __writeContents(html_text, folderPath=""): folders = __collectFolders(html_text) for folder in folders: _html = HTML(cf_bypass(folder["folder_link"])) - __writeContents(_html, path.join(folderPath, folder["folder_name"])) + __writeContents(_html, ospath.join(folderPath, folder["folder_name"])) files = __getFiles(html_text) for file in files: if not (link := __getFile_link(file["file_id"])): @@ -1081,23 +1420,23 @@ def __writeContents(html_text, folderPath=""): try: mainHtml = HTML(cf_bypass(url)) - except DirectDownloadLinkError as e: + except DirectDownloadLinkException as e: session.close() raise e except Exception as e: session.close() - raise DirectDownloadLinkError( - f"ERROR: {e.__class__.__name__} While getting mainHtml" + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While getting mainHtml", ) try: __writeContents(mainHtml, details["title"]) - except DirectDownloadLinkError as e: + except DirectDownloadLinkException as e: session.close() raise e except Exception as e: session.close() - raise DirectDownloadLinkError( - f"ERROR: {e.__class__.__name__} While writing Contents" + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While writing Contents", ) session.close() if len(details["contents"]) == 1: @@ -1108,34 +1447,29 @@ def __writeContents(html_text, folderPath=""): def doods(url): if "/e/" in url: url = url.replace("/e/", "/d/") - api_url = f"https://api.pake.tk/dood?url={url}" - response = get(api_url) - if response.status_code != 200: - raise DirectDownloadLinkError("ERROR: Failed to fetch direct link from API") - json_data = response.json() - if direct_link := json_data.get("data", {}).get("direct_link"): - return f"https://dd-cdn.pakai.eu.org/download?url={direct_link}" - raise DirectDownloadLinkError("ERROR: Direct link not found in API response") - - -def hubdrive(url): - try: - rs = Session() - p_url = urlparse(url) - js_query = rs.post( - f"{p_url.scheme}://{p_url.hostname}/ajax.php?ajax=direct-download", - data={"id": str(url.split("/")[-1])}, - headers={"x-requested-with": "XMLHttpRequest"}, - ).json() - if str(js_query["code"]) == "200": - dlink = f"{p_url.scheme}://{p_url.hostname}{js_query['file']}" - res = rs.get(dlink) - soup = BeautifulSoup(res.text, "html.parser") - gd_data = soup.select('a[class="btn btn-primary btn-user"]') - gd_link = gd_data[0]["href"] - return gd_link - except Exception: - raise DirectDownloadLinkError("ERROR: Download link not found try again") + parsed_url = urlparse(url) + with create_scraper() as session: + try: + html = HTML(session.get(url).text) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While fetching token link", + ) from e + if not (link := html.xpath("//div[@class='download-content']//a/@href")): + raise DirectDownloadLinkException( + "ERROR: Token Link not found or maybe not allow to download! 
open in browser.", + ) + link = f"{parsed_url.scheme}://{parsed_url.hostname}{link[0]}" + sleep(2) + try: + _res = session.get(link) + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While fetching download link", + ) from e + if not (link := search(r"window\.open\('(\S+)'", _res.text)): + raise DirectDownloadLinkException("ERROR: Download link not found try again") + return (link.group(1), f"Referer: {parsed_url.scheme}://{parsed_url.hostname}/") def easyupload(url): @@ -1149,14 +1483,14 @@ def easyupload(url): try: _res = session.get(url) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") first_page_html = HTML(_res.text) if ( first_page_html.xpath("//h6[contains(text(),'Password Protected')]") and not _password ): - raise DirectDownloadLinkError( - f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}" + raise DirectDownloadLinkException( + f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}", ) if not ( match := search( @@ -1164,8 +1498,8 @@ def easyupload(url): _res.text, ) ): - raise DirectDownloadLinkError( - "ERROR: Failed to get server for EasyUpload Link" + raise DirectDownloadLinkException( + "ERROR: Failed to get server for EasyUpload Link", ) action_url = match.group() session.headers.update({"referer": "https://easyupload.io/"}) @@ -1179,7 +1513,7 @@ def easyupload(url): "cb": "c3o1vbaxbmwe", } if not (captcha_token := get_captcha_token(session, recaptcha_params)): - raise DirectDownloadLinkError("ERROR: Captcha token not found") + raise DirectDownloadLinkException("ERROR: Captcha token not found") try: data = { "type": "download-token", @@ -1190,19 +1524,21 @@ def easyupload(url): } json_resp = session.post(url=action_url, data=data).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if "download_link" in json_resp: return json_resp["download_link"] if "data" in json_resp: - raise DirectDownloadLinkError( - f"ERROR: Failed to generate direct link due to {json_resp['data']}" + raise DirectDownloadLinkException( + f"ERROR: Failed to generate direct link due to {json_resp['data']}", ) - raise DirectDownloadLinkError( - "ERROR: Failed to generate direct link from EasyUpload." 
+ raise DirectDownloadLinkException( + "ERROR: Failed to generate direct link from EasyUpload.", ) -def filewish(url): +def filelions_and_streamwish(url): parsed_url = urlparse(url) hostname = parsed_url.hostname scheme = parsed_url.scheme @@ -1215,25 +1551,26 @@ def filewish(url): "filelions.site", "cabecabean.lol", "filelions.online", + "mycloudz.cc", ] ): - apiKey = config_dict["FILELION_API"] - apiUrl = "https://api.filelions.co" + apiKey = Config.FILELION_API + apiUrl = "https://vidhideapi.com" elif any( x in hostname for x in [ "embedwish.com", - "streamwish.com", + "kissmovies.net", "kitabmarkaz.xyz", "wishfast.top", "streamwish.to", ] ): - apiKey = config_dict["STREAMWISH_API"] + apiKey = Config.STREAMWISH_API apiUrl = "https://api.streamwish.com" if not apiKey: - raise DirectDownloadLinkError( - f"ERROR: API is not provided get it from {scheme}://{hostname}" + raise DirectDownloadLinkException( + f"ERROR: API is not provided get it from {scheme}://{hostname}", ) file_code = url.split("/")[-1] quality = "" @@ -1249,12 +1586,14 @@ def filewish(url): params={"key": apiKey, "file_code": file_code, "hls": "1"}, ).json() except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if _res["status"] != 200: - raise DirectDownloadLinkError(f"ERROR: {_res['msg']}") + raise DirectDownloadLinkException(f"ERROR: {_res['msg']}") result = _res["result"] if not result["versions"]: - raise DirectDownloadLinkError("ERROR: File Not Found") + raise DirectDownloadLinkException("ERROR: File Not Found") error = "\nProvide a quality to download the video\nAvailable Quality:" for version in result["versions"]: if quality == version["name"]: @@ -1268,10 +1607,10 @@ def filewish(url): elif version["name"] == "h": error += "\nHD" error += f" {url}_{version['name']}" - raise DirectDownloadLinkError(f"ERROR: {error}") + raise DirectDownloadLinkException(f"ERROR: {error}") -def streamvid(url): +def streamvid(url: str): file_code = url.split("/")[-1] parsed_url = urlparse(url) url = f"{parsed_url.scheme}://{parsed_url.hostname}/d/{file_code}" @@ -1280,43 +1619,49 @@ def streamvid(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if quality_defined: data = {} if not (inputs := html.xpath('//form[@id="F1"]//input')): - raise DirectDownloadLinkError("ERROR: No inputs found") + raise DirectDownloadLinkException("ERROR: No inputs found") for i in inputs: if key := i.get("name"): data[key] = i.get("value") try: html = HTML(session.post(url, data=data).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if not ( script := html.xpath( - '//script[contains(text(),"document.location.href")]/text()' + '//script[contains(text(),"document.location.href")]/text()', ) ): if error := html.xpath( - '//div[@class="alert alert-danger"][1]/text()[2]' + '//div[@class="alert alert-danger"][1]/text()[2]', ): - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - raise DirectDownloadLinkError("ERROR: direct link script not found!") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + raise DirectDownloadLinkException( + "ERROR: direct link script not found!", + ) if directLink := findall(r'document\.location\.href="(.*)"', 
script[0]): return directLink[0] - raise DirectDownloadLinkError( - "ERROR: direct link not found! in the script" + raise DirectDownloadLinkException( + "ERROR: direct link not found! in the script", ) if (qualities_urls := html.xpath('//div[@id="dl_versions"]/a/@href')) and ( qualities := html.xpath('//div[@id="dl_versions"]/a/text()[2]') ): error = "\nProvide a quality to download the video\nAvailable Quality:" - for quality_url, quality in zip(qualities_urls, qualities): + for quality_url, quality in zip(qualities_urls, qualities, strict=False): error += f"\n{quality.strip()} {quality_url}" - raise DirectDownloadLinkError(f"ERROR: {error}") + raise DirectDownloadLinkException(f"ERROR: {error}") if error := html.xpath('//div[@class="not-found-text"]/text()'): - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - raise DirectDownloadLinkError("ERROR: Something went wrong") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + raise DirectDownloadLinkException("ERROR: Something went wrong") def streamhub(url): @@ -1327,9 +1672,11 @@ def streamhub(url): try: html = HTML(session.get(url).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if not (inputs := html.xpath('//form[@name="F1"]//input')): - raise DirectDownloadLinkError("ERROR: No inputs found") + raise DirectDownloadLinkException("ERROR: No inputs found") data = {} for i in inputs: if key := i.get("name"): @@ -1339,14 +1686,16 @@ def streamhub(url): try: html = HTML(session.post(url, data=data).text) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if directLink := html.xpath( - '//a[@class="btn btn-primary btn-go downloadbtn"]/@href' + '//a[@class="btn btn-primary btn-go downloadbtn"]/@href', ): return directLink[0] if error := html.xpath('//div[@class="alert alert-danger"]/text()[2]'): - raise DirectDownloadLinkError(f"ERROR: {error[0]}") - raise DirectDownloadLinkError("ERROR: direct link not found!") + raise DirectDownloadLinkException(f"ERROR: {error[0]}") + raise DirectDownloadLinkException("ERROR: direct link not found!") def pcloud(url): @@ -1354,7 +1703,91 @@ def pcloud(url): try: res = session.get(url) except Exception as e: - raise DirectDownloadLinkError(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e if link := findall(r".downloadlink.:..(https:.*)..", res.text): return link[0].replace(r"\/", "/") - raise DirectDownloadLinkError("ERROR: Direct link not found") + raise DirectDownloadLinkException("ERROR: Direct link not found") + + +def tmpsend(url): + parsed_url = urlparse(url) + if any(x in parsed_url.path for x in ["thank-you", "download"]): + query_params = parse_qs(parsed_url.query) + if file_id := query_params.get("d"): + file_id = file_id[0] + elif not (file_id := parsed_url.path.strip("/")): + raise DirectDownloadLinkException("ERROR: Invalid URL format") + referer_url = f"https://tmpsend.com/thank-you?d={file_id}" + header = f"Referer: {referer_url}" + download_link = f"https://tmpsend.com/download?d={file_id}" + return download_link, header + + +def qiwi(url): + """qiwi.gg link generator + based on https://github.com/aenulrofik""" + with Session() as session: + file_id = url.split("/")[-1] + try: + res = session.get(url).text + except Exception as e: + raise DirectDownloadLinkException( 
+ f"ERROR: {e.__class__.__name__}", + ) from e + tree = HTML(res) + if name := tree.xpath('//h1[@class="page_TextHeading__VsM7r"]/text()'): + ext = name[0].split(".")[-1] + return f"https://spyderrock.com/{file_id}.{ext}" + raise DirectDownloadLinkException("ERROR: File not found") + + +def mp4upload(url): + with Session() as session: + try: + url = url.replace("embed-", "") + req = session.get(url).text + tree = HTML(req) + inputs = tree.xpath("//input") + header = {"Referer": "https://www.mp4upload.com/"} + data = {input.get("name"): input.get("value") for input in inputs} + if not data: + raise DirectDownloadLinkException("ERROR: File Not Found!") + post = session.post( + url, + data=data, + headers={ + "User-Agent": user_agent, + "Referer": "https://www.mp4upload.com/", + }, + ).text + tree = HTML(post) + inputs = tree.xpath('//form[@name="F1"]//input') + data = { + input.get("name"): input.get("value").replace(" ", "") + for input in inputs + } + if not data: + raise DirectDownloadLinkException("ERROR: File Not Found!") + data["referer"] = url + direct_link = session.post(url, data=data).url + return direct_link, header + except Exception: + raise DirectDownloadLinkException("ERROR: File Not Found!") + + +def berkasdrive(url): + """berkasdrive.com link generator + by https://github.com/aenulrofik""" + with Session() as session: + try: + sesi = session.get(url).text + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__}", + ) from e + html = HTML(sesi) + if link := html.xpath("//script")[0].text.split('"')[1]: + return b64decode(link).decode("utf-8") + raise DirectDownloadLinkException("ERROR: File Not Found!") diff --git a/bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md b/bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md new file mode 100644 index 000000000..3e1d62680 --- /dev/null +++ b/bot/helper/mirror_leech_utils/download_utils/direct_link_generator_license.md @@ -0,0 +1,82 @@ + RAPHIELSCAPE PUBLIC LICENSE + Version 1.c, June 2019 + +Copyright (C) 2019 Raphielscape LLC. +Copyright (C) 2019 Devscapes Open Source Holding GmbH. + +Everyone is permitted to copy and distribute verbatim or modified +copies of this license document, and changing it is allowed as long +as the name is changed. + + RAPHIELSCAPE PUBLIC LICENSE + A-1. DEFINITIONS + +0. “This License” refers to version 1.c of the Raphielscape Public License. + +1. “Copyright” also means copyright-like laws that apply to other kinds of works. + +2. “The Work" refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. + “Licensees” and “recipients” may be individuals or organizations. + +3. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, + other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work + or a work “based on” the earlier work. + +4. Source Form. The “source form” for a work means the preferred form of the work for making modifications to it. + “Object code” means any non-source form of a work. + +The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and +(for an executable work) run the object code and to modify the work, including scripts to control those activities. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the +Corresponding Source. 
+The Corresponding Source for a work in source code form is that same work. + +5. "The author" refers to "author" of the code, which is the one that made the particular code which exists inside of + the Corresponding Source. + +6. "Owner" refers to any parties which is made the early form of the Corresponding Source. + + A-2. TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. You must give any other recipients of the Work or Derivative Works a copy of this License; and + +1. You must cause any modified files to carry prominent notices stating that You changed the files; and + +2. You must retain, in the Source form of any Derivative Works that You distribute, + this license, all copyright, patent, trademark, authorships and attribution notices + from the Source form of the Work; and + +3. Respecting the author and owner of works that are distributed in any way. + +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, +or distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + + B. DISCLAIMER OF WARRANTY + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + C. REVISED VERSION OF THIS LICENSE + +The Devscapes Open Source Holding GmbH. may publish revised and/or new versions of the +Raphielscape Public License from time to time. Such new versions will be similar in spirit +to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies that a +certain numbered version of the Raphielscape Public License "or any later version" applies to it, +you have the option of following the terms and conditions either of that numbered version or of +any later version published by the Devscapes Open Source Holding GmbH. If the Program does not specify a +version number of the Raphielscape Public License, you may choose any version ever published +by the Devscapes Open Source Holding GmbH. 
+ + END OF LICENSE diff --git a/bot/helper/mirror_leech_utils/download_utils/gd_download.py b/bot/helper/mirror_leech_utils/download_utils/gd_download.py index 4b8569bdc..e269c99fd 100644 --- a/bot/helper/mirror_leech_utils/download_utils/gd_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/gd_download.py @@ -1,78 +1,61 @@ from secrets import token_hex -from bot import ( - LOGGER, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) +from bot import LOGGER, task_dict, task_dict_lock from bot.helper.ext_utils.bot_utils import sync_to_async -from bot.helper.aeon_utils.nsfw_check import is_nsfw, is_nsfw_data from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) -from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper +from bot.helper.mirror_leech_utils.gdrive_utils.count import GoogleDriveCount +from bot.helper.mirror_leech_utils.gdrive_utils.download import GoogleDriveDownload +from bot.helper.mirror_leech_utils.status_utils.gdrive_status import ( + GoogleDriveStatus, +) from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus -from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus +from bot.helper.telegram_helper.message_utils import send_status_message -async def add_gd_download(link, path, listener, newname): - drive = GoogleDriveHelper() - name, mime_type, size, _, _ = await sync_to_async(drive.count, link) +async def add_gd_download(listener, path): + drive = GoogleDriveCount() + name, mime_type, listener.size, _, _ = await sync_to_async( + drive.count, + listener.link, + listener.user_id, + ) if mime_type is None: - await listener.onDownloadError(name) + await listener.on_download_error(name) return - id = drive.getIdFromUrl(link) - data = drive.getFilesByFolderId(id) - name = newname or name - gid = token_hex(4) - if is_nsfw(name) or is_nsfw_data(data): - await listener.onDownloadError("NSFW detected") - return + listener.name = listener.name or name + gid = token_hex(4) - msg, button = await stop_duplicate_check(name, listener) + msg, button = await stop_duplicate_check(listener) if msg: - await send_message(listener.message, msg, button) + await listener.on_download_error(msg, button) return - if limit_exceeded := await limit_checker(size, listener, is_drive_link=True): - await listener.onDownloadError(limit_exceeded) - return - added_to_queue, event = await is_queued(listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, "dl" - ) + + add_to_queue, event = await check_running_tasks(listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {listener.name}") + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "dl") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await send_status_message(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False - - drive = GoogleDriveHelper(name, path, listener) - async with download_dict_lock: - download_dict[listener.uid] = GdriveStatus( - drive, size, listener.message, gid, "dl" - ) + if listener.is_cancelled: + return - async with 
queue_dict_lock: - non_queued_dl.add(listener.uid) + drive = GoogleDriveDownload(listener, path) + async with task_dict_lock: + task_dict[listener.mid] = GoogleDriveStatus(listener, drive, gid, "dl") - if from_queue: - LOGGER.info(f"Start Queued Download from GDrive: {name}") + if add_to_queue: + LOGGER.info(f"Start Queued Download from GDrive: {listener.name}") else: - LOGGER.info(f"Download from GDrive: {name}") + LOGGER.info(f"Download from GDrive: {listener.name}") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await send_status_message(listener.message) - await sync_to_async(drive.download, link) + await sync_to_async(drive.download) diff --git a/bot/helper/mirror_leech_utils/download_utils/mega_download.py b/bot/helper/mirror_leech_utils/download_utils/mega_download.py index 95ff4c840..48d3fa1a3 100644 --- a/bot/helper/mirror_leech_utils/download_utils/mega_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/mega_download.py @@ -1,225 +1,117 @@ -# ruff: noqa: ARG002 -from asyncio import Event from secrets import token_hex -from mega import MegaApi, MegaError, MegaRequest, MegaListener, MegaTransfer from aiofiles.os import makedirs +from mega import MegaApi from bot import ( LOGGER, - config_dict, - download_dict, non_queued_dl, queue_dict_lock, - download_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - async_to_sync, - sync_to_async, - get_mega_link_type, + task_dict, + task_dict_lock, ) +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.links_utils import get_mega_link_type from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) -from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage +from bot.helper.listeners.mega_listener import ( + AsyncExecutor, + MegaAppListener, + mega_login, + mega_logout, +) from bot.helper.mirror_leech_utils.status_utils.mega_status import MegaDownloadStatus from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus +from bot.helper.telegram_helper.message_utils import ( + auto_delete_message, + delete_links, + send_message, + send_status_message, +) -class MegaAppListener(MegaListener): - _NO_EVENT_ON = (MegaRequest.TYPE_LOGIN, MegaRequest.TYPE_FETCH_NODES) - NO_ERROR = "no error" - - def __init__(self, continue_event: Event, listener): - self.continue_event = continue_event - self.node = None - self.public_node = None - self.listener = listener - self.is_cancelled = False - self.error = None - self.__bytes_transferred = 0 - self.__speed = 0 - self.__name = "" - super().__init__() - - @property - def speed(self): - return self.__speed - - @property - def downloaded_bytes(self): - return self.__bytes_transferred - - def onRequestFinish(self, api, request, error): - if str(error).lower() != "no error": - self.error = error.copy() - LOGGER.error(f"Mega onRequestFinishError: {self.error}") - self.continue_event.set() - return - request_type = request.getType() - if request_type == MegaRequest.TYPE_LOGIN: - api.fetchNodes() - elif request_type == MegaRequest.TYPE_GET_PUBLIC_NODE: - self.public_node = request.getPublicMegaNode() - self.__name = self.public_node.getName() - elif request_type == MegaRequest.TYPE_FETCH_NODES: - LOGGER.info("Fetching Root Node.") - self.node = api.getRootNode() - self.__name = self.node.getName() - LOGGER.info(f"Node Name: {self.node.getName()}") - if ( - 
request_type not in self._NO_EVENT_ON - or self.node - and "cloud drive" not in self.__name.lower() - ): - self.continue_event.set() - - def onRequestTemporaryError(self, _, request, error: MegaError): - LOGGER.error(f"Mega Request error in {error}") - if not self.is_cancelled: - self.is_cancelled = True - async_to_sync( - self.listener.onDownloadError, - f"RequestTempError: {error.toString()}", - ) - self.error = error.toString() - self.continue_event.set() - - def onTransferUpdate(self, api: MegaApi, transfer: MegaTransfer): - if self.is_cancelled: - api.cancelTransfer(transfer, None) - self.continue_event.set() - return - self.__speed = transfer.getSpeed() - self.__bytes_transferred = transfer.getTransferredBytes() - - def onTransferFinish(self, api: MegaApi, transfer: MegaTransfer, error): - try: - if self.is_cancelled: - self.continue_event.set() - elif transfer.isFinished() and ( - transfer.isFolderTransfer() or transfer.getFileName() == self.__name - ): - async_to_sync(self.listener.on_download_complete) - self.continue_event.set() - except Exception as e: - LOGGER.error(e) - - def onTransferTemporaryError(self, api, transfer, error): - filen = transfer.getFileName() - state = transfer.getState() - errStr = error.toString() - LOGGER.error(f"Mega download error in file {transfer} {filen}: {error}") - if state in [1, 4]: - return - - self.error = errStr - if not self.is_cancelled: - self.is_cancelled = True - async_to_sync( - self.listener.onDownloadError, - f"TransferTempError: {errStr} ({filen})", - ) - self.continue_event.set() - - async def cancel_download(self): - self.is_cancelled = True - await self.listener.onDownloadError("Download Canceled by user") - - -class AsyncExecutor: - def __init__(self): - self.continue_event = Event() - - async def do(self, function, args): - self.continue_event.clear() - await sync_to_async(function, *args) - await self.continue_event.wait() - - -async def add_mega_download(mega_link, path, listener, name): - MEGA_EMAIL = config_dict["MEGA_EMAIL"] - MEGA_PASSWORD = config_dict["MEGA_PASSWORD"] +async def add_mega_download(listener, path): + email = Config.MEGA_EMAIL + password = Config.MEGA_PASSWORD executor = AsyncExecutor() - api = MegaApi(None, None, None, "aeon") + api = MegaApi(None, None, None, "Aeon") folder_api = None mega_listener = MegaAppListener(executor.continue_event, listener) api.addListener(mega_listener) - if MEGA_EMAIL and MEGA_PASSWORD: - await executor.do(api.login, (MEGA_EMAIL, MEGA_PASSWORD)) + await mega_login(executor, api, email, password) - if get_mega_link_type(mega_link) == "file": - await executor.do(api.getPublicNode, (mega_link,)) + if get_mega_link_type(listener.link) == "file": + await sync_to_async(executor.do, api.getPublicNode, (listener.link,)) node = mega_listener.public_node else: - folder_api = MegaApi(None, None, None, "aeon") + folder_api = MegaApi(None, None, None, "Aeon") folder_api.addListener(mega_listener) - await executor.do(folder_api.loginToFolder, (mega_link,)) + await sync_to_async(executor.do, folder_api.loginToFolder, (listener.link,)) node = await sync_to_async(folder_api.authorizeNode, mega_listener.node) - if mega_listener.error is not None: - await send_message(listener.message, str(mega_listener.error)) - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + + if mega_listener.error: + mmsg = await send_message(listener.message, str(mega_listener.error)) + await mega_logout(executor, api, folder_api) + await 
delete_links(listener.message) + await auto_delete_message(listener.message, mmsg) return - name = name or node.getName() - msg, button = await stop_duplicate_check(name, listener) + listener.name = listener.name or node.getName() + msg, button = await stop_duplicate_check(listener) if msg: - await send_message(listener.message, msg, button) - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + mmsg = await send_message(listener.message, msg, button) + await mega_logout(executor, api, folder_api) + await delete_links(listener.message) + await auto_delete_message(listener.message, mmsg) return gid = token_hex(4) - size = api.getSize(node) - if limit_exceeded := await limit_checker(size, listener, is_mega=True): - await listener.onDownloadError(limit_exceeded) - return + listener.size = api.getSize(node) - added_to_queue, event = await is_queued(listener.uid) + added_to_queue, event = await check_running_tasks(listener) if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, "Dl" - ) + LOGGER.info(f"Added to Queue/Download: {listener.name}") + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "Dl") await listener.on_download_start() - await sendStatusMessage(listener.message) + await send_status_message(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + async with task_dict_lock: + if listener.mid not in task_dict: + await mega_logout(executor, api, folder_api) return from_queue = True - LOGGER.info(f"Start Queued Download from Mega: {name}") + LOGGER.info(f"Start Queued Download from Mega: {listener.name}") else: from_queue = False - async with download_dict_lock: - download_dict[listener.uid] = MegaDownloadStatus( - name, size, gid, mega_listener, listener.message + async with task_dict_lock: + task_dict[listener.mid] = MegaDownloadStatus( + listener, + mega_listener, + gid, + "dl", ) async with queue_dict_lock: - non_queued_dl.add(listener.uid) + non_queued_dl.add(listener.mid) if from_queue: - LOGGER.info(f"Start Queued Download from Mega: {name}") + LOGGER.info(f"Start Queued Download from Mega: {listener.name}") else: await listener.on_download_start() - await sendStatusMessage(listener.message) - LOGGER.info(f"Download from Mega: {name}") + await send_status_message(listener.message) + LOGGER.info(f"Download from Mega: {listener.name}") await makedirs(path, exist_ok=True) - await executor.do(api.startDownload, (node, path, name, None, False, None)) - await executor.do(api.logout, ()) - if folder_api is not None: - await executor.do(folder_api.logout, ()) + await sync_to_async( + executor.do, + api.startDownload, + (node, path, listener.name, None, False, None), + ) + await mega_logout(executor, api, folder_api) diff --git a/bot/helper/mirror_leech_utils/download_utils/qbit_download.py b/bot/helper/mirror_leech_utils/download_utils/qbit_download.py index 2aaa762d2..352c87ca8 100644 --- a/bot/helper/mirror_leech_utils/download_utils/qbit_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/qbit_download.py @@ -1,96 +1,88 @@ -from time import time +from asyncio import sleep from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove +from aiofiles.os import remove -from bot import ( 
- LOGGER, - config_dict, - xnox_client, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) -from bot.helper.ext_utils.bot_utils import sync_to_async, bt_selection_buttons -from bot.helper.ext_utils.task_manager import is_queued +from bot import LOGGER, task_dict, task_dict_lock, xnox_client +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import bt_selection_buttons, sync_to_async +from bot.helper.ext_utils.task_manager import check_running_tasks from bot.helper.listeners.qbit_listener import on_download_start +from bot.helper.mirror_leech_utils.status_utils.qbit_status import QbittorrentStatus from bot.helper.telegram_helper.message_utils import ( - send_message, delete_message, - sendStatusMessage, + send_message, + send_status_message, ) -from bot.helper.mirror_leech_utils.status_utils.qbit_status import QbittorrentStatus -async def add_qb_torrent(link, path, listener, ratio, seed_time): - ADD_TIME = time() +async def add_qb_torrent(listener, path, ratio, seed_time): try: - url = link + url = listener.link tpath = None - if await aiopath.exists(link): + if await aiopath.exists(listener.link): url = None - tpath = link - added_to_queue, event = await is_queued(listener.uid) + tpath = listener.link + add_to_queue, event = await check_running_tasks(listener) op = await sync_to_async( xnox_client.torrents_add, url, tpath, path, - is_paused=added_to_queue, - tags=f"{listener.uid}", + is_paused=add_to_queue, + tags=f"{listener.mid}", ratio_limit=ratio, seeding_time_limit=seed_time, - headers={"user-agent": "Wget/1.12"}, ) if op.lower() == "ok.": tor_info = await sync_to_async( - xnox_client.torrents_info, tag=f"{listener.uid}" + xnox_client.torrents_info, + tag=f"{listener.mid}", ) if len(tor_info) == 0: while True: + if add_to_queue and event.is_set(): + add_to_queue = False tor_info = await sync_to_async( - xnox_client.torrents_info, tag=f"{listener.uid}" + xnox_client.torrents_info, + tag=f"{listener.mid}", ) if len(tor_info) > 0: break - if time() - ADD_TIME >= 120: - await listener.onDownloadError( - "Not added! Check if the link is valid or not. If it's torrent file then report, this happens if torrent file size above 10mb." - ) - return + await sleep(1) tor_info = tor_info[0] + listener.name = tor_info.name ext_hash = tor_info.hash else: - await listener.onDownloadError( - "This Torrent already added or unsupported/invalid link/file." + await listener.on_download_error( + "This Torrent already added or unsupported/invalid link/file.", ) return - async with download_dict_lock: - download_dict[listener.uid] = QbittorrentStatus( - listener, queued=added_to_queue + async with task_dict_lock: + task_dict[listener.mid] = QbittorrentStatus( + listener, + queued=add_to_queue, ) - await on_download_start(f"{listener.uid}") + await on_download_start(f"{listener.mid}") - if added_to_queue: + if add_to_queue: LOGGER.info( - f"Added to Queue/Download: {tor_info.name} - Hash: {ext_hash}" + f"Added to Queue/Download: {tor_info.name} - Hash: {ext_hash}", ) else: - async with queue_dict_lock: - non_queued_dl.add(listener.uid) LOGGER.info(f"QbitDownload started: {tor_info.name} - Hash: {ext_hash}") await listener.on_download_start() - if config_dict["BASE_URL"] and listener.select: - if link.startswith("magnet:"): + if Config.BASE_URL and listener.select: + if listener.link.startswith("magnet:"): metamsg = "Downloading Metadata, wait then you can select files. Use torrent file to avoid this wait." 
meta = await send_message(listener.message, metamsg) while True: tor_info = await sync_to_async( - xnox_client.torrents_info, tag=f"{listener.uid}" + xnox_client.torrents_info, + tag=f"{listener.mid}", ) if len(tor_info) == 0: await delete_message(meta) @@ -100,7 +92,7 @@ async def add_qb_torrent(link, path, listener, ratio, seed_time): if tor_info.state not in [ "metaDL", "checkingResumeData", - "pausedDL", + "stoppedDL", ]: await delete_message(meta) break @@ -109,33 +101,35 @@ async def add_qb_torrent(link, path, listener, ratio, seed_time): return ext_hash = tor_info.hash - if not added_to_queue: + if not add_to_queue: await sync_to_async( - xnox_client.torrents_pause, torrent_hashes=ext_hash + xnox_client.torrents_stop, + torrent_hashes=ext_hash, ) - s_buttons = bt_selection_buttons(ext_hash) + SBUTTONS = bt_selection_buttons(ext_hash) msg = "Your download paused. Choose files then press Done Selecting button to start downloading." - await send_message(listener.message, msg, s_buttons) - else: - await sendStatusMessage(listener.message) - - if added_to_queue: - await event.wait() + await send_message(listener.message, msg, SBUTTONS) + elif listener.multi <= 1: + await send_status_message(listener.message) - async with download_dict_lock: - if listener.uid not in download_dict: + if event is not None: + if not event.is_set(): + await event.wait() + if listener.is_cancelled: return - download_dict[listener.uid].queued = False - - await sync_to_async(xnox_client.torrents_resume, torrent_hashes=ext_hash) - LOGGER.info( - f"Start Queued Download from Qbittorrent: {tor_info.name} - Hash: {ext_hash}" + async with task_dict_lock: + task_dict[listener.mid].queued = False + LOGGER.info( + f"Start Queued Download from Qbittorrent: {tor_info.name} - Hash: {ext_hash}", + ) + await on_download_start(f"{listener.mid}") + await sync_to_async( + xnox_client.torrents_start, + torrent_hashes=ext_hash, ) - async with queue_dict_lock: - non_queued_dl.add(listener.uid) except Exception as e: - await send_message(listener.message, str(e)) + await listener.on_download_error(f"{e}") finally: - if await aiopath.exists(link): - await aioremove(link) + if tpath and await aiopath.exists(tpath): + await remove(tpath) diff --git a/bot/helper/mirror_leech_utils/download_utils/rclone_download.py b/bot/helper/mirror_leech_utils/download_utils/rclone_download.py index 03d185322..15c7f3353 100644 --- a/bot/helper/mirror_leech_utils/download_utils/rclone_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/rclone_download.py @@ -1,28 +1,39 @@ -from json import loads from asyncio import gather +from json import loads from secrets import token_hex -from bot import ( - LOGGER, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) +from aiofiles.os import remove + +from bot import LOGGER, task_dict, task_dict_lock from bot.helper.ext_utils.bot_utils import cmd_exec -from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check -from bot.helper.telegram_helper.message_utils import send_message, sendStatusMessage +from bot.helper.ext_utils.task_manager import ( + check_running_tasks, + stop_duplicate_check, +) from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus +from bot.helper.telegram_helper.message_utils import send_status_message -async def add_rclone_download(rc_path, config_path, 
path, name, listener): - remote, rc_path = rc_path.split(":", 1) - rc_path = rc_path.strip("/") +async def add_rclone_download(listener, path): + if listener.link.startswith("mrcc:"): + listener.link = listener.link.split("mrcc:", 1)[1] + config_path = f"rclone/{listener.user_id}.conf" + else: + config_path = "rclone.conf" + + remote, listener.link = listener.link.split(":", 1) + listener.link = listener.link.strip("/") + rclone_select = False + if listener.link.startswith("rclone_select"): + rclone_select = True + rpath = "" + else: + rpath = listener.link cmd1 = [ - "rclone", + "xone", "lsjson", "--fast-list", "--stat", @@ -30,74 +41,108 @@ async def add_rclone_download(rc_path, config_path, path, name, listener): "--no-modtime", "--config", config_path, - f"{remote}:{rc_path}", + f"{remote}:{rpath}", + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", ] cmd2 = [ - "rclone", + "xone", "size", "--fast-list", "--json", "--config", config_path, - f"{remote}:{rc_path}", + f"{remote}:{rpath}", + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", ] - res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2)) - if res1[2] != res2[2] != 0: - if res1[2] != -9: - err = res1[1] or res2[1] - msg = f"Error: While getting rclone stat/size. Path: {remote}:{rc_path}. Stderr: {err[:4000]}" - await send_message(listener.message, msg) - return - try: - rstat = loads(res1[0]) - rsize = loads(res2[0]) - except Exception as err: - await send_message(listener.message, f"RcloneDownload JsonLoad: {err}") - return - if rstat["IsDir"]: - if not name: - name = rc_path.rsplit("/", 1)[-1] if rc_path else remote - path += name + if rclone_select: + cmd2.extend(("--files-from", listener.link)) + res = await cmd_exec(cmd2) + if res[2] != 0: + if res[2] != -9: + err = ( + res[1] + or "Use /shell cat rlog.txt to see more information" + ) + msg = f"Error: While getting rclone stat/size. Path: {remote}:{listener.link}. Stderr: {err[:4000]}" + await listener.on_download_error(msg) + return + try: + rsize = loads(res[0]) + except Exception as err: + if not str(err): + err = "Use /shell cat rlog.txt to see more information" + await listener.on_download_error(f"RcloneDownload JsonLoad: {err}") + return + if not listener.name: + listener.name = listener.link + path += listener.name else: - name = rc_path.rsplit("/", 1)[-1] - size = rsize["bytes"] + res1, res2 = await gather(cmd_exec(cmd1), cmd_exec(cmd2)) + if res1[2] != res2[2] != 0: + if res1[2] != -9: + err = ( + res1[1] + or res2[1] + or "Use /shell cat rlog.txt to see more information" + ) + msg = f"Error: While getting rclone stat/size. Path: {remote}:{listener.link}. 
Stderr: {err[:4000]}" + await listener.on_download_error(msg) + return + try: + rstat = loads(res1[0]) + rsize = loads(res2[0]) + except Exception as err: + if not str(err): + err = "Use /shell cat rlog.txt to see more information" + await listener.on_download_error(f"RcloneDownload JsonLoad: {err}") + return + if rstat["IsDir"]: + if not listener.name: + listener.name = ( + listener.link.rsplit("/", 1)[-1] if listener.link else remote + ) + path += listener.name + else: + listener.name = listener.link.rsplit("/", 1)[-1] + listener.size = rsize["bytes"] gid = token_hex(4) - msg, button = await stop_duplicate_check(name, listener) - if msg: - await send_message(listener.message, msg, button) - return + if not rclone_select: + msg, button = await stop_duplicate_check(listener) + if msg: + await listener.on_download_error(msg, button) + return - added_to_queue, event = await is_queued(listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[listener.uid] = QueueStatus( - name, size, gid, listener, "dl" - ) + add_to_queue, event = await check_running_tasks(listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {listener.name}") + async with task_dict_lock: + task_dict[listener.mid] = QueueStatus(listener, gid, "dl") await listener.on_download_start() - await sendStatusMessage(listener.message) + if listener.multi <= 1: + await send_status_message(listener.message) await event.wait() - async with download_dict_lock: - if listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False + if listener.is_cancelled: + return - RCTransfer = RcloneTransferHelper(listener, name) - async with download_dict_lock: - download_dict[listener.uid] = RcloneStatus( - RCTransfer, listener.message, gid, "dl" - ) - async with queue_dict_lock: - non_queued_dl.add(listener.uid) + RCTransfer = RcloneTransferHelper(listener) + async with task_dict_lock: + task_dict[listener.mid] = RcloneStatus(listener, RCTransfer, gid, "dl") - if from_queue: - LOGGER.info(f"Start Queued Download with rclone: {rc_path}") + if add_to_queue: + LOGGER.info(f"Start Queued Download with rclone: {listener.link}") else: await listener.on_download_start() - await sendStatusMessage(listener.message) - LOGGER.info(f"Download with rclone: {rc_path}") + if listener.multi <= 1: + await send_status_message(listener.message) + LOGGER.info(f"Download with rclone: {listener.link}") - await RCTransfer.download(remote, rc_path, config_path, path) + await RCTransfer.download(remote, config_path, path) + if rclone_select: + await remove(listener.link) diff --git a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py index 966225e9e..38963aa3a 100644 --- a/bot/helper/mirror_leech_utils/download_utils/telegram_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/telegram_download.py @@ -1,118 +1,103 @@ -import contextlib +from asyncio import Lock, sleep from time import time -from asyncio import Lock -from logging import ERROR, getLogger -from secrets import token_hex - -from bot import ( - LOGGER, - IS_PREMIUM_USER, - bot, - user, - download_dict, - non_queued_dl, - queue_dict_lock, - download_dict_lock, -) + +from pyrogram.errors import FloodPremiumWait, FloodWait + +from bot import LOGGER, task_dict, task_dict_lock +from bot.core.aeon_client import TgClient from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + 
check_running_tasks, stop_duplicate_check, ) -from bot.helper.telegram_helper.message_utils import ( - delete_links, - send_message, - sendStatusMessage, -) from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus from bot.helper.mirror_leech_utils.status_utils.telegram_status import TelegramStatus +from bot.helper.telegram_helper.message_utils import send_status_message global_lock = Lock() GLOBAL_GID = set() -getLogger("pyrogram").setLevel(ERROR) class TelegramDownloadHelper: def __init__(self, listener): - self.name = "" - self.__processed_bytes = 0 - self.__start_time = time() - self.__listener = listener - self.__id = "" - self.__is_cancelled = False + self._processed_bytes = 0 + self._start_time = time() + self._listener = listener + self._id = "" + self.session = "" @property def speed(self): - return self.__processed_bytes / (time() - self.__start_time) + return self._processed_bytes / (time() - self._start_time) @property def processed_bytes(self): - return self.__processed_bytes + return self._processed_bytes - async def __on_download_start(self, name, size, file_id, from_queue): + async def _on_download_start(self, file_id, from_queue): async with global_lock: GLOBAL_GID.add(file_id) - self.name = name - self.__id = file_id - gid = token_hex(4) - async with download_dict_lock: - download_dict[self.__listener.uid] = TelegramStatus( - self, size, self.__listener.message, gid, "dl" + self._id = file_id + async with task_dict_lock: + task_dict[self._listener.mid] = TelegramStatus( + self._listener, + self, + file_id[:12], + "dl", ) - async with queue_dict_lock: - non_queued_dl.add(self.__listener.uid) if not from_queue: - await self.__listener.on_download_start() - await sendStatusMessage(self.__listener.message) - LOGGER.info(f"Download from Telegram: {name}") + await self._listener.on_download_start() + if self._listener.multi <= 1: + await send_status_message(self._listener.message) + LOGGER.info(f"Download from Telegram: {self._listener.name}") else: - LOGGER.info(f"Start Queued Download from Telegram: {name}") + LOGGER.info( + f"Start Queued Download from Telegram: {self._listener.name}", + ) - async def __onDownloadProgress(self, current, _): - if self.__is_cancelled: - if IS_PREMIUM_USER: - user.stop_transmission() - else: - bot.stop_transmission() - self.__processed_bytes = current + async def _on_download_progress(self, current, _): + if self._listener.is_cancelled: + self.session.stop_transmission() + self._processed_bytes = current - async def __on_download_error(self, error): + async def _on_download_error(self, error): async with global_lock: - with contextlib.suppress(Exception): - GLOBAL_GID.remove(self.__id) - await self.__listener.onDownloadError(error) + if self._id in GLOBAL_GID: + GLOBAL_GID.remove(self._id) + await self._listener.on_download_error(error) - async def __on_download_complete(self): - await self.__listener.on_download_complete() + async def _on_download_complete(self): + await self._listener.on_download_complete() async with global_lock: - GLOBAL_GID.remove(self.__id) + GLOBAL_GID.remove(self._id) - async def __download(self, message, path): + async def _download(self, message, path): try: download = await message.download( - file_name=path, progress=self.__onDownloadProgress + file_name=path, + progress=self._on_download_progress, ) - if self.__is_cancelled: - await self.__on_download_error("Cancelled by user!") + if self._listener.is_cancelled: return + except (FloodWait, FloodPremiumWait) as f: + LOGGER.warning(str(f)) + 
await sleep(f.value) + await self._download(message, path) + return except Exception as e: LOGGER.error(str(e)) - await self.__on_download_error(str(e)) + await self._on_download_error(str(e)) return if download is not None: - await self.__on_download_complete() - elif not self.__is_cancelled: - await self.__on_download_error("Internal error occurred") - - async def add_download(self, message, path, filename, session): - if session == "user": - if not self.__listener.isSuperGroup: - await send_message( - message, "Use SuperGroup to download this Link with User!" - ) - return - message = await user.get_messages( - chat_id=message.chat.id, message_ids=message.id + await self._on_download_complete() + elif not self._listener.is_cancelled: + await self._on_download_error("Internal error occurred") + + async def add_download(self, message, path, session): + self.session = session + if self.session != TgClient.bot: + message = await self.session.get_messages( + chat_id=message.chat.id, + message_ids=message.id, ) media = ( @@ -132,50 +117,51 @@ async def add_download(self, message, path, filename, session): download = media.file_unique_id not in GLOBAL_GID if download: - if filename == "": - name = media.file_name if hasattr(media, "file_name") else "None" + if self._listener.name == "": + self._listener.name = ( + media.file_name if hasattr(media, "file_name") else "None" + ) else: - name = filename - path = path + name - size = media.file_size + path = path + self._listener.name + self._listener.size = media.file_size gid = media.file_unique_id - msg, button = await stop_duplicate_check(name, self.__listener) + msg, button = await stop_duplicate_check(self._listener) if msg: - await send_message(self.__listener.message, msg, button) - await delete_links(self.__listener.message) - return - if limit_exceeded := await limit_checker(size, self.__listener): - await self.__listener.onDownloadError(limit_exceeded) - await delete_links(self.__listener.message) + await self._listener.on_download_error(msg, button) return - added_to_queue, event = await is_queued(self.__listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {name}") - async with download_dict_lock: - download_dict[self.__listener.uid] = QueueStatus( - name, size, gid, self.__listener, "dl" + + add_to_queue, event = await check_running_tasks(self._listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {self._listener.name}") + async with task_dict_lock: + task_dict[self._listener.mid] = QueueStatus( + self._listener, + gid, + "dl", ) - await self.__listener.on_download_start() - await sendStatusMessage(self.__listener.message) + await self._listener.on_download_start() + if self._listener.multi <= 1: + await send_status_message(self._listener.message) await event.wait() - async with download_dict_lock: - if self.__listener.uid not in download_dict: - return - from_queue = True - else: - from_queue = False - await self.__on_download_start(name, size, gid, from_queue) - await self.__download(message, path) + if self._listener.is_cancelled: + async with global_lock: + if self._id in GLOBAL_GID: + GLOBAL_GID.remove(self._id) + return + + await self._on_download_start(gid, add_to_queue) + await self._download(message, path) else: - await self.__on_download_error("File already being downloaded!") + await self._on_download_error("File already being downloaded!") else: - await self.__on_download_error( - "No valid media type in the replied message" + await self._on_download_error( + "No document in the replied 
message! Use a SuperGroup in case you are trying to download with a user session!", ) - async def cancel_download(self): - self.__is_cancelled = True + async def cancel_task(self): + self._listener.is_cancelled = True LOGGER.info( - f"Cancelling download via User: [ Name: {self.name} ID: {self.__id} ]" + f"Cancelling download on user request: name: {self._listener.name} id: {self._id}", ) + await self._on_download_error("Stopped by user!") diff --git a/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py b/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py index fe71b6379..2c6f2e0a1 100644 --- a/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py +++ b/bot/helper/mirror_leech_utils/download_utils/yt_dlp_download.py @@ -1,41 +1,44 @@ +# ruff: noqa: ARG005, B023 import contextlib -from os import path as ospath +from logging import getLogger from os import listdir +from os import path as ospath from re import search as re_search -from logging import getLogger from secrets import token_hex -from yt_dlp import YoutubeDL, DownloadError +from yt_dlp import DownloadError, YoutubeDL -from bot import download_dict, non_queued_dl, queue_dict_lock, download_dict_lock +from bot import task_dict, task_dict_lock from bot.helper.ext_utils.bot_utils import async_to_sync, sync_to_async from bot.helper.ext_utils.task_manager import ( - is_queued, - limit_checker, + check_running_tasks, stop_duplicate_check, ) -from bot.helper.telegram_helper.message_utils import sendStatusMessage from bot.helper.mirror_leech_utils.status_utils.queue_status import QueueStatus -from bot.helper.mirror_leech_utils.status_utils.ytdlp_status import ( - YtDlpDownloadStatus, -) +from bot.helper.mirror_leech_utils.status_utils.yt_dlp_status import YtDlpStatus +from bot.helper.telegram_helper.message_utils import send_status_message LOGGER = getLogger(__name__) class MyLogger: - def __init__(self, obj): - self.obj = obj + def __init__(self, obj, listener): + self._obj = obj + self._listener = listener def debug(self, msg): - if not self.obj.is_playlist and ( - match := re_search(r".Merger..Merging formats into..(.*?).$", msg) + # Hack to fix changing extension + if not self._obj.is_playlist and ( + match := re_search( + r".Merger..Merging formats into..(.*?).$", + msg, + ) or re_search(r".ExtractAudio..Destination..(.*?)$", msg) ): LOGGER.info(msg) newname = match.group(1) newname = newname.rsplit("/", 1)[-1] - self.obj.name = newname + self._listener.name = newname @staticmethod def warning(msg): @@ -49,23 +52,18 @@ def error(msg): class YoutubeDLHelper: def __init__(self, listener): - self.__last_downloaded = 0 - self.__size = 0 - self.__progress = 0 - self.__downloaded_bytes = 0 - self.__download_speed = 0 - self.__eta = "-" - self.__listener = listener - self.__gid = "" - self.__is_cancelled = False - self.__downloading = False - self.__ext = "" - self.name = "" + self._last_downloaded = 0 + self._progress = 0 + self._downloaded_bytes = 0 + self._download_speed = 0 + self._eta = "-" + self._listener = listener + self._gid = "" + self._ext = "" self.is_playlist = False - self.playlist_count = 0 self.opts = { - "progress_hooks": [self.__onDownloadProgress], - "logger": MyLogger(self), + "progress_hooks": [self._on_download_progress], + "logger": MyLogger(self, self._listener), "usenetrc": True, "cookiefile": "cookies.txt", "allow_multiple_video_streams": True, @@ -77,143 +75,139 @@ def __init__(self, listener): "trim_file_name": 220, "ffmpeg_location": "/bin/xtra", "retry_sleep_functions": { - "http": 
lambda _: 3, - "fragment": lambda _: 3, - "file_access": lambda _: 3, - "extractor": lambda _: 3, + "http": lambda n: 3, + "fragment": lambda n: 3, + "file_access": lambda n: 3, + "extractor": lambda n: 3, }, } @property def download_speed(self): - return self.__download_speed + return self._download_speed @property def downloaded_bytes(self): - return self.__downloaded_bytes + return self._downloaded_bytes @property def size(self): - return self.__size + return self._listener.size @property def progress(self): - return self.__progress + return self._progress @property def eta(self): - return self.__eta + return self._eta - def __onDownloadProgress(self, d): - self.__downloading = True - if self.__is_cancelled: + def _on_download_progress(self, d): + if self._listener.is_cancelled: raise ValueError("Cancelling...") if d["status"] == "finished": if self.is_playlist: - self.__last_downloaded = 0 + self._last_downloaded = 0 elif d["status"] == "downloading": - self.__download_speed = d["speed"] + self._download_speed = d["speed"] if self.is_playlist: downloadedBytes = d["downloaded_bytes"] - chunk_size = downloadedBytes - self.__last_downloaded - self.__last_downloaded = downloadedBytes - self.__downloaded_bytes += chunk_size + chunk_size = downloadedBytes - self._last_downloaded + self._last_downloaded = downloadedBytes + self._downloaded_bytes += chunk_size else: if d.get("total_bytes"): - self.__size = d["total_bytes"] + self._listener.size = d["total_bytes"] elif d.get("total_bytes_estimate"): - self.__size = d["total_bytes_estimate"] - self.__downloaded_bytes = d["downloaded_bytes"] - self.__eta = d.get("eta", "-") or "-" + self._listener.size = d["total_bytes_estimate"] + self._downloaded_bytes = d["downloaded_bytes"] + self._eta = d.get("eta", "-") or "-" with contextlib.suppress(Exception): - self.__progress = (self.__downloaded_bytes / self.__size) * 100 - - async def __on_download_start(self, from_queue=False): - async with download_dict_lock: - download_dict[self.__listener.uid] = YtDlpDownloadStatus( - self, self.__listener, self.__gid + self._progress = (self._downloaded_bytes / self._listener.size) * 100 + + async def _on_download_start(self, from_queue=False): + async with task_dict_lock: + task_dict[self._listener.mid] = YtDlpStatus( + self._listener, + self, + self._gid, ) if not from_queue: - await self.__listener.on_download_start() - await sendStatusMessage(self.__listener.message) + await self._listener.on_download_start() + if self._listener.multi <= 1: + await send_status_message(self._listener.message) - def __on_download_error(self, error): - self.__is_cancelled = True - async_to_sync(self.__listener.onDownloadError, error) + def _on_download_error(self, error): + self._listener.is_cancelled = True + async_to_sync(self._listener.on_download_error, error) - def extractMetaData(self, link, name): - if link.startswith(("rtmp", "mms", "rstp", "rtmps")): - self.opts["external_downloader"] = "ffmpeg" + def _extract_meta_data(self): + if self._listener.link.startswith(("rtmp", "mms", "rstp", "rtmps")): + self.opts["external_downloader"] = "xtra" with YoutubeDL(self.opts) as ydl: try: - result = ydl.extract_info(link, download=False) + result = ydl.extract_info(self._listener.link, download=False) if result is None: raise ValueError("Info result is None") except Exception as e: - return self.__on_download_error(str(e)) - if self.is_playlist: - self.playlist_count = result.get("playlist_count", 0) + return self._on_download_error(str(e)) if "entries" in result: - self.name = 
name for entry in result["entries"]: if not entry: continue if "filesize_approx" in entry: - self.__size += entry["filesize_approx"] + self._listener.size += entry.get("filesize_approx", 0) elif "filesize" in entry: - self.__size += entry["filesize"] - if not self.name: + self._listener.size += entry.get("filesize", 0) + if not self._listener.name: outtmpl_ = "%(series,playlist_title,channel)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d.%(ext)s" - self.name, ext = ospath.splitext( - ydl.prepare_filename(entry, outtmpl=outtmpl_) + self._listener.name, ext = ospath.splitext( + ydl.prepare_filename(entry, outtmpl=outtmpl_), ) - if not self.__ext: - self.__ext = ext + if not self._ext: + self._ext = ext return None outtmpl_ = "%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s" realName = ydl.prepare_filename(result, outtmpl=outtmpl_) ext = ospath.splitext(realName)[-1] - self.name = f"{name}{ext}" if name else realName - if not self.__ext: - self.__ext = ext - if result.get("filesize"): - self.__size = result["filesize"] - return None - if result.get("filesize_approx"): - self.__size = result["filesize_approx"] + self._listener.name = ( + f"{self._listener.name}{ext}" if self._listener.name else realName + ) + if not self._ext: + self._ext = ext return None return None - def __download(self, link, path): + def _download(self, path): try: with YoutubeDL(self.opts) as ydl: try: - ydl.download([link]) + ydl.download([self._listener.link]) except DownloadError as e: - if not self.__is_cancelled: - self.__on_download_error(str(e)) + if not self._listener.is_cancelled: + self._on_download_error(str(e)) return if self.is_playlist and ( not ospath.exists(path) or len(listdir(path)) == 0 ): - self.__on_download_error( - "No video available to download from this playlist. Check logs for more details" + self._on_download_error( + "No video available to download from this playlist. 
Check logs for more details", ) return - if self.__is_cancelled: - raise ValueError - async_to_sync(self.__listener.on_download_complete) - except ValueError: - self.__on_download_error("Download Stopped by User!") + if self._listener.is_cancelled: + return + async_to_sync(self._listener.on_download_complete) + except Exception: + pass - async def add_download(self, link, path, name, qual, playlist, options): + async def add_download(self, path, qual, playlist, options): if playlist: self.opts["ignoreerrors"] = True self.is_playlist = True - self.__gid = token_hex(4) + self._gid = token_hex(4) - await self.__on_download_start() + await self._on_download_start() self.opts["postprocessors"] = [ { @@ -221,7 +215,7 @@ async def add_download(self, link, path, name, qual, playlist, options): "add_infojson": "if_exists", "add_metadata": True, "key": "FFmpegMetadata", - } + }, ] if qual.startswith("ba/b-"): @@ -234,37 +228,44 @@ async def add_download(self, link, path, name, qual, playlist, options): "key": "FFmpegExtractAudio", "preferredcodec": audio_format, "preferredquality": rate, - } + }, ) if audio_format == "vorbis": - self.__ext = ".ogg" + self._ext = ".ogg" elif audio_format == "alac": - self.__ext = ".m4a" + self._ext = ".m4a" else: - self.__ext = f".{audio_format}" - - self.opts["format"] = qual + self._ext = f".{audio_format}" if options: - self.__set_options(options) + self._set_options(options) + + self.opts["format"] = qual - await sync_to_async(self.extractMetaData, link, name) - if self.__is_cancelled: + await sync_to_async(self._extract_meta_data) + if self._listener.is_cancelled: return - base_name, ext = ospath.splitext(self.name) - trim_name = self.name if self.is_playlist else base_name + base_name, ext = ospath.splitext(self._listener.name) + trim_name = self._listener.name if self.is_playlist else base_name if len(trim_name.encode()) > 200: - self.name = ( - self.name[:200] if self.is_playlist else f"{base_name[:200]}{ext}" + self._listener.name = ( + self._listener.name[:200] + if self.is_playlist + else f"{base_name[:200]}{ext}" ) - base_name = ospath.splitext(self.name)[0] + base_name = ospath.splitext(self._listener.name)[0] if self.is_playlist: self.opts["outtmpl"] = { - "default": f"{path}/{self.name}/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", + "default": f"{path}/{self._listener.name}/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", "thumbnail": f"{path}/yt-dlp-thumb/%(title,fulltitle,alt_title)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d%(episode_number&E|)s%(episode_number|)02d%(height& |)s%(height|)s%(height&p|)s%(fps|)s%(fps&fps|)s%(tbr& |)s%(tbr|)d.%(ext)s", } + elif "download_ranges" in options: + self.opts["outtmpl"] = { + "default": f"{path}/{base_name}/%(section_number|)s%(section_number&.|)s%(section_title|)s%(section_title&-|)s%(title,fulltitle,alt_title)s %(section_start)s to %(section_end)s.%(ext)s", + "thumbnail": f"{path}/yt-dlp-thumb/%(section_number|)s%(section_number&.|)s%(section_title|)s%(section_title&-|)s%(title,fulltitle,alt_title)s %(section_start)s to %(section_end)s.%(ext)s", + } elif any( key in options for key in [ @@ -279,27 +280,27 @@ async def add_download(self, link, path, name, 
qual, playlist, options): ] ): self.opts["outtmpl"] = { - "default": f"{path}/{base_name}/{self.name}", + "default": f"{path}/{base_name}/{self._listener.name}", "thumbnail": f"{path}/yt-dlp-thumb/{base_name}.%(ext)s", } else: self.opts["outtmpl"] = { - "default": f"{path}/{self.name}", + "default": f"{path}/{self._listener.name}", "thumbnail": f"{path}/yt-dlp-thumb/{base_name}.%(ext)s", } if qual.startswith("ba/b"): - self.name = f"{base_name}{self.__ext}" + self._listener.name = f"{base_name}{self._ext}" - if self.__listener.is_leech: + if self._listener.is_leech and not self._listener.thumbnail_layout: self.opts["postprocessors"].append( { "format": "jpg", "key": "FFmpegThumbnailsConvertor", "when": "before_dl", - } + }, ) - if self.__ext in [ + if self._ext in [ ".mp3", ".mkv", ".mka", @@ -309,62 +310,54 @@ async def add_download(self, link, path, name, qual, playlist, options): ".m4a", ".mp4", ".mov", - "m4v", + ".m4v", ]: self.opts["postprocessors"].append( { - "already_have_thumbnail": self.__listener.is_leech, + "already_have_thumbnail": bool( + self._listener.is_leech + and not self._listener.thumbnail_layout, + ), "key": "EmbedThumbnail", - } + }, ) - elif not self.__listener.is_leech: + elif not self._listener.is_leech: self.opts["writethumbnail"] = False - msg, button = await stop_duplicate_check(self.name, self.__listener) + msg, button = await stop_duplicate_check(self._listener) if msg: - await self.__listener.onDownloadError(msg, button) + await self._listener.on_download_error(msg, button) return - if limit_exceeded := await limit_checker( - self.__size, - self.__listener, - is_ytdlp=True, - is_playlist=self.playlist_count, - ): - await self.__listener.onDownloadError(limit_exceeded) - return - added_to_queue, event = await is_queued(self.__listener.uid) - if added_to_queue: - LOGGER.info(f"Added to Queue/Download: {self.name}") - async with download_dict_lock: - download_dict[self.__listener.uid] = QueueStatus( - self.name, self.__size, self.__gid, self.__listener, "dl" + + add_to_queue, event = await check_running_tasks(self._listener) + if add_to_queue: + LOGGER.info(f"Added to Queue/Download: {self._listener.name}") + async with task_dict_lock: + task_dict[self._listener.mid] = QueueStatus( + self._listener, + self._gid, + "dl", ) await event.wait() - async with download_dict_lock: - if self.__listener.uid not in download_dict: - return - LOGGER.info(f"Start Queued Download from YT_DLP: {self.name}") - await self.__on_download_start(True) - else: - LOGGER.info(f"Download with YT_DLP: {self.name}") + if self._listener.is_cancelled: + return + LOGGER.info(f"Start Queued Download from YT_DLP: {self._listener.name}") + await self._on_download_start(True) - async with queue_dict_lock: - non_queued_dl.add(self.__listener.uid) + if not add_to_queue: + LOGGER.info(f"Download with YT_DLP: {self._listener.name}") - await sync_to_async(self.__download, link, path) + await sync_to_async(self._download, path) - async def cancel_download(self): - self.__is_cancelled = True - LOGGER.info(f"Cancelling Download: {self.name}") - if not self.__downloading: - await self.__listener.onDownloadError("Download Cancelled by User!") + async def cancel_task(self): + self._listener.is_cancelled = True + LOGGER.info(f"Cancelling Download: {self._listener.name}") + await self._listener.on_download_error("Stopped by User!") - def __set_options(self, options): + def _set_options(self, options): options = options.split("|") for opt in options: key, value = map(str.strip, opt.split(":", 1)) - if key == 
"format" and value.startswith("ba/b-"): - continue if value.startswith("^"): if "." in value or value == "^inf": value = float(value.split("^", 1)[1]) @@ -375,7 +368,7 @@ def __set_options(self, options): elif value.lower() == "false": value = False elif value.startswith(("{", "[", "(")) and value.endswith( - ("}", "]", ")") + ("}", "]", ")"), ): value = eval(value) @@ -384,5 +377,8 @@ def __set_options(self, options): self.opts[key].extend(tuple(value)) elif isinstance(value, dict): self.opts[key].append(value) + elif key == "download_ranges": + if isinstance(value, list): + self.opts[key] = lambda info, ytdl: value else: self.opts[key] = value diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/__init__.py b/bot/helper/mirror_leech_utils/gdrive_utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/clone.py b/bot/helper/mirror_leech_utils/gdrive_utils/clone.py new file mode 100644 index 000000000..a93200550 --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/clone.py @@ -0,0 +1,174 @@ +from logging import getLogger +from os import path as ospath +from time import time + +from googleapiclient.errors import HttpError +from tenacity import ( + RetryError, + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +from bot.helper.ext_utils.bot_utils import async_to_sync +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class GoogleDriveClone(GoogleDriveHelper): + def __init__(self, listener): + self.listener = listener + self._start_time = time() + super().__init__() + self.is_cloning = True + self.user_setting() + + def user_setting(self): + if self.listener.up_dest.startswith("mtp:") or self.listener.link.startswith( + "mtp:", + ): + self.token_path = f"tokens/{self.listener.user_id}.pickle" + self.listener.up_dest = self.listener.up_dest.replace("mtp:", "", 1) + self.use_sa = False + elif self.listener.up_dest.startswith("tp:"): + self.listener.up_dest = self.listener.up_dest.replace("tp:", "", 1) + self.use_sa = False + elif self.listener.up_dest.startswith( + "sa:", + ) or self.listener.link.startswith( + "sa:", + ): + self.listener.up_dest = self.listener.up_dest.replace("sa:", "", 1) + self.use_sa = True + + def clone(self): + try: + file_id = self.get_id_from_url(self.listener.link) + except (KeyError, IndexError): + return ( + "Google Drive ID could not be found in the provided link", + None, + None, + None, + None, + ) + self.service = self.authorize() + msg = "" + LOGGER.info(f"File ID: {file_id}") + try: + meta = self.get_file_metadata(file_id) + mime_type = meta.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + dir_id = self.create_directory( + meta.get("name"), + self.listener.up_dest, + ) + self._clone_folder(meta.get("name"), meta.get("id"), dir_id) + durl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) + if self.listener.is_cancelled: + LOGGER.info("Deleting cloned data from Drive...") + self.service.files().delete( + fileId=dir_id, + supportsAllDrives=True, + ).execute() + return None, None, None, None, None + mime_type = "Folder" + self.listener.size = self.proc_bytes + else: + file = self._copy_file(meta.get("id"), self.listener.up_dest) + msg += f"Name: {file.get('name')}" + durl = self.G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id")) + if mime_type is None: + mime_type = "File" + self.listener.size = int(meta.get("size", 0)) + return ( + durl, + mime_type, + self.total_files, + 
self.total_folders, + self.get_id_from_url(durl), + ) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace(">", "").replace("<", "") + if "User rate limit exceeded" in err: + msg = "User rate limit exceeded." + elif "File not found" in err: + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + return self.clone() + msg = "File not found." + else: + msg = f"Error.\n{err}" + async_to_sync(self.listener.on_upload_error, msg) + return None, None, None, None, None + + def _clone_folder(self, folder_name, folder_id, dest_id): + LOGGER.info(f"Syncing: {folder_name}") + files = self.get_files_by_folder_id(folder_id) + if len(files) == 0: + return dest_id + for file in files: + if file.get("mimeType") == self.G_DRIVE_DIR_MIME_TYPE: + self.total_folders += 1 + file_path = ospath.join(folder_name, file.get("name")) + current_dir_id = self.create_directory(file.get("name"), dest_id) + self._clone_folder(file_path, file.get("id"), current_dir_id) + elif ( + not file.get("name") + .lower() + .endswith(tuple(self.listener.extension_filter)) + ): + self.total_files += 1 + self._copy_file(file.get("id"), dest_id) + self.proc_bytes += int(file.get("size", 0)) + self.total_time = int(time() - self._start_time) + if self.listener.is_cancelled: + break + return None + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def _copy_file(self, file_id, dest_id): + body = {"parents": [dest_id]} + try: + return ( + self.service.files() + .copy(fileId=file_id, body=body, supportsAllDrives=True) + .execute() + ) + except HttpError as err: + if err.resp.get("content-type", "").startswith("application/json"): + reason = ( + eval(err.content).get("error").get("errors")[0].get("reason") + ) + if reason not in [ + "userRateLimitExceeded", + "dailyLimitExceeded", + "cannotCopyFile", + ]: + raise err + if reason == "cannotCopyFile": + LOGGER.error(err) + elif self.use_sa: + if self.sa_count >= self.sa_number: + LOGGER.info( + f"Reached the maximum number of service account switches: {self.sa_count}", + ) + raise err + if self.listener.is_cancelled: + return None + self.switch_service_account() + return self._copy_file(file_id, dest_id) + else: + LOGGER.error(f"Got: {reason}") + raise err diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/count.py b/bot/helper/mirror_leech_utils/gdrive_utils/count.py new file mode 100644 index 000000000..32873414b --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/count.py @@ -0,0 +1,81 @@ +from logging import getLogger + +from tenacity import RetryError + +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class GoogleDriveCount(GoogleDriveHelper): + def __init__(self): + super().__init__() + + def count(self, link, user_id): + try: + file_id = self.get_id_from_url(link, user_id) + except (KeyError, IndexError): + return ( + "Google Drive ID could not be found in the provided link", + None, + None, + None, + None, + ) + self.service = self.authorize() + LOGGER.info(f"File ID: {file_id}") + try: + return self._proceed_count(file_id) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = 
err.last_attempt.exception() + err = str(err).replace(">", "").replace("<", "") + if "File not found" in err: + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + return self.count(link, user_id) + msg = "File not found." + else: + msg = f"Error.\n{err}" + return msg, None, None, None, None + + def _proceed_count(self, file_id): + meta = self.get_file_metadata(file_id) + name = meta["name"] + LOGGER.info(f"Counting: {name}") + mime_type = meta.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + self._gdrive_directory(meta) + mime_type = "Folder" + else: + if mime_type is None: + mime_type = "File" + self.total_files += 1 + self._gdrive_file(meta) + return name, mime_type, self.proc_bytes, self.total_files, self.total_folders + + def _gdrive_file(self, filee): + size = int(filee.get("size", 0)) + self.proc_bytes += size + + def _gdrive_directory(self, drive_folder): + files = self.get_files_by_folder_id(drive_folder["id"]) + if len(files) == 0: + return + for filee in files: + shortcut_details = filee.get("shortcutDetails") + if shortcut_details is not None: + mime_type = shortcut_details["targetMimeType"] + file_id = shortcut_details["targetId"] + filee = self.get_file_metadata(file_id) + else: + mime_type = filee.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + self.total_folders += 1 + self._gdrive_directory(filee) + else: + self.total_files += 1 + self._gdrive_file(filee) diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/delete.py b/bot/helper/mirror_leech_utils/gdrive_utils/delete.py new file mode 100644 index 000000000..62fe7d818 --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/delete.py @@ -0,0 +1,40 @@ +from logging import getLogger + +from googleapiclient.errors import HttpError + +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class GoogleDriveDelete(GoogleDriveHelper): + def __init__(self): + super().__init__() + + def deletefile(self, link, user_id): + try: + file_id = self.get_id_from_url(link, user_id) + except (KeyError, IndexError): + return "Google Drive ID could not be found in the provided link" + self.service = self.authorize() + msg = "" + try: + self.service.files().delete( + fileId=file_id, + supportsAllDrives=True, + ).execute() + msg = "Successfully deleted" + LOGGER.info(f"Delete Result: {msg}") + except HttpError as err: + if "File not found" in str(err) or "insufficientFilePermissions" in str( + err, + ): + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + return self.deletefile(link, user_id) + err = "File not found or insufficientFilePermissions!" 
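+            # Log the failure and return the error text as the result message.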
+ LOGGER.error(f"Delete Result: {err}") + msg = str(err) + return msg diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/download.py b/bot/helper/mirror_leech_utils/gdrive_utils/download.py new file mode 100644 index 000000000..42d533b30 --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/download.py @@ -0,0 +1,176 @@ +from io import FileIO +from logging import getLogger +from os import makedirs +from os import path as ospath + +from googleapiclient.errors import HttpError +from googleapiclient.http import MediaIoBaseDownload +from tenacity import ( + RetryError, + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +from bot.helper.ext_utils.bot_utils import SetInterval, async_to_sync +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class GoogleDriveDownload(GoogleDriveHelper): + def __init__(self, listener, path): + self.listener = listener + self._updater = None + self._path = path + super().__init__() + self.is_downloading = True + + def download(self): + file_id = self.get_id_from_url(self.listener.link, self.listener.user_id) + self.service = self.authorize() + self._updater = SetInterval(self.update_interval, self.progress) + try: + meta = self.get_file_metadata(file_id) + if meta.get("mimeType") == self.G_DRIVE_DIR_MIME_TYPE: + self._download_folder(file_id, self._path, self.listener.name) + else: + makedirs(self._path, exist_ok=True) + self._download_file( + file_id, + self._path, + self.listener.name, + meta.get("mimeType"), + ) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = str(err).replace(">", "").replace("<", "") + if "downloadQuotaExceeded" in err: + err = "Download Quota Exceeded." + elif "File not found" in err: + if not self.alt_auth and self.use_sa: + self.alt_auth = True + self.use_sa = False + LOGGER.error("File not found. Trying with token.pickle...") + self._updater.cancel() + return self.download() + err = "File not found!" 
+ async_to_sync(self.listener.on_download_error, err) + self.listener.is_cancelled = True + finally: + self._updater.cancel() + if not self.listener.is_cancelled: + async_to_sync(self.listener.on_download_complete) + + def _download_folder(self, folder_id, path, folder_name): + folder_name = folder_name.replace("/", "") + if not ospath.exists(f"{path}/{folder_name}"): + makedirs(f"{path}/{folder_name}") + path += f"/{folder_name}" + result = self.get_files_by_folder_id(folder_id) + if len(result) == 0: + return + result = sorted(result, key=lambda k: k["name"]) + for item in result: + file_id = item["id"] + filename = item["name"] + shortcut_details = item.get("shortcutDetails") + if shortcut_details is not None: + file_id = shortcut_details["targetId"] + mime_type = shortcut_details["targetMimeType"] + else: + mime_type = item.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + self._download_folder(file_id, path, filename) + elif not ospath.isfile( + f"{path}{filename}", + ) and not filename.lower().endswith( + tuple(self.listener.extension_filter), + ): + self._download_file(file_id, path, filename, mime_type) + if self.listener.is_cancelled: + break + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=(retry_if_exception_type(Exception)), + ) + def _download_file(self, file_id, path, filename, mime_type, export=False): + if export: + request = self.service.files().export_media( + fileId=file_id, + mimeType="application/pdf", + ) + else: + request = self.service.files().get_media( + fileId=file_id, + supportsAllDrives=True, + acknowledgeAbuse=True, + ) + filename = filename.replace("/", "") + if export: + filename = f"{filename}.pdf" + if len(filename.encode()) > 255: + ext = ospath.splitext(filename)[1] + filename = f"{filename[:245]}{ext}" + + if self.listener.name.endswith(ext): + self.listener.name = filename + if self.listener.is_cancelled: + return None + fh = FileIO(f"{path}/{filename}", "wb") + downloader = MediaIoBaseDownload(fh, request, chunksize=50 * 1024 * 1024) + done = False + retries = 0 + while not done: + if self.listener.is_cancelled: + fh.close() + break + try: + self.status, done = downloader.next_chunk() + except HttpError as err: + LOGGER.error(err) + if err.resp.status in [500, 502, 503, 504, 429] and retries < 10: + retries += 1 + continue + if err.resp.get("content-type", "").startswith("application/json"): + reason = ( + eval(err.content).get("error").get("errors")[0].get("reason") + ) + if "fileNotDownloadable" in reason and "document" in mime_type: + return self._download_file( + file_id, + path, + filename, + mime_type, + True, + ) + if reason not in [ + "downloadQuotaExceeded", + "dailyLimitExceeded", + ]: + raise err + if self.use_sa: + if self.sa_count >= self.sa_number: + LOGGER.info( + f"Reached the maximum number of service account switches: {self.sa_count}", + ) + raise err + if self.listener.is_cancelled: + return None + self.switch_service_account() + LOGGER.info(f"Got: {reason}, Trying Again...") + return self._download_file( + file_id, + path, + filename, + mime_type, + ) + LOGGER.error(f"Got: {reason}") + raise err + self.file_processed_bytes = 0 + return None diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/helper.py b/bot/helper/mirror_leech_utils/gdrive_utils/helper.py new file mode 100644 index 000000000..30cd7fed8 --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/helper.py @@ -0,0 +1,267 @@ +from logging import ERROR, getLogger +from os import listdir 
+from os import path as ospath +from pickle import load as pload +from random import randrange +from re import search as re_search +from urllib.parse import parse_qs, urlparse + +from google.oauth2 import service_account +from google_auth_httplib2 import AuthorizedHttp +from googleapiclient.discovery import build +from googleapiclient.http import build_http +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +from bot.core.config_manager import Config +from bot.helper.ext_utils.links_utils import is_gdrive_id + +LOGGER = getLogger(__name__) +getLogger("googleapiclient.discovery").setLevel(ERROR) + + +class GoogleDriveHelper: + def __init__(self): + self._OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"] + self.token_path = "token.pickle" + self.G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder" + self.G_DRIVE_BASE_DOWNLOAD_URL = ( + "https://drive.google.com/uc?id={}&export=download" + ) + self.G_DRIVE_DIR_BASE_DOWNLOAD_URL = ( + "https://drive.google.com/drive/folders/{}" + ) + self.is_uploading = False + self.is_downloading = False + self.is_cloning = False + self.sa_index = 0 + self.sa_count = 1 + self.sa_number = 100 + self.alt_auth = False + self.service = None + self.total_files = 0 + self.total_folders = 0 + self.file_processed_bytes = 0 + self.proc_bytes = 0 + self.total_time = 0 + self.status = None + self.update_interval = 3 + self.use_sa = Config.USE_SERVICE_ACCOUNTS + + @property + def speed(self): + try: + return self.proc_bytes / self.total_time + except Exception: + return 0 + + @property + def processed_bytes(self): + return self.proc_bytes + + async def progress(self): + if self.status is not None: + chunk_size = ( + self.status.total_size * self.status.progress() + - self.file_processed_bytes + ) + self.file_processed_bytes = ( + self.status.total_size * self.status.progress() + ) + self.proc_bytes += chunk_size + self.total_time += self.update_interval + + def authorize(self): + credentials = None + if self.use_sa: + json_files = listdir("accounts") + self.sa_number = len(json_files) + self.sa_index = randrange(self.sa_number) + LOGGER.info( + f"Authorizing with {json_files[self.sa_index]} service account", + ) + credentials = service_account.Credentials.from_service_account_file( + f"accounts/{json_files[self.sa_index]}", + scopes=self._OAUTH_SCOPE, + ) + elif ospath.exists(self.token_path): + LOGGER.info(f"Authorize with {self.token_path}") + with open(self.token_path, "rb") as f: + credentials = pload(f) + else: + LOGGER.error("token.pickle not found!") + authorized_http = AuthorizedHttp(credentials, http=build_http()) + authorized_http.http.disable_ssl_certificate_validation = True + return build("drive", "v3", http=authorized_http, cache_discovery=False) + + def switch_service_account(self): + if self.sa_index == self.sa_number - 1: + self.sa_index = 0 + else: + self.sa_index += 1 + self.sa_count += 1 + LOGGER.info(f"Switching to {self.sa_index} index") + self.service = self.authorize() + + def get_id_from_url(self, link, user_id=""): + if user_id and link.startswith("mtp:"): + self.use_sa = False + self.token_path = f"tokens/{user_id}.pickle" + link = link.replace("mtp:", "", 1) + elif link.startswith("sa:"): + self.use_sa = True + link = link.replace("sa:", "", 1) + elif link.startswith("tp:"): + self.use_sa = False + link = link.replace("tp:", "", 1) + if is_gdrive_id(link): + return link + if "folders" in link or "file" in link: + regex = 
r"https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)" + res = re_search(regex, link) + if res is None: + raise IndexError("G-Drive ID not found.") + return res.group(3) + parsed = urlparse(link) + return parse_qs(parsed.query)["id"][0] + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def set_permission(self, file_id): + permissions = { + "role": "reader", + "type": "anyone", + "value": None, + "withLink": True, + } + return ( + self.service.permissions() + .create(fileId=file_id, body=permissions, supportsAllDrives=True) + .execute() + ) + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def get_file_metadata(self, file_id): + return ( + self.service.files() + .get( + fileId=file_id, + supportsAllDrives=True, + fields="name, id, mimeType, size", + ) + .execute() + ) + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def get_files_by_folder_id(self, folder_id, item_type=""): + page_token = None + files = [] + if not item_type: + q = f"'{folder_id}' in parents and trashed = false" + elif item_type == "folders": + q = f"'{folder_id}' in parents and mimeType = '{self.G_DRIVE_DIR_MIME_TYPE}' and trashed = false" + else: + q = f"'{folder_id}' in parents and mimeType != '{self.G_DRIVE_DIR_MIME_TYPE}' and trashed = false" + while True: + response = ( + self.service.files() + .list( + supportsAllDrives=True, + includeItemsFromAllDrives=True, + q=q, + spaces="drive", + pageSize=200, + fields="nextPageToken, files(id, name, mimeType, size, shortcutDetails)", + orderBy="folder, name", + pageToken=page_token, + ) + .execute() + ) + files.extend(response.get("files", [])) + page_token = response.get("nextPageToken") + if page_token is None: + break + return files + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + def create_directory(self, directory_name, dest_id): + file_metadata = { + "name": directory_name, + "description": "Uploaded by Mirror-leech-telegram-bot", + "mimeType": self.G_DRIVE_DIR_MIME_TYPE, + } + if dest_id is not None: + file_metadata["parents"] = [dest_id] + file = ( + self.service.files() + .create(body=file_metadata, supportsAllDrives=True) + .execute() + ) + file_id = file.get("id") + if not Config.IS_TEAM_DRIVE: + self.set_permission(file_id) + LOGGER.info( + f"Created G-Drive Folder:\nName: {file.get('name')}\nID: {file_id}", + ) + return file_id + + def escapes(self, estr): + chars = ["\\", "'", '"', r"\a", r"\b", r"\f", r"\n", r"\r", r"\t"] + for char in chars: + estr = estr.replace(char, f"\\{char}") + return estr.strip() + + """ + def get_recursive_list(self, file, rootId): + rtnlist = [] + if not rootId: + rootId = file.get('teamDriveId') + if rootId == "root": + rootId = self.service.files().get( + fileId='root', fields='id').execute().get('id') + x = file.get("name") + y = file.get("id") + while (y != rootId): + rtnlist.append(x) + file = self.service.files().get(fileId=file.get("parents")[0], supportsAllDrives=True, + fields='id, name, parents').execute() + x = file.get("name") + y = file.get("id") + rtnlist.reverse() + return rtnlist + """ + + async def cancel_task(self): + self.listener.is_cancelled = True + if self.is_downloading: + LOGGER.info(f"Cancelling Download: 
{self.listener.name}") + await self.listener.on_download_error("Stopped by user!") + elif self.is_cloning: + LOGGER.info(f"Cancelling Clone: {self.listener.name}") + await self.listener.on_upload_error( + "your clone has been stopped and cloned data has been deleted!", + ) + elif self.is_uploading: + LOGGER.info(f"Cancelling Upload: {self.listener.name}") + await self.listener.on_upload_error( + "your upload has been stopped and uploaded data has been deleted!", + ) diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/list.py b/bot/helper/mirror_leech_utils/gdrive_utils/list.py new file mode 100644 index 000000000..5b47a467f --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/list.py @@ -0,0 +1,381 @@ +from asyncio import Event, gather, wait_for +from functools import partial +from logging import getLogger +from time import time + +from aiofiles.os import path as aiopath +from natsort import natsorted +from pyrogram.filters import regex, user +from pyrogram.handlers import CallbackQueryHandler +from tenacity import RetryError + +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task, update_user_ldata +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.status_utils import ( + get_readable_file_size, + get_readable_time, +) +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + delete_message, + edit_message, + send_message, +) + +LOGGER = getLogger(__name__) + +LIST_LIMIT = 6 + + +@new_task +async def id_updates(_, query, obj): + await query.answer() + message = query.message + data = query.data.split() + if data[1] == "cancel": + obj.id = "Task has been cancelled!" 
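+        # Mark the task cancelled, wake the waiting list handler, and delete the selection menu.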
+ obj.listener.is_cancelled = True + obj.event.set() + await delete_message(message) + return + if obj.query_proc: + return + obj.query_proc = True + if data[1] == "pre": + obj.iter_start -= LIST_LIMIT * obj.page_step + await obj.get_items_buttons() + elif data[1] == "nex": + obj.iter_start += LIST_LIMIT * obj.page_step + await obj.get_items_buttons() + elif data[1] == "back": + if data[2] == "dr": + await obj.choose_token() + else: + await obj.get_pevious_id() + elif data[1] == "dr": + index = int(data[2]) + i = obj.drives[index] + obj.id = i["id"] + obj.parents = [{"id": i["id"], "name": i["name"]}] + await obj.get_items() + elif data[1] == "pa": + index = int(data[3]) + i = obj.items_list[index] + obj.id = i["id"] + if data[2] == "fo": + obj.parents.append({"id": i["id"], "name": i["name"]}) + await obj.get_items() + else: + await delete_message(message) + obj.event.set() + elif data[1] == "ps": + if obj.page_step == int(data[2]): + obj.query_proc = False + return + obj.page_step = int(data[2]) + await obj.get_items_buttons() + elif data[1] == "root": + obj.id = obj.parents[0]["id"] + obj.parents = [obj.parents[0]] + await obj.get_items() + elif data[1] == "itype": + obj.item_type = data[2] + await obj.get_items() + elif data[1] == "cur": + await delete_message(message) + obj.event.set() + elif data[1] == "def": + if obj.token_path != obj.user_token_path: + id_ = f"sa:{obj.id}" if obj.use_sa else f"tp:{obj.id}" + else: + id_ = f"mtp:{obj.id}" + if id_ != obj.listener.user_dict.get("gdrive_id"): + update_user_ldata(obj.listener.user_id, "gdrive_id", id_) + await obj.get_items_buttons() + if Config.DATABASE_URL: + await database.update_user_data(obj.listener.user_id) + elif data[1] == "owner": + obj.token_path = "token.pickle" + obj.use_sa = False + obj.id = "" + obj.parents = [] + await obj.list_drives() + elif data[1] == "user": + obj.token_path = obj.user_token_path + obj.use_sa = False + obj.id = "" + obj.parents = [] + await obj.list_drives() + elif data[1] == "sa": + obj.token_path = "accounts" + obj.use_sa = True + obj.id = "" + obj.parents = [] + await obj.list_drives() + obj.query_proc = False + + +class GoogleDriveList(GoogleDriveHelper): + def __init__(self, listener): + self.listener = listener + self._token_user = False + self._token_owner = False + self._sa_owner = False + self._reply_to = None + self._time = time() + self._timeout = 240 + self.drives = [] + self.query_proc = False + self.item_type = "folders" + self.event = Event() + self.user_token_path = f"tokens/{self.listener.user_id}.pickle" + self.id = "" + self.parents = [] + self.list_status = "" + self.items_list = [] + self.iter_start = 0 + self.page_step = 1 + super().__init__() + + async def _event_handler(self): + pfunc = partial(id_updates, obj=self) + handler = self.listener.client.add_handler( + CallbackQueryHandler( + pfunc, + filters=regex("^gdq") & user(self.listener.user_id), + ), + group=-1, + ) + try: + await wait_for(self.event.wait(), timeout=self._timeout) + except Exception: + self.id = "Timed Out. Task has been cancelled!" 
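+            # On timeout, mark the task cancelled and set the event so the waiting caller is released.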
+ self.listener.is_cancelled = True + self.event.set() + finally: + self.listener.client.remove_handler(*handler) + + async def _send_list_message(self, msg, button): + if not self.listener.is_cancelled: + if self._reply_to is None: + self._reply_to = await send_message( + self.listener.message, + msg, + button, + ) + else: + await edit_message(self._reply_to, msg, button) + + async def get_items_buttons(self): + items_no = len(self.items_list) + pages = (items_no + LIST_LIMIT - 1) // LIST_LIMIT + if items_no <= self.iter_start: + self.iter_start = 0 + elif self.iter_start < 0 or self.iter_start > items_no: + self.iter_start = LIST_LIMIT * (pages - 1) + page = (self.iter_start / LIST_LIMIT) + 1 if self.iter_start != 0 else 1 + buttons = ButtonMaker() + for index, item in enumerate( + self.items_list[self.iter_start : LIST_LIMIT + self.iter_start], + ): + orig_index = index + self.iter_start + if item["mimeType"] == self.G_DRIVE_DIR_MIME_TYPE: + ptype = "fo" + name = item["name"] + else: + ptype = "fi" + name = ( + f"[{get_readable_file_size(float(item['size']))}] {item['name']}" + ) + buttons.data_button(name, f"gdq pa {ptype} {orig_index}") + if items_no > LIST_LIMIT: + for i in [1, 2, 4, 6, 10, 30, 50, 100]: + buttons.data_button(i, f"gdq ps {i}", position="header") + buttons.data_button("Previous", "gdq pre", position="footer") + buttons.data_button("Next", "gdq nex", position="footer") + if self.list_status == "gdd": + if self.item_type == "folders": + buttons.data_button("Files", "gdq itype files", position="footer") + else: + buttons.data_button( + "Folders", + "gdq itype folders", + position="footer", + ) + if self.list_status == "gdu" or len(self.items_list) > 0: + buttons.data_button("Choose Current Path", "gdq cur", position="footer") + if self.list_status == "gdu": + buttons.data_button("Set as Default Path", "gdq def", position="footer") + if (len(self.parents) > 1 and len(self.drives) > 1) or ( + self._token_user and self._token_owner + ): + buttons.data_button("Back", "gdq back pa", position="footer") + if len(self.parents) > 1: + buttons.data_button("Back To Root", "gdq root", position="footer") + buttons.data_button("Cancel", "gdq cancel", position="footer") + button = buttons.build_menu(f_cols=2) + msg = "Choose Path:" + ( + "\nTransfer Type: Download" + if self.list_status == "gdd" + else "\nTransfer Type: Upload" + ) + if self.list_status == "gdu": + default_id = self.listener.user_dict.get("gdrive_id") or Config.GDRIVE_ID + msg += f"\nDefault Gdrive ID: {default_id}" if default_id else "" + msg += f"\n\nItems: {items_no}" + if items_no > LIST_LIMIT: + msg += f" | Page: {int(page)}/{pages} | Page Step: {self.page_step}" + msg += f"\n\nItem Type: {self.item_type}\nToken Path: {self.token_path}" + msg += f"\n\nCurrent ID: {self.id}" + msg += f"\nCurrent Path: {('/').join(i['name'] for i in self.parents)}" + msg += ( + f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + ) + await self._send_list_message(msg, button) + + async def get_items(self, itype=""): + if self.list_status == "gdu": + self.item_type = "folders" + elif itype: + self.item_type = itype + try: + files = self.get_files_by_folder_id(self.id, self.item_type) + if self.listener.is_cancelled: + return + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + self.id = str(err).replace(">", "").replace("<", "") + self.event.set() + return + if len(files) == 0 and itype != 
self.item_type and self.list_status == "gdd": + itype = "folders" if self.item_type == "files" else "files" + self.item_type = itype + await self.get_items(itype) + else: + self.items_list = natsorted(files) + self.iter_start = 0 + await self.get_items_buttons() + + async def list_drives(self): + self.service = self.authorize() + try: + result = self.service.drives().list(pageSize="100").execute() + except Exception as e: + self.id = str(e) + self.event.set() + return + drives = result["drives"] + if len(drives) == 0 and not self.use_sa: + self.drives = [{"id": "root", "name": "root"}] + self.parents = [{"id": "root", "name": "root"}] + self.id = "root" + await self.get_items() + elif len(drives) == 0: + msg = "Service accounts don't have access to any drive!" + buttons = ButtonMaker() + if self._token_user and self._token_owner: + buttons.data_button("Back", "gdq back dr", position="footer") + buttons.data_button("Cancel", "gdq cancel", position="footer") + button = buttons.build_menu(2) + await self._send_list_message(msg, button) + elif self.use_sa and len(drives) == 1: + self.id = drives[0]["id"] + self.drives = [{"id": self.id, "name": drives[0]["name"]}] + self.parents = [{"id": self.id, "name": drives[0]["name"]}] + await self.get_items() + else: + msg = "Choose Drive:" + ( + "\nTransfer Type: Download" + if self.list_status == "gdd" + else "\nTransfer Type: Upload" + ) + msg += f"\nToken Path: {self.token_path}" + msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + buttons = ButtonMaker() + self.drives.clear() + self.parents.clear() + if not self.use_sa: + buttons.data_button("root", "gdq dr 0") + self.drives = [{"id": "root", "name": "root"}] + for index, item in enumerate(drives, start=1): + self.drives.append({"id": item["id"], "name": item["name"]}) + buttons.data_button(item["name"], f"gdq dr {index}") + if self._token_user and self._token_owner: + buttons.data_button("Back", "gdq back dr", position="footer") + buttons.data_button("Cancel", "gdq cancel", position="footer") + button = buttons.build_menu(2) + await self._send_list_message(msg, button) + + async def choose_token(self): + if ( + (self._token_user and self._token_owner) + or (self._sa_owner and self._token_owner) + or (self._sa_owner and self._token_user) + ): + msg = "Choose Token:" + ( + "\nTransfer Type: Download" + if self.list_status == "gdd" + else "\nTransfer Type: Upload" + ) + msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + buttons = ButtonMaker() + if self._token_owner: + buttons.data_button("Owner Token", "gdq owner") + if self._sa_owner: + buttons.data_button("Service Accounts", "gdq sa") + if self._token_user: + buttons.data_button("My Token", "gdq user") + buttons.data_button("Cancel", "gdq cancel") + button = buttons.build_menu(2) + await self._send_list_message(msg, button) + else: + if self._token_owner: + self.token_path = "token.pickle" + self.use_sa = False + elif self._token_user: + self.token_path = self.user_token_path + self.use_sa = False + else: + self.token_path = "accounts" + self.use_sa = True + await self.list_drives() + + async def get_pevious_id(self): + if self.parents: + self.parents.pop() + if self.parents: + self.id = self.parents[-1]["id"] + await self.get_items() + else: + await self.list_drives() + else: + await self.list_drives() + + async def get_target_id(self, status, token_path=None): + self.list_status = status + if token_path is None: + self._token_user, self._token_owner, self._sa_owner = await gather( + 
aiopath.exists(self.user_token_path), + aiopath.exists("token.pickle"), + aiopath.exists("accounts"), + ) + if not self._token_owner and not self._token_user and not self._sa_owner: + self.event.set() + return "token.pickle or service accounts do not exist!" + await self.choose_token() + else: + self.token_path = token_path + self.use_sa = self.token_path == "accounts" + await self.list_drives() + await self._event_handler() + if self._reply_to: + await delete_message(self._reply_to) + if not self.listener.is_cancelled: + if self.token_path == self.user_token_path: + return f"mtp:{self.id}" + return f"sa:{self.id}" if self.use_sa else f"tp:{self.id}" + return self.id diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/search.py b/bot/helper/mirror_leech_utils/gdrive_utils/search.py new file mode 100644 index 000000000..8422ef56a --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/search.py @@ -0,0 +1,187 @@ +from logging import getLogger + +from bot import drives_ids, drives_names, index_urls, user_data +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class GoogleDriveSearch(GoogleDriveHelper): + def __init__( + self, + stop_dup=False, + no_multi=False, + is_recursive=True, + item_type="", + ): + super().__init__() + self._stop_dup = stop_dup + self._no_multi = no_multi + self._is_recursive = is_recursive + self._item_type = item_type + + def _drive_query(self, dir_id, file_name, is_recursive): + try: + if is_recursive: + if self._stop_dup: + query = f"name = '{file_name}' and " + else: + file_name = file_name.split() + query = "".join( + f"name contains '{name}' and " + for name in file_name + if name != "" + ) + if self._item_type == "files": + query += f"mimeType != '{self.G_DRIVE_DIR_MIME_TYPE}' and " + elif self._item_type == "folders": + query += f"mimeType = '{self.G_DRIVE_DIR_MIME_TYPE}' and " + query += "trashed = false" + if dir_id == "root": + return ( + self.service.files() + .list( + q=f"{query} and 'me' in owners", + pageSize=200, + spaces="drive", + fields="files(id, name, mimeType, size, parents)", + orderBy="folder, name asc", + ) + .execute() + ) + return ( + self.service.files() + .list( + supportsAllDrives=True, + includeItemsFromAllDrives=True, + driveId=dir_id, + q=query, + spaces="drive", + pageSize=150, + fields="files(id, name, mimeType, size, teamDriveId, parents)", + corpora="drive", + orderBy="folder, name asc", + ) + .execute() + ) + if self._stop_dup: + query = f"'{dir_id}' in parents and name = '{file_name}' and " + else: + query = f"'{dir_id}' in parents and " + file_name = file_name.split() + for name in file_name: + if name != "": + query += f"name contains '{name}' and " + if self._item_type == "files": + query += f"mimeType != '{self.G_DRIVE_DIR_MIME_TYPE}' and " + elif self._item_type == "folders": + query += f"mimeType = '{self.G_DRIVE_DIR_MIME_TYPE}' and " + query += "trashed = false" + return ( + self.service.files() + .list( + supportsAllDrives=True, + includeItemsFromAllDrives=True, + q=query, + spaces="drive", + pageSize=150, + fields="files(id, name, mimeType, size)", + orderBy="folder, name asc", + ) + .execute() + ) + except Exception as err: + err = str(err).replace(">", "").replace("<", "") + LOGGER.error(err) + return {"files": []} + + def drive_list(self, file_name, target_id="", user_id=""): + msg = "" + file_name = self.escapes(str(file_name)) + contents_no = 0 + telegraph_content = []
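+ # NOTE: results are accumulated as Telegraph-ready HTML; a chunk is flushed to telegraph_content once it nears Telegraph's ~39 KB page limit.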
+ Title = False + + if target_id.startswith("mtp:"): + drives = self.get_user_drive(target_id, user_id) + elif target_id: + drives = [ + ( + "From Owner", + target_id.replace("tp:", "", 1), + index_urls[0] if index_urls else "", + ), + ] + else: + drives = zip(drives_names, drives_ids, index_urls, strict=False) + if ( + not target_id.startswith("mtp:") and len(drives_ids) > 1 + ) or target_id.startswith("tp:"): + self.use_sa = False + + self.service = self.authorize() + + for drive_name, dir_id, index_url in drives: + isRecur = ( + False + if self._is_recursive and len(dir_id) > 23 + else self._is_recursive + ) + response = self._drive_query(dir_id, file_name, isRecur) + if not response["files"]: + if self._no_multi: + break + continue + if not Title: + msg += f"<h4>Search Result For {file_name}</h4>" + Title = True + if drive_name: + msg += f"╾────────────╼<br><b>{drive_name}</b><br>╾────────────╼<br>" + for file in response.get("files", []): + mime_type = file.get("mimeType") + if mime_type == self.G_DRIVE_DIR_MIME_TYPE: + furl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(file.get("id")) + msg += f"📁 <code>{file.get('name')}<br>(folder)</code><br>" + msg += f"<b><a href={furl}>Drive Link</a></b>" + if index_url: + url = f"{index_url}findpath?id={file.get('id')}" + msg += f' <b>| <a href="{url}">Index Link</a></b>' + elif mime_type == "application/vnd.google-apps.shortcut": + furl = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(file.get("id")) + msg += ( + f"⁍<a href='{furl}'>{file.get('name')}" + f"</a> (shortcut)" + ) + else: + furl = self.G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id")) + msg += f"📄 <code>{file.get('name')}<br>({get_readable_file_size(int(file.get('size', 0)))})</code><br>" + msg += f"<b><a href={furl}>Drive Link</a></b>" + if index_url: + url = f"{index_url}findpath?id={file.get('id')}" + msg += f' <b>| <a href="{url}">Index Link</a></b>' + if mime_type.startswith(("image", "video", "audio")): + urlv = ( + f"{index_url}findpath?id={file.get('id')}&view=true" + ) + msg += f' <b>| <a href="{urlv}">View Link</a></b>' + msg += "<br><br>" + contents_no += 1 + if len(msg.encode("utf-8")) > 39000: + telegraph_content.append(msg) + msg = "" + if self._no_multi: + break + + if msg != "": + telegraph_content.append(msg) + + return telegraph_content, contents_no + + def get_user_drive(self, target_id, user_id): + dest_id = target_id.replace("mtp:", "", 1) + self.token_path = f"tokens/{user_id}.pickle" + self.use_sa = False + user_dict = user_data.get(user_id, {}) + INDEX = user_dict["index_url"] if user_dict.get("index_url") else "" + return [("User Choice", dest_id, INDEX)]
diff --git a/bot/helper/mirror_leech_utils/gdrive_utils/upload.py b/bot/helper/mirror_leech_utils/gdrive_utils/upload.py new file mode 100644 index 000000000..e5c936dfa --- /dev/null +++ b/bot/helper/mirror_leech_utils/gdrive_utils/upload.py @@ -0,0 +1,263 @@ +import contextlib +from logging import getLogger +from os import listdir, remove +from os import path as ospath + +from googleapiclient.errors import HttpError +from googleapiclient.http import MediaFileUpload +from tenacity import ( + RetryError, + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import SetInterval, async_to_sync +from bot.helper.ext_utils.files_utils import get_mime_type +from bot.helper.mirror_leech_utils.gdrive_utils.helper import GoogleDriveHelper + +LOGGER = getLogger(__name__) + + +class GoogleDriveUpload(GoogleDriveHelper): + def __init__(self, listener, path): + self.listener = listener + self._updater = None + self._path = path + self._is_errored = False + super().__init__() + self.is_uploading = True + + def user_setting(self): + if self.listener.up_dest.startswith("mtp:"): + self.token_path = f"tokens/{self.listener.user_id}.pickle" + self.listener.up_dest = self.listener.up_dest.replace("mtp:", "", 1) + self.use_sa = False + elif self.listener.up_dest.startswith("tp:"): + self.listener.up_dest = self.listener.up_dest.replace("tp:", "", 1) + self.use_sa = False + elif self.listener.up_dest.startswith("sa:"): + self.listener.up_dest = self.listener.up_dest.replace("sa:", "", 1) + self.use_sa = True + + def upload(self): + self.user_setting() + self.service = self.authorize() + LOGGER.info(f"Uploading: {self._path}") + self._updater = SetInterval(self.update_interval, self.progress) + link = None + dir_id = None + mime_type = None + try: + if ospath.isfile(self._path): + if self._path.lower().endswith( + tuple(self.listener.extension_filter), + ): + raise Exception( + "This file extension is excluded by extension filter!", + ) + mime_type = get_mime_type(self._path) + link = self._upload_file( + self._path, + self.listener.name, + mime_type, + self.listener.up_dest, + in_dir=False, + ) + if self.listener.is_cancelled: + return + if link is None: + raise Exception("Upload has been manually cancelled") + LOGGER.info(f"Uploaded To G-Drive: {self._path}") + else: + mime_type = "Folder" + dir_id = self.create_directory( + ospath.basename(ospath.abspath(self.listener.name)), + self.listener.up_dest, + ) + result = self._upload_dir( + self._path, + dir_id, + ) + if result is None: + raise Exception("Upload has been manually cancelled!") + link = self.G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) + if self.listener.is_cancelled: + return + LOGGER.info(f"Uploaded To G-Drive: {self.listener.name}") + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") + err = err.last_attempt.exception() + err = 
str(err).replace(">", "").replace("<", "") + async_to_sync(self.listener.on_upload_error, err) + self._is_errored = True + finally: + self._updater.cancel() + + if self.listener.is_cancelled and not self._is_errored: + if mime_type == "Folder" and dir_id: + LOGGER.info("Deleting uploaded data from Drive...") + self.service.files().delete( + fileId=dir_id, + supportsAllDrives=True, + ).execute() + return + if self._is_errored: + return + async_to_sync( + self.listener.on_upload_complete, + link, + self.total_files, + self.total_folders, + mime_type, + dir_id=self.get_id_from_url(link) if link else None, + ) + + def _upload_dir(self, input_directory, dest_id): + list_dirs = listdir(input_directory) + if len(list_dirs) == 0: + return dest_id + new_id = None + for item in list_dirs: + current_file_name = ospath.join(input_directory, item) + if ospath.isdir(current_file_name): + current_dir_id = self.create_directory(item, dest_id) + new_id = self._upload_dir( + current_file_name, + current_dir_id, + ) + self.total_folders += 1 + elif not item.lower().endswith(tuple(self.listener.extension_filter)): + mime_type = get_mime_type(current_file_name) + file_name = current_file_name.split("/")[-1] + self._upload_file( + current_file_name, + file_name, + mime_type, + dest_id, + ) + self.total_files += 1 + new_id = dest_id + else: + remove(current_file_name) + new_id = "filter" + if self.listener.is_cancelled: + break + return new_id + + @retry( + wait=wait_exponential(multiplier=2, min=3, max=6), + stop=stop_after_attempt(3), + retry=(retry_if_exception_type(Exception)), + ) + def _upload_file( + self, + file_path, + file_name, + mime_type, + dest_id, + in_dir=True, + ): + # File body description + file_metadata = { + "name": file_name, + "description": "Uploaded by Mirror-leech-telegram-bot", + "mimeType": mime_type, + } + if dest_id is not None: + file_metadata["parents"] = [dest_id] + + if ospath.getsize(file_path) == 0: + media_body = MediaFileUpload( + file_path, + mimetype=mime_type, + resumable=False, + ) + response = ( + self.service.files() + .create( + body=file_metadata, + media_body=media_body, + supportsAllDrives=True, + ) + .execute() + ) + if not Config.IS_TEAM_DRIVE: + self.set_permission(response["id"]) + + drive_file = ( + self.service.files() + .get(fileId=response["id"], supportsAllDrives=True) + .execute() + ) + return self.G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) + media_body = MediaFileUpload( + file_path, + mimetype=mime_type, + resumable=True, + chunksize=100 * 1024 * 1024, + ) + + # Insert a file + drive_file = self.service.files().create( + body=file_metadata, + media_body=media_body, + supportsAllDrives=True, + ) + response = None + retries = 0 + while response is None and not self.listener.is_cancelled: + try: + self.status, response = drive_file.next_chunk() + except HttpError as err: + if err.resp.status in [500, 502, 503, 504, 429] and retries < 10: + retries += 1 + continue + if err.resp.get("content-type", "").startswith("application/json"): + reason = ( + eval(err.content).get("error").get("errors")[0].get("reason") + ) + if reason not in [ + "userRateLimitExceeded", + "dailyLimitExceeded", + ]: + raise err + if self.use_sa: + if self.sa_count >= self.sa_number: + LOGGER.info( + f"Reached maximum number of service accounts switching, which is {self.sa_count}", + ) + raise err + if self.listener.is_cancelled: + return None + self.switch_service_account() + LOGGER.info(f"Got: {reason}, Trying Again.") + return self._upload_file( + file_path, + file_name, 
+ mime_type, + dest_id, + in_dir, + ) + LOGGER.error(f"Got: {reason}") + raise err + if self.listener.is_cancelled: + return None + with contextlib.suppress(Exception): + remove(file_path) + self.file_processed_bytes = 0 + # Insert new permissions + if not Config.IS_TEAM_DRIVE: + self.set_permission(response["id"]) + # Define file instance and get url for download + if not in_dir: + drive_file = ( + self.service.files() + .get(fileId=response["id"], supportsAllDrives=True) + .execute() + ) + return self.G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) + return None diff --git a/bot/helper/mirror_leech_utils/rclone_utils/list.py b/bot/helper/mirror_leech_utils/rclone_utils/list.py index a3a6cce69..fc05ec66f 100644 --- a/bot/helper/mirror_leech_utils/rclone_utils/list.py +++ b/bot/helper/mirror_leech_utils/rclone_utils/list.py @@ -1,25 +1,28 @@ +from asyncio import Event, gather, wait_for +from configparser import RawConfigParser +from functools import partial from json import loads from time import time -from asyncio import Event, wait_for, wrap_future -from functools import partial -from configparser import ConfigParser from aiofiles import open as aiopen from aiofiles.os import path as aiopath -from pyrogram.filters import user, regex +from pyrogram.filters import regex, user from pyrogram.handlers import CallbackQueryHandler -from bot import LOGGER, config_dict -from bot.helper.ext_utils.bot_utils import ( - cmd_exec, - new_task, - new_thread, - get_readable_time, +from bot import LOGGER +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import cmd_exec, new_task, update_user_ldata +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.status_utils import ( get_readable_file_size, + get_readable_time, ) -from bot.helper.ext_utils.db_handler import DbManager from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import edit_message, send_message +from bot.helper.telegram_helper.message_utils import ( + delete_message, + edit_message, + send_message, +) LIST_LIMIT = 6 @@ -32,9 +35,9 @@ async def path_updates(_, query, obj): if data[1] == "cancel": obj.remote = "Task has been cancelled!" 
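+ # Cancel pressed: record the reason in obj.remote, flag cancellation, and set the event so the waiting get_rclone_path() call returns.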
obj.path = "" - obj.is_cancelled = True + obj.listener.is_cancelled = True obj.event.set() - await message.delete() + await delete_message(message) return if obj.query_proc: return @@ -45,17 +48,44 @@ async def path_updates(_, query, obj): elif data[1] == "nex": obj.iter_start += LIST_LIMIT * obj.page_step await obj.get_path_buttons() + elif data[1] == "select": + obj.select = not obj.select + await obj.get_path_buttons() elif data[1] == "back": if data[2] == "re": await obj.list_config() else: await obj.back_from_path() elif data[1] == "re": + # some remotes has space data = query.data.split(maxsplit=2) obj.remote = data[2] await obj.get_path() + elif data[1] == "clear": + obj.selected_pathes = set() + await obj.get_path_buttons() + elif data[1] == "ds": + obj.path = f"rclone_select_{time()}.txt" + async with aiopen(obj.path, "w") as txt_file: + for f in obj.selected_pathes: + await txt_file.write(f"{f}\n") + await delete_message(message) + obj.event.set() elif data[1] == "pa": index = int(data[3]) + if obj.select: + path = obj.path + ( + f"/{obj.path_list[index]['Path']}" + if obj.path + else obj.path_list[index]["Path"] + ) + if path in obj.selected_pathes: + obj.selected_pathes.remove(path) + else: + obj.selected_pathes.add(path) + await obj.get_path_buttons() + obj.query_proc = False + return obj.path += ( f"/{obj.path_list[index]['Path']}" if obj.path @@ -64,10 +94,11 @@ async def path_updates(_, query, obj): if data[2] == "fo": await obj.get_path() else: - await message.delete() + await delete_message(message) obj.event.set() elif data[1] == "ps": if obj.page_step == int(data[2]): + obj.query_proc = False return obj.page_step = int(data[2]) await obj.get_path_buttons() @@ -78,21 +109,21 @@ async def path_updates(_, query, obj): obj.item_type = data[2] await obj.get_path() elif data[1] == "cur": - await message.delete() + await delete_message(message) obj.event.set() elif data[1] == "def": path = ( f"{obj.remote}{obj.path}" - if obj.config_path == "rcl.conf" + if obj.config_path == "rclone.conf" else f"mrcc:{obj.remote}{obj.path}" ) - if path != config_dict["RCLONE_PATH"]: - config_dict["RCLONE_PATH"] = path + if path != obj.listener.user_dict.get("rclone_path"): + update_user_ldata(obj.listener.user_id, "rclone_path", path) await obj.get_path_buttons() - if config_dict["DATABASE_URL"]: - await DbManager().update_config({"RCLONE_PATH": path}) + if Config.DATABASE_URL: + await database.update_user_data(obj.listener.user_id) elif data[1] == "owner": - obj.config_path = "rcl.conf" + obj.config_path = "rclone.conf" obj.path = "" obj.remote = "" await obj.list_remotes() @@ -105,54 +136,57 @@ async def path_updates(_, query, obj): class RcloneList: - def __init__(self, client, message): - self.__user_id = message.from_user.id - self.__rc_user = False - self.__rc_owner = False - self.__client = client - self.__message = message - self.__sections = [] - self.__reply_to = None - self.__time = time() - self.__timeout = 240 + def __init__(self, listener): + self._rc_user = False + self._rc_owner = False + self._sections = [] + self._reply_to = None + self._time = time() + self._timeout = 240 + self.listener = listener self.remote = "" - self.is_cancelled = False self.query_proc = False self.item_type = "--dirs-only" self.event = Event() - self.user_rcc_path = f"tanha/{self.__user_id}.conf" + self.user_rcc_path = f"rclone/{self.listener.user_id}.conf" self.config_path = "" self.path = "" self.list_status = "" self.path_list = [] self.iter_start = 0 self.page_step = 1 + self.select = False + 
self.selected_pathes = set() - @new_thread - async def __event_handler(self): + async def _event_handler(self): pfunc = partial(path_updates, obj=self) - handler = self.__client.add_handler( + handler = self.listener.client.add_handler( CallbackQueryHandler( - pfunc, filters=regex("^rcq") & user(self.__user_id) + pfunc, + filters=regex("^rcq") & user(self.listener.user_id), ), group=-1, ) try: - await wait_for(self.event.wait(), timeout=self.__timeout) + await wait_for(self.event.wait(), timeout=self._timeout) except Exception: self.path = "" self.remote = "Timed Out. Task has been cancelled!" - self.is_cancelled = True + self.listener.is_cancelled = True self.event.set() finally: - self.__client.remove_handler(*handler) + self.listener.client.remove_handler(*handler) - async def __send_list_message(self, msg, button): - if not self.is_cancelled: - if self.__reply_to is None: - self.__reply_to = await send_message(self.__message, msg, button) + async def _send_list_message(self, msg, button): + if not self.listener.is_cancelled: + if self._reply_to is None: + self._reply_to = await send_message( + self.listener.message, + msg, + button, + ) else: - await edit_message(self.__reply_to, msg, button) + await edit_message(self._reply_to, msg, button) async def get_path_buttons(self): items_no = len(self.path_list) @@ -164,66 +198,84 @@ async def get_path_buttons(self): page = (self.iter_start / LIST_LIMIT) + 1 if self.iter_start != 0 else 1 buttons = ButtonMaker() for index, idict in enumerate( - self.path_list[self.iter_start : LIST_LIMIT + self.iter_start] + self.path_list[self.iter_start : LIST_LIMIT + self.iter_start], ): orig_index = index + self.iter_start + name = idict["Path"] + if name in self.selected_pathes or any( + p.endswith(f"/{name}") for p in self.selected_pathes + ): + name = f"✅ {name}" if idict["IsDir"]: ptype = "fo" - name = idict["Path"] else: ptype = "fi" - name = f"[{get_readable_file_size(idict['Size'])}] {idict['Path']}" - buttons.callback(name, f"rcq pa {ptype} {orig_index}") + name = f"[{get_readable_file_size(idict['Size'])}] {name}" + buttons.data_button(name, f"rcq pa {ptype} {orig_index}") if items_no > LIST_LIMIT: for i in [1, 2, 4, 6, 10, 30, 50, 100]: - buttons.callback(i, f"rcq ps {i}", position="header") - buttons.callback("Previous", "rcq pre", position="footer") - buttons.callback("Next", "rcq nex", position="footer") + buttons.data_button(i, f"rcq ps {i}", position="header") + buttons.data_button("Previous", "rcq pre", position="footer") + buttons.data_button("Next", "rcq nex", position="footer") if self.list_status == "rcd": if self.item_type == "--dirs-only": - buttons.callback( - "Files", "rcq itype --files-only", position="footer" + buttons.data_button( + "Files", + "rcq itype --files-only", + position="footer", ) else: - buttons.callback( - "Folders", "rcq itype --dirs-only", position="footer" + buttons.data_button( + "Folders", + "rcq itype --dirs-only", + position="footer", ) if self.list_status == "rcu" or len(self.path_list) > 0: - buttons.callback("Choose Current Path", "rcq cur", position="footer") + buttons.data_button("Choose Current Path", "rcq cur", position="footer") + if self.list_status == "rcd": + buttons.data_button( + f"Select: {'Enabled' if self.select else 'Disabled'}", + "rcq select", + position="footer", + ) + if len(self.selected_pathes) > 1: + buttons.data_button("Done With Selection", "rcq ds", position="footer") + buttons.data_button("Clear Selection", "rcq clear", position="footer") if self.list_status == "rcu": - 
buttons.callback("Set as Default Path", "rcq def", position="footer") + buttons.data_button("Set as Default Path", "rcq def", position="footer") if ( self.path - or len(self.__sections) > 1 - or self.__rc_user - and self.__rc_owner + or len(self._sections) > 1 + or (self._rc_user and self._rc_owner) ): - buttons.callback("Back", "rcq back pa", position="footer") + buttons.data_button("Back", "rcq back pa", position="footer") if self.path: - buttons.callback("Back To Root", "rcq root", position="footer") - buttons.callback("Cancel", "rcq cancel", position="footer") - button = buttons.column(2) + buttons.data_button("Back To Root", "rcq root", position="footer") + buttons.data_button("Cancel", "rcq cancel", position="footer") + button = buttons.build_menu(f_cols=2) msg = "Choose Path:" + ( - "\nTransfer Type: Download" + "\nTransfer Type: Download" if self.list_status == "rcd" - else "\nTransfer Type: Upload" + else "\nTransfer Type: Upload" ) if self.list_status == "rcu": - default_path = config_dict["RCLONE_PATH"] + default_path = Config.RCLONE_PATH msg += f"\nDefault Rclone Path: {default_path}" if default_path else "" msg += f"\n\nItems: {items_no}" if items_no > LIST_LIMIT: msg += f" | Page: {int(page)}/{pages} | Page Step: {self.page_step}" msg += f"\n\nItem Type: {self.item_type}\nConfig Path: {self.config_path}" msg += f"\nCurrent Path: {self.remote}{self.path}" - msg += f"\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}" - await self.__send_list_message(msg, button) + msg += ( + f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + ) + await self._send_list_message(msg, button) async def get_path(self, itype=""): - if itype: - self.item_type == itype - elif self.list_status == "rcu": - self.item_type == "--dirs-only" + if self.list_status == "rcu": + self.item_type = "--dirs-only" + elif itype: + self.item_type = itype cmd = [ "xone", "lsjson", @@ -234,78 +286,88 @@ async def get_path(self, itype=""): "--config", self.config_path, f"{self.remote}{self.path}", + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", ] - if self.is_cancelled: - return None + if self.listener.is_cancelled: + return res, err, code = await cmd_exec(cmd) - if code not in [0, -9]: + if code in [0, -9]: + result = loads(res) + if ( + len(result) == 0 + and itype != self.item_type + and self.list_status == "rcd" + ): + itype = ( + "--dirs-only" + if self.item_type == "--files-only" + else "--files-only" + ) + self.item_type = itype + await self.get_path(itype) + else: + self.path_list = sorted(result, key=lambda x: x["Path"]) + self.iter_start = 0 + await self.get_path_buttons() + else: + if not err: + err = "Use /shell cat rlog.txt to see more information" LOGGER.error( - f"While rclone listing. Path: {self.remote}{self.path}. Stderr: {err}" + f"While rclone listing. Path: {self.remote}{self.path}. 
Stderr: {err}", ) self.remote = err[:4000] self.path = "" self.event.set() - return None - result = loads(res) - if ( - len(result) == 0 - and itype != self.item_type - and self.list_status == "rcd" - ): - itype = ( - "--dirs-only" if self.item_type == "--files-only" else "--files-only" - ) - self.item_type = itype - return await self.get_path(itype) - self.path_list = sorted(result, key=lambda x: x["Path"]) - self.iter_start = 0 - await self.get_path_buttons() - return None async def list_remotes(self): - config = ConfigParser() + config = RawConfigParser() async with aiopen(self.config_path) as f: contents = await f.read() config.read_string(contents) if config.has_section("combine"): config.remove_section("combine") - self.__sections = config.sections() - if len(self.__sections) == 1: - self.remote = f"{self.__sections[0]}:" + self._sections = config.sections() + if len(self._sections) == 1: + self.remote = f"{self._sections[0]}:" await self.get_path() else: msg = "Choose Rclone remote:" + ( - "\nTransfer Type: Download" + "\nTransfer Type: Download" if self.list_status == "rcd" - else "\nTransfer Type: Upload" + else "\nTransfer Type: Upload" ) msg += f"\nConfig Path: {self.config_path}" - msg += f"\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}" + msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" buttons = ButtonMaker() - for remote in self.__sections: - buttons.callback(remote, f"rcq re {remote}:") - if self.__rc_user and self.__rc_owner: - buttons.callback("Back", "rcq back re", position="footer") - buttons.callback("Cancel", "rcq cancel", position="footer") - button = buttons.column(2) - await self.__send_list_message(msg, button) + for remote in self._sections: + buttons.data_button(remote, f"rcq re {remote}:") + if self._rc_user and self._rc_owner: + buttons.data_button("Back", "rcq back re", position="footer") + buttons.data_button("Cancel", "rcq cancel", position="footer") + button = buttons.build_menu(2) + await self._send_list_message(msg, button) async def list_config(self): - if self.__rc_user and self.__rc_owner: + if self._rc_user and self._rc_owner: msg = "Choose Rclone config:" + ( "\nTransfer Type: Download" if self.list_status == "rcd" else "\nTransfer Type: Upload" ) - msg += f"\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time))}" + msg += f"\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" buttons = ButtonMaker() - buttons.callback("Owner Config", "rcq owner") - buttons.callback("My Config", "rcq user") - buttons.callback("Cancel", "rcq cancel") - button = buttons.column(2) - await self.__send_list_message(msg, button) + buttons.data_button("Owner Config", "rcq owner") + buttons.data_button("My Config", "rcq user") + buttons.data_button("Cancel", "rcq cancel") + button = buttons.build_menu(2) + await self._send_list_message(msg, button) else: - self.config_path = "rcl.conf" if self.__rc_owner else self.user_rcc_path + self.config_path = ( + "rclone.conf" if self._rc_owner else self.user_rcc_path + ) await self.list_remotes() async def back_from_path(self): @@ -313,26 +375,27 @@ async def back_from_path(self): path = self.path.rsplit("/", 1) self.path = path[0] if len(path) > 1 else "" await self.get_path() - elif len(self.__sections) > 1: + elif len(self._sections) > 1: await self.list_remotes() else: await self.list_config() async def get_rclone_path(self, status, config_path=None): self.list_status = status - future = self.__event_handler() if config_path is None: - 
self.__rc_user = await aiopath.exists(self.user_rcc_path) - self.__rc_owner = await aiopath.exists("rcl.conf") - if not self.__rc_owner and not self.__rc_user: + self._rc_user, self._rc_owner = await gather( + aiopath.exists(self.user_rcc_path), + aiopath.exists("rclone.conf"), + ) + if not self._rc_owner and not self._rc_user: self.event.set() return "Rclone Config not Exists!" await self.list_config() else: self.config_path = config_path await self.list_remotes() - await wrap_future(future) - await self.__reply_to.delete() - if self.config_path != "rcl.conf" and not self.is_cancelled: + await self._event_handler() + await delete_message(self._reply_to) + if self.config_path != "rclone.conf" and not self.listener.is_cancelled: return f"mrcc:{self.remote}{self.path}" return f"{self.remote}{self.path}" diff --git a/bot/helper/mirror_leech_utils/rclone_utils/serve.py b/bot/helper/mirror_leech_utils/rclone_utils/serve.py new file mode 100644 index 000000000..2f7aa8649 --- /dev/null +++ b/bot/helper/mirror_leech_utils/rclone_utils/serve.py @@ -0,0 +1,62 @@ +from asyncio import create_subprocess_exec +from configparser import RawConfigParser + +from aiofiles import open as aiopen +from aiofiles.os import path as aiopath + +from bot.core.config_manager import Config + +RcloneServe = [] + + +async def rclone_serve_booter(): + if not Config.RCLONE_SERVE_URL or not await aiopath.exists("rclone.conf"): + if RcloneServe: + try: + RcloneServe[0].kill() + RcloneServe.clear() + except Exception: + pass + return + config = RawConfigParser() + async with aiopen("rclone.conf") as f: + contents = await f.read() + config.read_string(contents) + if not config.has_section("combine"): + upstreams = " ".join(f"{remote}={remote}:" for remote in config.sections()) + config.add_section("combine") + config.set("combine", "type", "combine") + config.set("combine", "upstreams", upstreams) + async with aiopen("rclone.conf", "w") as f: + config.write(f, space_around_delimiters=False) + if RcloneServe: + try: + RcloneServe[0].kill() + RcloneServe.clear() + except Exception: + pass + cmd = [ + "xone", + "serve", + "http", + "--config", + "rclone.conf", + "--no-modtime", + "combine:", + "--addr", + f":{Config.RCLONE_SERVE_PORT}", + "--vfs-cache-mode", + "full", + "--vfs-cache-max-age", + "1m0s", + "--buffer-size", + "64M", + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] + if (user := Config.RCLONE_SERVE_USER) and (pswd := Config.RCLONE_SERVE_PASS): + cmd.extend(("--user", user, "--pass", pswd)) + rcs = await create_subprocess_exec(*cmd) + RcloneServe.append(rcs) diff --git a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py index 7b3091234..d565f8926 100644 --- a/bot/helper/mirror_leech_utils/rclone_utils/transfer.py +++ b/bot/helper/mirror_leech_utils/rclone_utils/transfer.py @@ -1,104 +1,110 @@ import contextlib -from re import findall as re_findall +from asyncio import create_subprocess_exec, gather, sleep, wait_for +from asyncio.subprocess import PIPE +from configparser import RawConfigParser from json import loads -from random import randrange -from asyncio import gather, create_subprocess_exec from logging import getLogger -from configparser import ConfigParser -from asyncio.subprocess import PIPE +from random import randrange +from re import findall as re_findall from aiofiles import open as aiopen +from aiofiles.os import listdir, makedirs from aiofiles.os import path as aiopath -from aiofiles.os import mkdir, listdir -from bot import 
GLOBAL_EXTENSION_FILTER, config_dict +from bot.core.config_manager import Config from bot.helper.ext_utils.bot_utils import cmd_exec, sync_to_async -from bot.helper.ext_utils.files_utils import get_mime_type, count_files_and_folders +from bot.helper.ext_utils.files_utils import count_files_and_folders, get_mime_type LOGGER = getLogger(__name__) class RcloneTransferHelper: - def __init__(self, listener=None, name=""): - self.__listener = listener - self.__proc = None - self.__transferred_size = "0 B" - self.__eta = "-" - self.__percentage = "0%" - self.__speed = "0 B/s" - self.__size = "0 B" - self.__is_cancelled = False - self.__is_download = False - self.__is_upload = False - self.__sa_count = 1 - self.__sa_index = 0 - self.__sa_number = 0 - self.name = name + def __init__(self, listener): + self._listener = listener + self._proc = None + self._transferred_size = "0 B" + self._eta = "-" + self._percentage = "0%" + self._speed = "0 B/s" + self._size = "0 B" + self._is_download = False + self._is_upload = False + self._sa_count = 1 + self._sa_index = 0 + self._sa_number = 0 + self._use_service_accounts = Config.USE_SERVICE_ACCOUNTS + self._rclone_select = False @property def transferred_size(self): - return self.__transferred_size + return self._transferred_size @property def percentage(self): - return self.__percentage + return self._percentage @property def speed(self): - return self.__speed + return self._speed @property def eta(self): - return self.__eta + return self._eta @property def size(self): - return self.__size + return self._size - async def __progress(self): - while not (self.__proc is None or self.__is_cancelled): + async def _progress(self): + while not ( + self._proc.returncode is not None + or self._proc.stdout.at_eof() + or self._listener.is_cancelled + ): try: - data = (await self.__proc.stdout.readline()).decode() + data = await wait_for(self._proc.stdout.readline(), 60) except Exception: - continue + break if not data: break + data = data.decode().strip() if data := re_findall( r"Transferred:\s+([\d.]+\s*\w+)\s+/\s+([\d.]+\s*\w+),\s+([\d.]+%)\s*,\s+([\d.]+\s*\w+/s),\s+ETA\s+([\dwdhms]+)", data, ): ( - self.__transferred_size, - self.__size, - self.__percentage, - self.__speed, - self.__eta, + self._transferred_size, + self._size, + self._percentage, + self._speed, + self._eta, ) = data[0] + await sleep(0.05) - def __switchServiceAccount(self): - if self.__sa_index == self.__sa_number - 1: - self.__sa_index = 0 + def _switch_service_account(self): + if self._sa_index == self._sa_number - 1: + self._sa_index = 0 else: - self.__sa_index += 1 - self.__sa_count += 1 - remote = f"sa{self.__sa_index:03}" + self._sa_index += 1 + self._sa_count += 1 + remote = f"sa{self._sa_index:03}" LOGGER.info(f"Switching to {remote} remote") return remote - async def __create_rc_sa(self, remote, remote_opts): + async def _create_rc_sa(self, remote, remote_opts): sa_conf_dir = "rclone_sa" sa_conf_file = f"{sa_conf_dir}/{remote}.conf" - if not await aiopath.isdir(sa_conf_dir): - await mkdir(sa_conf_dir) - elif await aiopath.isfile(sa_conf_file): + if await aiopath.isfile(sa_conf_file): return sa_conf_file + await makedirs(sa_conf_dir, exist_ok=True) if gd_id := remote_opts.get("team_drive"): option = "team_drive" elif gd_id := remote_opts.get("root_folder_id"): option = "root_folder_id" else: - return "rcl.conf" + self._use_service_accounts = False + return "rclone.conf" files = await listdir("accounts") text = "".join( @@ -110,99 +116,98 @@ async def __create_rc_sa(self, remote, 
remote_opts): + await f.write(text) return sa_conf_file - async def __start_download(self, cmd, remote_type): - self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) - _, return_code = await gather(self.__progress(), self.__proc.wait()) - - if self.__is_cancelled: + async def _start_download(self, cmd, remote_type): + self._proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) + await self._progress() + _, stderr = await self._proc.communicate() + return_code = self._proc.returncode + if self._listener.is_cancelled: return None if return_code == 0: - await self.__listener.on_download_complete() + await self._listener.on_download_complete() return None if return_code != -9: - error = (await self.__proc.stderr.read()).decode().strip() - if ( - not error - and remote_type == "drive" - and config_dict["USE_SERVICE_ACCOUNTS"] - ): + error = stderr.decode().strip() + if not error and remote_type == "drive" and self._use_service_accounts: error = ( "Mostly your service accounts don't have access to this drive!" ) + elif not error: + error = ( + "Use /shell cat rlog.txt to see more information" + ) LOGGER.error(error) if ( - self.__sa_number != 0 + self._sa_number != 0 and remote_type == "drive" and "RATE_LIMIT_EXCEEDED" in error - and config_dict["USE_SERVICE_ACCOUNTS"] + and self._use_service_accounts ): - if self.__sa_count < self.__sa_number: - remote = self.__switchServiceAccount() + if self._sa_count < self._sa_number: + remote = self._switch_service_account() cmd[6] = f"{remote}:{cmd[6].split(':', 1)[1]}" - if self.__is_cancelled: + if self._listener.is_cancelled: return None - return await self.__start_download(cmd, remote_type) + return await self._start_download(cmd, remote_type) LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" + f"Reached maximum number of service accounts switching, which is {self._sa_count}", ) - await self.__listener.onDownloadError(error[:4000]) + await self._listener.on_download_error(error[:4000]) return None return None - async def download(self, remote, rc_path, config_path, path): - self.__is_download = True + async def download(self, remote, config_path, path): + self._is_download = True try: - remote_opts = await self.__get_remote_options(config_path, remote) + remote_opts = await self._get_remote_options(config_path, remote) except Exception as err: - await self.__listener.onDownloadError(str(err)) + await self._listener.on_download_error(str(err)) return remote_type = remote_opts["type"] if ( remote_type == "drive" - and config_dict["USE_SERVICE_ACCOUNTS"] - and config_path == "rcl.conf" + and self._use_service_accounts + and config_path == "rclone.conf" and await aiopath.isdir("accounts") and not remote_opts.get("service_account_file") ): - config_path = await self.__create_rc_sa(remote, remote_opts) - if config_path != "rcl.conf": + config_path = await self._create_rc_sa(remote, remote_opts) + if config_path != "rclone.conf": sa_files = await listdir("accounts") - self.__sa_number = len(sa_files) - self.__sa_index = randrange(self.__sa_number) - remote = f"sa{self.__sa_index:03}" + self._sa_number = len(sa_files) + self._sa_index = randrange(self._sa_number) + remote = f"sa{self._sa_index:03}" LOGGER.info(f"Download with service account {remote}") - rc_flags = self.__listener.rc_flags or config_dict["RCLONE_FLAGS"] - cmd = self.__getUpdatedCommand( config_path, f"{remote}:{rc_path}", path, rc_flags, "copy" + 
cmd = self._get_updated_command( + config_path, + f"{remote}:{self._listener.link}", + path, + "copy", ) if ( remote_type == "drive" - and not config_dict["RCLONE_FLAGS"] - and not self.__listener.rc_flags + and not Config.RCLONE_FLAGS + and not self._listener.rc_flags ): cmd.append("--drive-acknowledge-abuse") elif remote_type != "drive": cmd.extend(("--retries-sleep", "3s")) - await self.__start_download(cmd, remote_type) + await self._start_download(cmd, remote_type) - async def __get_gdrive_link(self, config_path, remote, rc_path, mime_type): - if mime_type == "Folder": - epath = rc_path.strip("/").rsplit("/", 1) - epath = f"{remote}:{epath[0]}" if len(epath) > 1 else f"{remote}:" - destination = f"{remote}:{rc_path}" - elif rc_path: - epath = f"{remote}:{rc_path}/{self.name}" - destination = epath - else: - epath = f"{remote}:{rc_path}{self.name}" - destination = epath + async def _get_gdrive_link(self, config_path, destination, mime_type): + epath = ( + destination.rsplit("/", 1)[0] if mime_type == "Folder" else destination + ) cmd = [ "xone", @@ -213,84 +218,92 @@ async def __get_gdrive_link(self, config_path, remote, rc_path, mime_type): "--config", config_path, epath, + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", ] res, err, code = await cmd_exec(cmd) if code == 0: result = loads(res) - fid = next((r["ID"] for r in result if r["Path"] == self.name), "err") + fid = next( + (r["ID"] for r in result if r["Path"] == self._listener.name), + "err", + ) link = ( f"https://drive.google.com/drive/folders/{fid}" if mime_type == "Folder" else f"https://drive.google.com/uc?id={fid}&export=download" ) elif code != -9: + if not err: + err = "Use /shell cat rlog.txt to see more information" LOGGER.error( - f"while getting drive link. Path: {destination}. Stderr: {err}" + f"while getting drive link. Path: {destination}. Stderr: {err}", ) link = "" - return link, destination + return link - async def __start_upload(self, cmd, remote_type): - self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) - _, return_code = await gather(self.__progress(), self.__proc.wait()) + async def _start_upload(self, cmd, remote_type): + self._proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) + await self._progress() + _, stderr = await self._proc.communicate() + return_code = self._proc.returncode - if self.__is_cancelled: + if self._listener.is_cancelled: return False - if return_code == -9: return False - if return_code != 0: - error = (await self.__proc.stderr.read()).decode().strip() - if ( - not error - and remote_type == "drive" - and config_dict["USE_SERVICE_ACCOUNTS"] - ): - error = ( - "Mostly your service accounts don't have access to this drive!" 
- ) - LOGGER.error(error) - if ( - self.__sa_number != 0 - and remote_type == "drive" - and "RATE_LIMIT_EXCEEDED" in error - and config_dict["USE_SERVICE_ACCOUNTS"] - ): - if self.__sa_count < self.__sa_number: - remote = self.__switchServiceAccount() - cmd[7] = f"{remote}:{cmd[7].split(':', 1)[1]}" - return ( - False - if self.__is_cancelled - else await self.__start_upload(cmd, remote_type) - ) - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" + if return_code == 0: + return True + error = ( + stderr.decode().strip() + or "Use /shell cat rlog.txt to see more information" + ) + LOGGER.error(error) + if ( + self._sa_number != 0 + and remote_type == "drive" + and "RATE_LIMIT_EXCEEDED" in error + and self._use_service_accounts + ): + if self._sa_count < self._sa_number: + remote = self._switch_service_account() + cmd[7] = f"{remote}:{cmd[7].split(':', 1)[1]}" + return ( + False + if self._listener.is_cancelled + else await self._start_upload(cmd, remote_type) ) - await self.__listener.onUploadError(error[:4000]) - return False - return True + LOGGER.info( + f"Reached maximum number of service accounts switching, which is {self._sa_count}", + ) + await self._listener.on_upload_error(error[:4000]) + return False - async def upload(self, path, size): - self.__is_upload = True - rc_path = self.__listener.upPath.strip("/") + async def upload(self, path): + self._is_upload = True + rc_path = self._listener.up_dest if rc_path.startswith("mrcc:"): rc_path = rc_path.split("mrcc:", 1)[1] - oconfig_path = f"tanha/{self.__listener.message.from_user.id}.conf" + oconfig_path = f"rclone/{self._listener.user_id}.conf" else: - oconfig_path = "rcl.conf" + oconfig_path = "rclone.conf" oremote, rc_path = rc_path.split(":", 1) if await aiopath.isdir(path): mime_type = "Folder" - folders, files = await count_files_and_folders(path) - rc_path += f"/{self.name}" if rc_path else self.name + folders, files = await count_files_and_folders( + path, + self._listener.extension_filter, + ) + rc_path += f"/{self._listener.name}" if rc_path else self._listener.name else: - if path.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - await self.__listener.onUploadError( - "This file extension is excluded by extension filter!" 
+ if path.lower().endswith(tuple(self._listener.extension_filter)): + await self._listener.on_upload_error( + "This file extension is excluded by extension filter!", ) return mime_type = await sync_to_async(get_mime_type, path) @@ -298,9 +311,9 @@ async def upload(self, path, size): files = 1 try: - remote_opts = await self.__get_remote_options(oconfig_path, oremote) + remote_opts = await self._get_remote_options(oconfig_path, oremote) except Exception as err: - await self.__listener.onUploadError(str(err)) + await self._listener.on_upload_error(str(err)) return remote_type = remote_opts["type"] @@ -308,80 +321,93 @@ async def upload(self, path, size): fconfig_path = oconfig_path if ( remote_type == "drive" - and config_dict["USE_SERVICE_ACCOUNTS"] - and fconfig_path == "rcl.conf" + and self._use_service_accounts + and fconfig_path == "rclone.conf" and await aiopath.isdir("accounts") and not remote_opts.get("service_account_file") ): - fconfig_path = await self.__create_rc_sa(oremote, remote_opts) - if fconfig_path != "rcl.conf": + fconfig_path = await self._create_rc_sa(oremote, remote_opts) + if fconfig_path != "rclone.conf": sa_files = await listdir("accounts") - self.__sa_number = len(sa_files) - self.__sa_index = randrange(self.__sa_number) - fremote = f"sa{self.__sa_index:03}" + self._sa_number = len(sa_files) + self._sa_index = randrange(self._sa_number) + fremote = f"sa{self._sa_index:03}" LOGGER.info(f"Upload with service account {fremote}") - rc_flags = self.__listener.rc_flags or config_dict["RCLONE_FLAGS"] - method = ( - "move" if not self.__listener.seed or self.__listener.newDir else "copy" - ) - cmd = self.__getUpdatedCommand( - fconfig_path, path, f"{fremote}:{rc_path}", rc_flags, method + method = "move" + cmd = self._get_updated_command( + fconfig_path, + path, + f"{fremote}:{rc_path}", + method, ) if ( remote_type == "drive" - and not config_dict["RCLONE_FLAGS"] - and not self.__listener.rc_flags + and not Config.RCLONE_FLAGS + and not self._listener.rc_flags ): - cmd.extend(("--drive-chunk-size", "64M", "--drive-upload-cutoff", "32M")) - elif remote_type != "drive": - cmd.extend(("--retries-sleep", "3s")) + cmd.extend( + ("--drive-chunk-size", "128M", "--drive-upload-cutoff", "128M"), + ) - result = await self.__start_upload(cmd, remote_type) + result = await self._start_upload(cmd, remote_type) if not result: return + if mime_type == "Folder": + destination = f"{oremote}:{rc_path}" + elif rc_path: + destination = f"{oremote}:{rc_path}/{self._listener.name}" + else: + destination = f"{oremote}:{self._listener.name}" + if remote_type == "drive": - link, destination = await self.__get_gdrive_link( - oconfig_path, oremote, rc_path, mime_type - ) + link = await self._get_gdrive_link(oconfig_path, destination, mime_type) else: - if mime_type == "Folder": - destination = f"{oremote}:{rc_path}" - elif rc_path: - destination = f"{oremote}:{rc_path}/{self.name}" - else: - destination = f"{oremote}:{self.name}" - - cmd = ["xone", "link", "--config", oconfig_path, destination] + cmd = [ + "xone", + "link", + "--config", + oconfig_path, + destination, + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] res, err, code = await cmd_exec(cmd) if code == 0: link = res elif code != -9: + if not err: + err = "Use /shell cat rlog.txt to see more information" LOGGER.error( - f"while getting link. Path: {destination} | Stderr: {err}" + f"while getting link. 
Path: {destination} | Stderr: {err}", ) link = "" - if self.__is_cancelled: + if self._listener.is_cancelled: return LOGGER.info(f"Upload Done. Path: {destination}") - await self.__listener.onUploadComplete( - link, size, files, folders, mime_type, self.name, destination + await self._listener.on_upload_complete( + link, + files, + folders, + mime_type, + destination, ) - async def clone( - self, config_path, src_remote, src_path, destination, rc_flags, mime_type - ): + async def clone(self, config_path, src_remote, src_path, mime_type, method): + destination = self._listener.up_dest dst_remote, dst_path = destination.split(":", 1) try: src_remote_opts, dst_remote_opt = await gather( - self.__get_remote_options(config_path, src_remote), - self.__get_remote_options(config_path, dst_remote), + self._get_remote_options(config_path, src_remote), + self._get_remote_options(config_path, dst_remote), ) except Exception as err: - await self.__listener.onUploadError(str(err)) + await self._listener.on_upload_error(str(err)) return None, None src_remote_type, dst_remote_type = ( @@ -389,57 +415,91 @@ async def clone( dst_remote_opt["type"], ) - cmd = self.__getUpdatedCommand( - config_path, f"{src_remote}:{src_path}", destination, rc_flags, "copy" + cmd = self._get_updated_command( + config_path, + f"{src_remote}:{src_path}", + destination, + method, ) - if not rc_flags: + if not self._listener.rc_flags and not Config.RCLONE_FLAGS: if src_remote_type == "drive" and dst_remote_type != "drive": cmd.append("--drive-acknowledge-abuse") - elif dst_remote_type == "drive" and src_remote_type != "drive": - cmd.extend( - ("--drive-chunk-size", "64M", "--drive-upload-cutoff", "32M") - ) elif src_remote_type == "drive": cmd.extend(("--tpslimit", "3", "--transfers", "3")) - self.__proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) - _, return_code = await gather(self.__progress(), self.__proc.wait()) + self._proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE) + await self._progress() + _, stderr = await self._proc.communicate() + return_code = self._proc.returncode - if self.__is_cancelled: + if self._listener.is_cancelled: return None, None if return_code == -9: return None, None - if return_code != 0: - error = (await self.__proc.stderr.read()).decode().strip() - LOGGER.error(error) - await self.__listener.onUploadError(error[:4000]) - return None, None - if dst_remote_type == "drive": - link, destination = await self.__get_gdrive_link( - config_path, dst_remote, dst_path, mime_type - ) - return (None, None) if self.__is_cancelled else (link, destination) - if mime_type != "Folder": - destination += f"/{self.name}" if dst_path else self.name - - cmd = ["xone", "link", "--config", config_path, destination] - res, err, code = await cmd_exec(cmd) + if return_code == 0: + if mime_type != "Folder": + destination += ( + f"/{self._listener.name}" if dst_path else self._listener.name + ) + if dst_remote_type == "drive": + link = await self._get_gdrive_link( + config_path, + destination, + mime_type, + ) + return ( + (None, None) + if self._listener.is_cancelled + else (link, destination) + ) + cmd = [ + "xone", + "link", + "--config", + config_path, + destination, + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] + res, err, code = await cmd_exec(cmd) - if self.__is_cancelled: - return None, None + if self._listener.is_cancelled: + return None, None - if code == 0: - return res, destination - if code != -9: - LOGGER.error(f"while getting link. 
Path: {destination} | Stderr: {err}") - await self.__listener.onUploadError(err[:4000]) - return None, None - return None + if code == 0: + return res, destination + if code != -9: + if not err: + err = "Use /shell cat rlog.txt to see more information" + LOGGER.error( + f"while getting link. Path: {destination} | Stderr: {err}", + ) + return None, destination + return None - @staticmethod - def __getUpdatedCommand(config_path, source, destination, rc_flags, method): - ext = "*.{" + ",".join(GLOBAL_EXTENSION_FILTER) + "}" + error = ( + stderr.decode().strip() + or "Use /shell cat rlog.txt to see more information" + ) + LOGGER.error(error) + await self._listener.on_upload_error(error[:4000]) + return None, None + + def _get_updated_command( + self, + config_path, + source, + destination, + method, + ): + if source.split(":")[-1].startswith("rclone_select"): + source = f"{source.split(':')[0]}:" + self._rclone_select = True + else: + ext = "*.{" + ",".join(self._listener.extension_filter) + "}" cmd = [ "xone", method, @@ -449,20 +509,25 @@ def __getUpdatedCommand(config_path, source, destination, rc_flags, method): "-P", source, destination, - "--exclude", - ext, + "-L", + "--retries-sleep", + "3s", "--ignore-case", "--low-level-retries", "1", "-M", + "-v", + "--log-systemd", "--log-file", "rlog.txt", - "--log-level", - "DEBUG", ] - if rc_flags: - rc_flags = rc_flags.split("|") - for flag in rc_flags: + if self._rclone_select: + cmd.extend(("--files-from", self._listener.link)) + else: + cmd.extend(("--exclude", ext)) + if rcflags := self._listener.rc_flags or Config.RCLONE_FLAGS: + rcflags = rcflags.split("|") + for flag in rcflags: if ":" in flag: key, value = map(str.strip, flag.split(":", 1)) cmd.extend((key, value)) @@ -471,25 +536,25 @@ def __getUpdatedCommand(config_path, source, destination, rc_flags, method): return cmd @staticmethod - async def __get_remote_options(config_path, remote): - config = ConfigParser() + async def _get_remote_options(config_path, remote): + config = RawConfigParser() async with aiopen(config_path) as f: contents = await f.read() config.read_string(contents) options = config.options(remote) return {opt: config.get(remote, opt) for opt in options} - async def cancel_download(self): - self.__is_cancelled = True - if self.__proc is not None: + async def cancel_task(self): + self._listener.is_cancelled = True + if self._proc is not None: with contextlib.suppress(Exception): - self.__proc.kill() - if self.__is_download: - LOGGER.info(f"Cancelling Download: {self.name}") - await self.__listener.onDownloadError("Stopped by user!") - elif self.__is_upload: - LOGGER.info(f"Cancelling Upload: {self.name}") - await self.__listener.onUploadError("Cancelled by user!") + self._proc.kill() + if self._is_download: + LOGGER.info(f"Cancelling Download: {self._listener.name}") + await self._listener.on_download_error("Stopped by user!") + elif self._is_upload: + LOGGER.info(f"Cancelling Upload: {self._listener.name}") + await self._listener.on_upload_error("your upload has been stopped!") else: - LOGGER.info(f"Cancelling Clone: {self.name}") - await self.__listener.onUploadError("Your clone has been stopped!") + LOGGER.info(f"Cancelling Clone: {self._listener.name}") + await self._listener.on_upload_error("your clone has been stopped!") diff --git a/bot/helper/mirror_leech_utils/status_utils/aria2_status.py b/bot/helper/mirror_leech_utils/status_utils/aria2_status.py index 36f37f918..0a786e16c 100644 --- a/bot/helper/mirror_leech_utils/status_utils/aria2_status.py +++ 
b/bot/helper/mirror_leech_utils/status_utils/aria2_status.py @@ -1,114 +1,109 @@ from time import time from bot import LOGGER, aria2 -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - sync_to_async, - get_readable_time, -) +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_time -def get_download(gid): +def get_download(gid, old_info=None): try: - return aria2.get_download(gid) + res = aria2.get_download(gid) + return res or old_info except Exception as e: LOGGER.error(f"{e}: Aria2c, Error while getting torrent info") - return None + return old_info class Aria2Status: - def __init__(self, gid, listener, seeding=False, queued=False): - self.__gid = gid - self.__download = get_download(gid) - self.__listener = listener + def __init__(self, listener, gid, seeding=False, queued=False): + self._gid = gid + self._download = None + self.listener = listener self.queued = queued self.start_time = 0 self.seeding = seeding - self.message = self.__listener.message - def __update(self): - if self.__download is None: - self.__download = get_download(self.__gid) + def update(self): + if self._download is None: + self._download = get_download(self._gid, self._download) else: - self.__download = self.__download.live - if self.__download.followed_by_ids: - self.__gid = self.__download.followed_by_ids[0] - self.__download = get_download(self.__gid) + self._download = self._download.live + if self._download.followed_by_ids: + self._gid = self._download.followed_by_ids[0] + self._download = get_download(self._gid) def progress(self): - return self.__download.progress_string() + return self._download.progress_string() def processed_bytes(self): - return self.__download.completed_length_string() + return self._download.completed_length_string() def speed(self): - return self.__download.download_speed_string() + return self._download.download_speed_string() def name(self): - return self.__download.name + return self._download.name def size(self): - return self.__download.total_length_string() + return self._download.total_length_string() def eta(self): - return get_readable_time(int(self.__download.eta.total_seconds())) + return self._download.eta_string() def status(self): - self.__update() - if self.__download.is_waiting or self.queued: + self.update() + if self._download.is_waiting or self.queued: if self.seeding: return MirrorStatus.STATUS_QUEUEUP return MirrorStatus.STATUS_QUEUEDL - if self.__download.is_paused: + if self._download.is_paused: return MirrorStatus.STATUS_PAUSED - if self.__download.seeder and self.seeding: - return MirrorStatus.STATUS_SEEDING - return MirrorStatus.STATUS_DOWNLOADING + if self._download.seeder and self.seeding: + return MirrorStatus.STATUS_SEED + return MirrorStatus.STATUS_DOWNLOAD def seeders_num(self): - return self.__download.num_seeders + return self._download.num_seeders def leechers_num(self): - return self.__download.connections + return self._download.connections def uploaded_bytes(self): - return self.__download.upload_length_string() + return self._download.upload_length_string() - def upload_speed(self): - self.__update() - return self.__download.upload_speed_string() + def seed_speed(self): + return self._download.upload_speed_string() def ratio(self): - return f"{round(self.__download.upload_length / self.__download.completed_length, 3)}" + return f"{round(self._download.upload_length / self._download.completed_length, 3)}" def seeding_time(self): - return 
get_readable_time(time() - self.start_time, True) + return get_readable_time(time() - self.start_time) - def download(self): + def task(self): return self - def listener(self): - return self.__listener - def gid(self): - self.__update() - return self.__gid + return self._gid - async def cancel_download(self): - self.__update() - await sync_to_async(self.__update) - if self.__download.seeder and self.seeding: + async def cancel_task(self): + self.listener.is_cancelled = True + await sync_to_async(self.update) + if self._download.seeder and self.seeding: LOGGER.info(f"Cancelling Seed: {self.name()}") - await self.__listener.onUploadError( - f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}" + await self.listener.on_upload_error( + f"Seeding stopped with Ratio: {self.ratio()} and Time: {self.seeding_time()}", ) await sync_to_async( - aria2.remove, [self.__download], force=True, files=True + aria2.remove, + [self._download], + force=True, + files=True, ) - elif downloads := self.__download.followed_by: + elif downloads := self._download.followed_by: LOGGER.info(f"Cancelling Download: {self.name()}") - await self.__listener.onDownloadError("Download cancelled by user!") - downloads.append(self.__download) + await self.listener.on_download_error("Download cancelled by user!") + downloads.append(self._download) await sync_to_async(aria2.remove, downloads, force=True, files=True) else: if self.queued: @@ -117,7 +112,10 @@ async def cancel_download(self): else: LOGGER.info(f"Cancelling Download: {self.name()}") msg = "Download stopped by user!" - await self.__listener.onDownloadError(msg) + await self.listener.on_download_error(msg) await sync_to_async( - aria2.remove, [self.__download], force=True, files=True + aria2.remove, + [self._download], + force=True, + files=True, ) diff --git a/bot/helper/mirror_leech_utils/status_utils/direct_status.py b/bot/helper/mirror_leech_utils/status_utils/direct_status.py index 0740a80dd..8a11d63b2 100644 --- a/bot/helper/mirror_leech_utils/status_utils/direct_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/direct_status.py @@ -1,23 +1,22 @@ -from bot.helper.ext_utils.bot_utils import ( +from bot.helper.ext_utils.status_utils import ( MirrorStatus, - get_readable_time, get_readable_file_size, + get_readable_time, ) class DirectStatus: - def __init__(self, obj, gid, listener): - self.__gid = gid - self.__listener = listener - self.__obj = obj - self.message = self.__listener.message + def __init__(self, listener, obj, gid): + self._gid = gid + self._obj = obj + self.listener = listener def gid(self): - return self.__gid + return self._gid def progress_raw(self): try: - return self.__obj.processed_bytes / self.__obj.total_size * 100 + return self._obj.processed_bytes / self.listener.size * 100 except Exception: return 0 @@ -25,30 +24,30 @@ def progress(self): return f"{round(self.progress_raw(), 2)}%" def speed(self): - return f"{get_readable_file_size(self.__obj.speed)}/s" + return f"{get_readable_file_size(self._obj.speed)}/s" def name(self): - return self.__obj.name + return self.listener.name def size(self): - return get_readable_file_size(self.__obj.total_size) + return get_readable_file_size(self.listener.size) def eta(self): try: seconds = ( - self.__obj.total_size - self.__obj.processed_bytes - ) / self.__obj.speed + self.listener.size - self._obj.processed_bytes + ) / self._obj.speed return get_readable_time(seconds) except Exception: return "-" def status(self): - if self.__obj.task and self.__obj.task.is_waiting: + 
if self._obj.download_task and self._obj.download_task.is_waiting: return MirrorStatus.STATUS_QUEUEDL - return MirrorStatus.STATUS_DOWNLOADING + return MirrorStatus.STATUS_DOWNLOAD def processed_bytes(self): - return get_readable_file_size(self.__obj.processed_bytes) + return get_readable_file_size(self._obj.processed_bytes) - def download(self): - return self.__obj + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/extract_status.py b/bot/helper/mirror_leech_utils/status_utils/extract_status.py deleted file mode 100644 index 79da14460..000000000 --- a/bot/helper/mirror_leech_utils/status_utils/extract_status.py +++ /dev/null @@ -1,74 +0,0 @@ -from time import time - -from bot import LOGGER -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - async_to_sync, - get_readable_time, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import get_path_size - - -class ExtractStatus: - def __init__(self, name, size, gid, listener): - self.__name = name - self.__size = size - self.__gid = gid - self.__listener = listener - self.__uid = listener.uid - self.__start_time = time() - self.message = listener.message - - def gid(self): - return self.__gid - - def speed_raw(self): - return self.processed_raw() / (time() - self.__start_time) - - def progress_raw(self): - try: - return self.processed_raw() / self.__size * 100 - except Exception: - return 0 - - def progress(self): - return f"{round(self.progress_raw(), 2)}%" - - def speed(self): - return f"{get_readable_file_size(self.speed_raw())}/s" - - def name(self): - return self.__name - - def size(self): - return get_readable_file_size(self.__size) - - def eta(self): - try: - seconds = (self.__size - self.processed_raw()) / self.speed_raw() - return get_readable_time(seconds) - except Exception: - return "-" - - def status(self): - return MirrorStatus.STATUS_EXTRACTING - - def processed_bytes(self): - return get_readable_file_size(self.processed_raw()) - - def processed_raw(self): - if self.__listener.newDir: - return async_to_sync(get_path_size, self.__listener.newDir) - return async_to_sync(get_path_size, self.__listener.dir) - self.__size - - def download(self): - return self - - async def cancel_download(self): - LOGGER.info(f"Cancelling Extract: {self.__name}") - if self.__listener.suproc is not None: - self.__listener.suproc.kill() - else: - self.__listener.suproc = "cancelled" - await self.__listener.onUploadError("extracting stopped by user!") diff --git a/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py b/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py new file mode 100644 index 000000000..50af1b513 --- /dev/null +++ b/bot/helper/mirror_leech_utils/status_utils/ffmpeg_status.py @@ -0,0 +1,64 @@ +import contextlib + +from bot import LOGGER +from bot.helper.ext_utils.status_utils import ( + MirrorStatus, + get_readable_file_size, + get_readable_time, +) + + +class FFmpegStatus: + def __init__(self, listener, obj, gid, status=""): + self.listener = listener + self._obj = obj + self._gid = gid + self._cstatus = status + + def speed(self): + return f"{get_readable_file_size(self._obj.speed_raw)}/s" + + def processed_bytes(self): + return get_readable_file_size(self._obj.processed_bytes) + + async def progress(self): + return f"{round(self._obj.progress_raw, 2)}%" + + def gid(self): + return self._gid + + def name(self): + return self.listener.name + + def size(self): + return get_readable_file_size(self.listener.size) + + def eta(self): + return 
get_readable_time(self._obj.eta_raw) if self._obj.eta_raw else "-" + + def status(self): + if self._cstatus == "Convert": + return MirrorStatus.STATUS_CONVERT + if self._cstatus == "Split": + return MirrorStatus.STATUS_SPLIT + if self._cstatus == "Sample Video": + return MirrorStatus.STATUS_SAMVID + if self._cstatus == "Metadata": + return MirrorStatus.STATUS_METADATA + if self._cstatus == "Watermark": + return MirrorStatus.STATUS_WATERMARK + return MirrorStatus.STATUS_FFMPEG + + def task(self): + return self + + async def cancel_task(self): + LOGGER.info(f"Cancelling {self._cstatus}: {self.listener.name}") + self.listener.is_cancelled = True + if ( + self.listener.subproc is not None + and self.listener.subproc.returncode is None + ): + with contextlib.suppress(Exception): + self.listener.subproc.kill() + await self.listener.on_upload_error(f"{self._cstatus} stopped by user!") diff --git a/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py b/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py index d348ddb04..87c03a67c 100644 --- a/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/gdrive_status.py @@ -1,42 +1,40 @@ -from bot.helper.ext_utils.bot_utils import ( +from bot.helper.ext_utils.status_utils import ( MirrorStatus, - get_readable_time, get_readable_file_size, + get_readable_time, ) -class GdriveStatus: - def __init__(self, obj, size, message, gid, status): - self.__obj = obj - self.__size = size - self.__gid = gid - self.__status = status - self.message = message +class GoogleDriveStatus: + def __init__(self, listener, obj, gid, status): + self.listener = listener + self._obj = obj + self._size = self.listener.size + self._gid = gid + self._status = status def processed_bytes(self): - return get_readable_file_size(self.__obj.processed_bytes) + return get_readable_file_size(self._obj.processed_bytes) def size(self): - return get_readable_file_size(self.__size) + return get_readable_file_size(self._size) def status(self): - if self.__status == "up": - if self.__obj.processed_bytes == 0: - return MirrorStatus.STATUS_PROCESSING - return MirrorStatus.STATUS_UPLOADING - if self.__status == "dl": - return MirrorStatus.STATUS_DOWNLOADING - return MirrorStatus.STATUS_CLONING + if self._status == "up": + return MirrorStatus.STATUS_UPLOAD + if self._status == "dl": + return MirrorStatus.STATUS_DOWNLOAD + return MirrorStatus.STATUS_CLONE def name(self): - return self.__obj.name + return self.listener.name def gid(self) -> str: - return self.__gid + return self._gid def progress_raw(self): try: - return self.__obj.processed_bytes / self.__size * 100 + return self._obj.processed_bytes / self._size * 100 except Exception: return 0 @@ -44,14 +42,14 @@ def progress(self): return f"{round(self.progress_raw(), 2)}%" def speed(self): - return f"{get_readable_file_size(self.__obj.speed)}/s" + return f"{get_readable_file_size(self._obj.speed)}/s" def eta(self): try: - seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed + seconds = (self._size - self._obj.processed_bytes) / self._obj.speed return get_readable_time(seconds) except Exception: return "-" - def download(self): - return self.__obj + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/mega_status.py b/bot/helper/mirror_leech_utils/status_utils/mega_status.py index 25d2101b7..583e8c6e5 100644 --- a/bot/helper/mirror_leech_utils/status_utils/mega_status.py +++ 
b/bot/helper/mirror_leech_utils/status_utils/mega_status.py @@ -1,24 +1,30 @@ -from bot.helper.ext_utils.bot_utils import ( +from bot.helper.ext_utils.status_utils import ( MirrorStatus, - get_readable_time, get_readable_file_size, + get_readable_time, ) class MegaDownloadStatus: - def __init__(self, name, size, gid, obj, message): - self.__obj = obj - self.__name = name - self.__size = size - self.__gid = gid - self.message = message + def __init__( + self, + listener, + obj, + gid, + status, + ): + self.listener = listener + self._obj = obj + self._size = self.listener.size + self._gid = gid + self._status = status def name(self): - return self.__name + return self.listener.name def progress_raw(self): try: - return round(self.__obj.downloaded_bytes / self.__size * 100, 2) + return round(self._obj.downloaded_bytes / self._size * 100, 2) except Exception: return 0.0 @@ -26,26 +32,26 @@ def progress(self): return f"{self.progress_raw()}%" def status(self): - return MirrorStatus.STATUS_DOWNLOADING + return MirrorStatus.STATUS_DOWNLOAD def processed_bytes(self): - return get_readable_file_size(self.__obj.downloaded_bytes) + return get_readable_file_size(self._obj.downloaded_bytes) def eta(self): try: - seconds = (self.__size - self.__obj.downloaded_bytes) / self.__obj.speed + seconds = (self._size - self._obj.downloaded_bytes) / self._obj.speed return get_readable_time(seconds) except ZeroDivisionError: return "-" def size(self): - return get_readable_file_size(self.__size) + return get_readable_file_size(self._size) def speed(self): - return f"{get_readable_file_size(self.__obj.speed)}/s" + return f"{get_readable_file_size(self._obj.speed)}/s" def gid(self): - return self.__gid + return self._gid - def download(self): - return self.__obj + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/qbit_status.py b/bot/helper/mirror_leech_utils/status_utils/qbit_status.py index 28576a952..4ae89b8af 100644 --- a/bot/helper/mirror_leech_utils/status_utils/qbit_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/qbit_status.py @@ -1,127 +1,122 @@ -from asyncio import sleep +from asyncio import gather, sleep -from bot import LOGGER, QbTorrents, xnox_client, qb_listener_lock -from bot.helper.ext_utils.bot_utils import ( +from bot import LOGGER, qb_listener_lock, qb_torrents, xnox_client +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.status_utils import ( MirrorStatus, - sync_to_async, - get_readable_time, get_readable_file_size, + get_readable_time, ) -def get_download(client, tag): +def get_download(tag, old_info=None): try: - return client.torrents_info(tag=tag)[0] + res = xnox_client.torrents_info(tag=tag)[0] + return res or old_info except Exception as e: LOGGER.error(f"{e}: Qbittorrent, while getting torrent info. 
Tag: {tag}") - return None + return old_info class QbittorrentStatus: def __init__(self, listener, seeding=False, queued=False): - self.__client = xnox_client - self.__listener = listener - self.__info = get_download(self.__client, f"{self.__listener.uid}") self.queued = queued self.seeding = seeding - self.message = listener.message + self.listener = listener + self._info = None - def __update(self): - new_info = get_download(self.__client, f"{self.__listener.uid}") - if new_info is not None: - self.__info = new_info + def update(self): + self._info = get_download(f"{self.listener.mid}", self._info) def progress(self): - return f"{round(self.__info.progress*100, 2)}%" + return f"{round(self._info.progress * 100, 2)}%" def processed_bytes(self): - return get_readable_file_size(self.__info.downloaded) + return get_readable_file_size(self._info.downloaded) def speed(self): - return f"{get_readable_file_size(self.__info.dlspeed)}/s" + return f"{get_readable_file_size(self._info.dlspeed)}/s" def name(self): - if self.__info.state in ["metaDL", "checkingResumeData"]: - return f"[METADATA]{self.__info.name}" - return self.__info.name + if self._info.state in ["metaDL", "checkingResumeData"]: + return f"[METADATA]{self.listener.name}" + return self.listener.name def size(self): - return get_readable_file_size(self.__info.size) + return get_readable_file_size(self._info.size) def eta(self): - return get_readable_time(self.__info.eta) + return get_readable_time(self._info.eta) def status(self): - self.__update() - state = self.__info.state + self.update() + state = self._info.state if state == "queuedDL" or self.queued: return MirrorStatus.STATUS_QUEUEDL if state == "queuedUP": return MirrorStatus.STATUS_QUEUEUP - if state in ["pausedDL", "pausedUP"]: + if state in ["stoppedDL", "stoppedUP"]: return MirrorStatus.STATUS_PAUSED if state in ["checkingUP", "checkingDL"]: - return MirrorStatus.STATUS_CHECKING + return MirrorStatus.STATUS_CHECK if state in ["stalledUP", "uploading"] and self.seeding: - return MirrorStatus.STATUS_SEEDING - return MirrorStatus.STATUS_DOWNLOADING + return MirrorStatus.STATUS_SEED + return MirrorStatus.STATUS_DOWNLOAD def seeders_num(self): - return self.__info.num_seeds + return self._info.num_seeds def leechers_num(self): - return self.__info.num_leechs + return self._info.num_leechs def uploaded_bytes(self): - return get_readable_file_size(self.__info.uploaded) + return get_readable_file_size(self._info.uploaded) - def upload_speed(self): - return f"{get_readable_file_size(self.__info.upspeed)}/s" + def seed_speed(self): + return f"{get_readable_file_size(self._info.upspeed)}/s" def ratio(self): - return f"{round(self.__info.ratio, 3)}" + return f"{round(self._info.ratio, 3)}" def seeding_time(self): - return get_readable_time(self.__info.seeding_time, True) + return get_readable_time(self._info.seeding_time) - def download(self): + def task(self): return self def gid(self): - return self.hash()[:8] + return self.hash()[:12] def hash(self): - self.__update() - return self.__info.hash - - def client(self): - return self.__client + return self._info.hash - def listener(self): - return self.__listener - - async def cancel_download(self): - self.__update() + async def cancel_task(self): + self.listener.is_cancelled = True + await sync_to_async(self.update) await sync_to_async( - self.__client.torrents_pause, torrent_hashes=self.__info.hash + xnox_client.torrents_stop, + torrent_hashes=self._info.hash, ) if not self.seeding: if self.queued: LOGGER.info(f"Cancelling QueueDL: 
{self.name()}") msg = "task have been removed from queue/download" else: - LOGGER.info(f"Cancelling Download: {self.__info.name}") + LOGGER.info(f"Cancelling Download: {self._info.name}") msg = "Download stopped by user!" await sleep(0.3) - await sync_to_async( - self.__client.torrents_delete, - torrent_hashes=self.__info.hash, - delete_files=True, - ) - await sync_to_async( - self.__client.torrents_delete_tags, tags=self.__info.tags + await gather( + self.listener.on_download_error(msg), + sync_to_async( + xnox_client.torrents_delete, + torrent_hashes=self._info.hash, + delete_files=True, + ), + sync_to_async( + xnox_client.torrents_delete_tags, + tags=self._info.tags, + ), ) async with qb_listener_lock: - if self.__info.tags in QbTorrents: - del QbTorrents[self.__info.tags] - await self.__listener.onDownloadError(msg) + if self._info.tags in qb_torrents: + del qb_torrents[self._info.tags] diff --git a/bot/helper/mirror_leech_utils/status_utils/queue_status.py b/bot/helper/mirror_leech_utils/status_utils/queue_status.py index 2994b806c..e0f2f41f9 100644 --- a/bot/helper/mirror_leech_utils/status_utils/queue_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/queue_status.py @@ -1,27 +1,25 @@ from bot import LOGGER -from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size +from bot.helper.ext_utils.status_utils import MirrorStatus, get_readable_file_size class QueueStatus: - def __init__(self, name, size, gid, listener, status): - self.__name = name - self.__size = size - self.__gid = gid - self.__listener = listener - self.__status = status - self.message = listener.message + def __init__(self, listener, gid, status): + self.listener = listener + self._size = self.listener.size + self._gid = gid + self._status = status def gid(self): - return self.__gid + return self._gid def name(self): - return self.__name + return self.listener.name def size(self): - return get_readable_file_size(self.__size) + return get_readable_file_size(self._size) def status(self): - if self.__status == "dl": + if self._status == "dl": return MirrorStatus.STATUS_QUEUEDL return MirrorStatus.STATUS_QUEUEUP @@ -37,16 +35,17 @@ def speed(self): def eta(self): return "-" - def download(self): + def task(self): return self - async def cancel_download(self): - LOGGER.info(f"Cancelling Queue{self.__status}: {self.__name}") - if self.__status == "dl": - await self.__listener.onDownloadError( - "task have been removed from queue/download" + async def cancel_task(self): + self.listener.is_cancelled = True + LOGGER.info(f"Cancelling Queue{self._status}: {self.listener.name}") + if self._status == "dl": + await self.listener.on_download_error( + "task have been removed from queue/download", ) else: - await self.__listener.onUploadError( - "task have been removed from queue/upload" + await self.listener.on_upload_error( + "task have been removed from queue/upload", ) diff --git a/bot/helper/mirror_leech_utils/status_utils/rclone_status.py b/bot/helper/mirror_leech_utils/status_utils/rclone_status.py index 07c4aae5a..249953a83 100644 --- a/bot/helper/mirror_leech_utils/status_utils/rclone_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/rclone_status.py @@ -1,40 +1,40 @@ -from bot.helper.ext_utils.bot_utils import MirrorStatus +from bot.helper.ext_utils.status_utils import MirrorStatus class RcloneStatus: - def __init__(self, obj, message, gid, status): - self.__obj = obj - self.__gid = gid - self.__status = status - self.message = message + def __init__(self, listener, obj, gid, status): 
+ self._obj = obj + self._gid = gid + self._status = status + self.listener = listener def gid(self): - return self.__gid + return self._gid def progress(self): - return self.__obj.percentage + return self._obj.percentage def speed(self): - return self.__obj.speed + return self._obj.speed def name(self): - return self.__obj.name + return self.listener.name def size(self): - return self.__obj.size + return self._obj.size def eta(self): - return self.__obj.eta + return self._obj.eta def status(self): - if self.__status == "dl": - return MirrorStatus.STATUS_DOWNLOADING - if self.__status == "up": - return MirrorStatus.STATUS_UPLOADING - return MirrorStatus.STATUS_CLONING + if self._status == "dl": + return MirrorStatus.STATUS_DOWNLOAD + if self._status == "up": + return MirrorStatus.STATUS_UPLOAD + return MirrorStatus.STATUS_CLONE def processed_bytes(self): - return self.__obj.transferred_size + return self._obj.transferred_size - def download(self): - return self.__obj + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/sevenz_status.py b/bot/helper/mirror_leech_utils/status_utils/sevenz_status.py new file mode 100644 index 000000000..d83281539 --- /dev/null +++ b/bot/helper/mirror_leech_utils/status_utils/sevenz_status.py @@ -0,0 +1,67 @@ +import contextlib +from time import time + +from bot import LOGGER +from bot.helper.ext_utils.status_utils import ( + MirrorStatus, + get_readable_file_size, + get_readable_time, +) + + +class SevenZStatus: + def __init__(self, listener, obj, gid, status=""): + self.listener = listener + self._obj = obj + self._gid = gid + self._start_time = time() + self._cstatus = status + + def gid(self): + return self._gid + + def _speed_raw(self): + return self._obj.processed_bytes / (time() - self._start_time) + + async def progress(self): + return self._obj.progress + + def speed(self): + return f"{get_readable_file_size(self._speed_raw())}/s" + + def processed_bytes(self): + return get_readable_file_size(self._obj.processed_bytes) + + def name(self): + return self.listener.name + + def size(self): + return get_readable_file_size(self.listener.size) + + def eta(self): + try: + seconds = ( + self.listener.subsize - self._obj.processed_bytes + ) / self._speed_raw() + return get_readable_time(seconds) + except Exception: + return "-" + + def status(self): + if self._cstatus == "Extract": + return MirrorStatus.STATUS_EXTRACT + return MirrorStatus.STATUS_ARCHIVE + + def task(self): + return self + + async def cancel_task(self): + LOGGER.info(f"Cancelling {self._cstatus}: {self.listener.name}") + self.listener.is_cancelled = True + if ( + self.listener.subproc is not None + and self.listener.subproc.returncode is None + ): + with contextlib.suppress(Exception): + self.listener.subproc.kill() + await self.listener.on_upload_error(f"{self._cstatus} stopped by user!") diff --git a/bot/helper/mirror_leech_utils/status_utils/split_status.py b/bot/helper/mirror_leech_utils/status_utils/split_status.py deleted file mode 100644 index 3f465b281..000000000 --- a/bot/helper/mirror_leech_utils/status_utils/split_status.py +++ /dev/null @@ -1,46 +0,0 @@ -from bot import LOGGER -from bot.helper.ext_utils.bot_utils import MirrorStatus, get_readable_file_size - - -class SplitStatus: - def __init__(self, name, size, gid, listener): - self.__name = name - self.__gid = gid - self.__size = size - self.__listener = listener - self.message = listener.message - - def gid(self): - return self.__gid - - def progress(self): - return "0" - - def 
speed(self): - return "0" - - def name(self): - return self.__name - - def size(self): - return get_readable_file_size(self.__size) - - def eta(self): - return "0s" - - def status(self): - return MirrorStatus.STATUS_SPLITTING - - def processed_bytes(self): - return 0 - - def download(self): - return self - - async def cancel_download(self): - LOGGER.info(f"Cancelling Split: {self.__name}") - if self.__listener.suproc is not None: - self.__listener.suproc.kill() - else: - self.__listener.suproc = "cancelled" - await self.__listener.onUploadError("splitting stopped by user!") diff --git a/bot/helper/mirror_leech_utils/status_utils/telegram_status.py b/bot/helper/mirror_leech_utils/status_utils/telegram_status.py index af0ca55c8..4ee7c22c2 100644 --- a/bot/helper/mirror_leech_utils/status_utils/telegram_status.py +++ b/bot/helper/mirror_leech_utils/status_utils/telegram_status.py @@ -1,53 +1,51 @@ -from bot.helper.ext_utils.bot_utils import ( +from bot.helper.ext_utils.status_utils import ( MirrorStatus, - get_readable_time, get_readable_file_size, + get_readable_time, ) class TelegramStatus: - def __init__(self, obj, size, message, gid, status): - self.__obj = obj - self.__size = size - self.__gid = gid - self.__status = status - self.message = message + def __init__(self, listener, obj, gid, status): + self.listener = listener + self._obj = obj + self._size = self.listener.size + self._gid = gid + self._status = status def processed_bytes(self): - return get_readable_file_size(self.__obj.processed_bytes) + return get_readable_file_size(self._obj.processed_bytes) def size(self): - return get_readable_file_size(self.__size) + return get_readable_file_size(self._size) def status(self): - if self.__status == "up": - if self.__obj.processed_bytes == 0: - return MirrorStatus.STATUS_PROCESSING - return MirrorStatus.STATUS_UPLOADING - return MirrorStatus.STATUS_DOWNLOADING + if self._status == "up": + return MirrorStatus.STATUS_UPLOAD + return MirrorStatus.STATUS_DOWNLOAD def name(self): - return self.__obj.name + return self.listener.name def progress(self): try: - progress_raw = self.__obj.processed_bytes / self.__size * 100 + progress_raw = self._obj.processed_bytes / self._size * 100 except Exception: progress_raw = 0 return f"{round(progress_raw, 2)}%" def speed(self): - return f"{get_readable_file_size(self.__obj.speed)}/s" + return f"{get_readable_file_size(self._obj.speed)}/s" def eta(self): try: - seconds = (self.__size - self.__obj.processed_bytes) / self.__obj.speed + seconds = (self._size - self._obj.processed_bytes) / self._obj.speed return get_readable_time(seconds) except Exception: return "-" - def gid(self) -> str: - return self.__gid + def gid(self): + return self._gid - def download(self): - return self.__obj + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/yt_dlp_status.py b/bot/helper/mirror_leech_utils/status_utils/yt_dlp_status.py new file mode 100644 index 000000000..3ddd4399c --- /dev/null +++ b/bot/helper/mirror_leech_utils/status_utils/yt_dlp_status.py @@ -0,0 +1,56 @@ +from bot.helper.ext_utils.files_utils import get_path_size +from bot.helper.ext_utils.status_utils import ( + MirrorStatus, + get_readable_file_size, + get_readable_time, +) + + +class YtDlpStatus: + def __init__(self, listener, obj, gid): + self._obj = obj + self._gid = gid + self.listener = listener + self._processed_bytes = 0 + + def gid(self): + return self._gid + + def processed_bytes(self): + return get_readable_file_size(self._processed_bytes) + + async 
def processed_raw(self): + if self._obj.downloaded_bytes != 0: + self._processed_bytes = self._obj.downloaded_bytes + else: + self._processed_bytes = await get_path_size(self.listener.dir) + + def size(self): + return get_readable_file_size(self._obj.size) + + def status(self): + return MirrorStatus.STATUS_DOWNLOAD + + def name(self): + return self.listener.name + + async def progress(self): + await self.processed_raw() + return f"{round(self._obj.progress, 2)}%" + + def speed(self): + return f"{get_readable_file_size(self._obj.download_speed)}/s" + + def eta(self): + if self._obj.eta != "-": + return get_readable_time(self._obj.eta) + try: + seconds = ( + self._obj.size - self._processed_bytes + ) / self._obj.download_speed + return get_readable_time(seconds) + except Exception: + return "-" + + def task(self): + return self._obj diff --git a/bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py b/bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py deleted file mode 100644 index 100ad65cc..000000000 --- a/bot/helper/mirror_leech_utils/status_utils/ytdlp_status.py +++ /dev/null @@ -1,55 +0,0 @@ -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - async_to_sync, - get_readable_time, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import get_path_size - - -class YtDlpDownloadStatus: - def __init__(self, obj, listener, gid): - self.__obj = obj - self.__listener = listener - self.__gid = gid - self.message = listener.message - - def gid(self): - return self.__gid - - def processed_bytes(self): - return get_readable_file_size(self.processed_raw()) - - def processed_raw(self): - if self.__obj.downloaded_bytes != 0: - return self.__obj.downloaded_bytes - return async_to_sync(get_path_size, self.__listener.dir) - - def size(self): - return get_readable_file_size(self.__obj.size) - - def status(self): - return MirrorStatus.STATUS_DOWNLOADING - - def name(self): - return self.__obj.name - - def progress(self): - return f"{round(self.__obj.progress, 2)}%" - - def speed(self): - return f"{get_readable_file_size(self.__obj.download_speed)}/s" - - def eta(self): - if self.__obj.eta != "-": - return get_readable_time(self.__obj.eta) - try: - seconds = ( - self.__obj.size - self.processed_raw() - ) / self.__obj.download_speed - return get_readable_time(seconds) - except Exception: - return "-" - - def download(self): - return self.__obj diff --git a/bot/helper/mirror_leech_utils/status_utils/zip_status.py b/bot/helper/mirror_leech_utils/status_utils/zip_status.py deleted file mode 100644 index dde5f11b6..000000000 --- a/bot/helper/mirror_leech_utils/status_utils/zip_status.py +++ /dev/null @@ -1,74 +0,0 @@ -from time import time - -from bot import LOGGER -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - async_to_sync, - get_readable_time, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import get_path_size - - -class ZipStatus: - def __init__(self, name, size, gid, listener): - self.__name = name - self.__size = size - self.__gid = gid - self.__listener = listener - self.__uid = listener.uid - self.__start_time = time() - self.message = listener.message - - def gid(self): - return self.__gid - - def speed_raw(self): - return self.processed_raw() / (time() - self.__start_time) - - def progress_raw(self): - try: - return self.processed_raw() / self.__size * 100 - except Exception: - return 0 - - def progress(self): - return f"{round(self.progress_raw(), 2)}%" - - def speed(self): - return 
f"{get_readable_file_size(self.speed_raw())}/s" - - def name(self): - return self.__name - - def size(self): - return get_readable_file_size(self.__size) - - def eta(self): - try: - seconds = (self.__size - self.processed_raw()) / self.speed_raw() - return get_readable_time(seconds) - except Exception: - return "-" - - def status(self): - return MirrorStatus.STATUS_ARCHIVING - - def processed_raw(self): - if self.__listener.newDir: - return async_to_sync(get_path_size, self.__listener.newDir) - return async_to_sync(get_path_size, self.__listener.dir) - self.__size - - def processed_bytes(self): - return get_readable_file_size(self.processed_raw()) - - def download(self): - return self - - async def cancel_download(self): - LOGGER.info(f"Cancelling Archive: {self.__name}") - if self.__listener.suproc is not None: - self.__listener.suproc.kill() - else: - self.__listener.suproc = "cancelled" - await self.__listener.onUploadError("archiving stopped by user!") diff --git a/bot/helper/mirror_leech_utils/telegram_uploader.py b/bot/helper/mirror_leech_utils/telegram_uploader.py new file mode 100644 index 000000000..8b18ded66 --- /dev/null +++ b/bot/helper/mirror_leech_utils/telegram_uploader.py @@ -0,0 +1,576 @@ +import contextlib +from asyncio import sleep +from logging import getLogger +from os import path as ospath +from os import walk +from re import match as re_match +from re import sub as re_sub +from time import time + +from aiofiles.os import ( + path as aiopath, +) +from aiofiles.os import ( + remove, + rename, +) +from aioshutil import rmtree +from natsort import natsorted +from PIL import Image +from pyrogram.errors import BadRequest, FloodPremiumWait, FloodWait, RPCError +from pyrogram.types import ( + InputMediaDocument, + InputMediaPhoto, + InputMediaVideo, +) +from tenacity import ( + RetryError, + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.helper.aeon_utils.caption_gen import generate_caption +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.files_utils import ( + get_base_name, + is_archive, +) +from bot.helper.ext_utils.media_utils import ( + get_audio_thumbnail, + get_document_type, + get_media_info, + get_multiple_frames_thumbnail, + get_video_thumbnail, +) +from bot.helper.telegram_helper.message_utils import delete_message + +LOGGER = getLogger(__name__) + + +class TelegramUploader: + def __init__(self, listener, path): + self._last_uploaded = 0 + self._processed_bytes = 0 + self._listener = listener + self._user_id = listener.user_id + self._path = path + self._start_time = time() + self._total_files = 0 + self._thumb = self._listener.thumb or f"Thumbnails/{listener.user_id}.jpg" + self._msgs_dict = {} + self._corrupted = 0 + self._is_corrupted = False + self._media_dict = {"videos": {}, "documents": {}} + self._last_msg_in_group = False + self._up_path = "" + self._lprefix = "" + self._user_dump = "" + self._lcaption = "" + self._media_group = False + self._is_private = False + self._sent_msg = None + self._user_session = self._listener.user_transmission + self._error = "" + + async def _upload_progress(self, current, _): + if self._listener.is_cancelled: + if self._user_session: + TgClient.user.stop_transmission() + else: + self._listener.client.stop_transmission() + chunk_size = current - self._last_uploaded + self._last_uploaded = current + self._processed_bytes += chunk_size + + async def 
_user_settings(self): + self._media_group = self._listener.user_dict.get("media_group") or ( + Config.MEDIA_GROUP + if "media_group" not in self._listener.user_dict + else False + ) + self._lprefix = self._listener.user_dict.get("lprefix") or ( + Config.LEECH_FILENAME_PREFIX + if "lprefix" not in self._listener.user_dict + else "" + ) + self._user_dump = self._listener.user_dict.get("user_dump") + self._lcaption = self._listener.user_dict.get("lcaption") or ( + Config.LEECH_FILENAME_CAPTION + if "lcaption" not in self._listener.user_dict + else "" + ) + if self._thumb != "none" and not await aiopath.exists(self._thumb): + self._thumb = None + + async def _msg_to_reply(self): + if self._listener.up_dest: + msg = ( + self._listener.message.link + if self._listener.is_super_chat + else self._listener.message.text.lstrip("/") + ) + try: + if self._user_session: + self._sent_msg = await TgClient.user.send_message( + chat_id=self._listener.up_dest, + text=msg, + disable_web_page_preview=True, + message_thread_id=self._listener.chat_thread_id, + disable_notification=True, + ) + else: + self._sent_msg = await self._listener.client.send_message( + chat_id=self._listener.up_dest, + text=msg, + disable_web_page_preview=True, + message_thread_id=self._listener.chat_thread_id, + disable_notification=True, + ) + self._is_private = self._sent_msg.chat.type.name == "PRIVATE" + except Exception as e: + await self._listener.on_upload_error(str(e)) + return False + elif self._user_session: + self._sent_msg = await TgClient.user.get_messages( + chat_id=self._listener.message.chat.id, + message_ids=self._listener.mid, + ) + if self._sent_msg is None: + self._sent_msg = await TgClient.user.send_message( + chat_id=self._listener.message.chat.id, + text="Deleted Cmd Message! 
Don't delete the cmd message again!", + disable_web_page_preview=True, + disable_notification=True, + ) + else: + self._sent_msg = self._listener.message + return True + + async def _prepare_file(self, file_, dirpath): + if self._lcaption: + cap_mono = await generate_caption(file_, dirpath, self._lcaption) + if self._lprefix: + if not self._lcaption: + cap_mono = f"{self._lprefix} {file_}" + self._lprefix = re_sub("<.*?>", "", self._lprefix) + new_path = ospath.join(dirpath, f"{self._lprefix} {file_}") + LOGGER.info(self._up_path) + await rename(self._up_path, new_path) + self._up_path = new_path + LOGGER.info(self._up_path) # nxt + if not self._lcaption and not self._lprefix: + cap_mono = f"{file_}" + if len(file_) > 60: + if is_archive(file_): + name = get_base_name(file_) + ext = file_.split(name, 1)[1] + elif match := re_match( + r".+(?=\..+\.0*\d+$)|.+(?=\.part\d+\..+$)", + file_, + ): + name = match.group(0) + ext = file_.split(name, 1)[1] + elif len(fsplit := ospath.splitext(file_)) > 1: + name = fsplit[0] + ext = fsplit[1] + else: + name = file_ + ext = "" + extn = len(ext) + remain = 60 - extn + name = name[:remain] + new_path = ospath.join(dirpath, f"{name}{ext}") + await rename(self._up_path, new_path) + self._up_path = new_path + return cap_mono + + def _get_input_media(self, subkey, key): + rlist = [] + for msg in self._media_dict[key][subkey]: + if key == "videos": + input_media = InputMediaVideo( + media=msg.video.file_id, + caption=msg.caption, + ) + else: + input_media = InputMediaDocument( + media=msg.document.file_id, + caption=msg.caption, + ) + rlist.append(input_media) + return rlist + + async def _send_screenshots(self, dirpath, outputs): + inputs = [ + InputMediaPhoto(ospath.join(dirpath, p), p.rsplit("/", 1)[-1]) + for p in outputs + ] + for i in range(0, len(inputs), 10): + batch = inputs[i : i + 10] + self._sent_msg = ( + await self._sent_msg.reply_media_group( + media=batch, + quote=True, + disable_notification=True, + ) + )[-1] + + async def _send_media_group(self, subkey, key, msgs): + for index, msg in enumerate(msgs): + if self._listener.mixed_leech or not self._user_session: + msgs[index] = await self._listener.client.get_messages( + chat_id=msg[0], + message_ids=msg[1], + ) + else: + msgs[index] = await TgClient.user.get_messages( + chat_id=msg[0], + message_ids=msg[1], + ) + msgs_list = await msgs[0].reply_to_message.reply_media_group( + media=self._get_input_media(subkey, key), + quote=True, + disable_notification=True, + ) + for msg in msgs: + if msg.link in self._msgs_dict: + del self._msgs_dict[msg.link] + await delete_message(msg) + del self._media_dict[key][subkey] + if self._listener.is_super_chat or self._listener.up_dest: + for m in msgs_list: + self._msgs_dict[m.link] = m.caption + self._sent_msg = msgs_list[-1] + + async def upload(self): + await self._user_settings() + res = await self._msg_to_reply() + if not res: + return + for dirpath, _, files in natsorted(await sync_to_async(walk, self._path)): + if dirpath.endswith("/yt-dlp-thumb"): + continue + if dirpath.endswith("_ss"): + await self._send_screenshots(dirpath, files) + await rmtree(dirpath, ignore_errors=True) + continue + for file_ in natsorted(files): + self._error = "" + self._up_path = f_path = ospath.join(dirpath, file_) + if not ospath.exists(self._up_path): + LOGGER.error(f"{self._up_path} not exists! 
Continue uploading!") + continue + if file_.lower().endswith(tuple(self._listener.extension_filter)): + await remove(self._up_path) + continue + try: + f_size = await aiopath.getsize(self._up_path) + self._total_files += 1 + if f_size == 0: + LOGGER.error( + f"{self._up_path} size is zero, telegram don't upload zero size files", + ) + self._corrupted += 1 + continue + if self._listener.is_cancelled: + return + cap_mono = await self._prepare_file(file_, dirpath) + if self._last_msg_in_group: + group_lists = [ + x for v in self._media_dict.values() for x in v + ] + match = re_match( + r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)", + f_path, + ) + if not match or ( + match and match.group(0) not in group_lists + ): + for key, value in list(self._media_dict.items()): + for subkey, msgs in list(value.items()): + if len(msgs) > 1: + await self._send_media_group( + subkey, + key, + msgs, + ) + if ( + self._listener.mixed_leech + and self._listener.user_transmission + ): + self._user_session = f_size > 2097152000 + if self._user_session: + self._sent_msg = await TgClient.user.get_messages( + chat_id=self._sent_msg.chat.id, + message_ids=self._sent_msg.id, + ) + else: + self._sent_msg = ( + await self._listener.client.get_messages( + chat_id=self._sent_msg.chat.id, + message_ids=self._sent_msg.id, + ) + ) + self._last_msg_in_group = False + self._last_uploaded = 0 + await self._upload_file(cap_mono, file_, f_path) + if self._listener.is_cancelled: + return + if ( + not self._is_corrupted + and (self._listener.is_super_chat or self._listener.up_dest) + and not self._is_private + ): + self._msgs_dict[self._sent_msg.link] = file_ + await sleep(1) + except Exception as err: + if isinstance(err, RetryError): + LOGGER.info( + f"Total Attempts: {err.last_attempt.attempt_number}", + ) + err = err.last_attempt.exception() + LOGGER.error(f"{err}. Path: {self._up_path}") + self._error = str(err) + self._corrupted += 1 + if self._listener.is_cancelled: + return + if not self._listener.is_cancelled and await aiopath.exists( + self._up_path, + ): + await remove(self._up_path) + for key, value in list(self._media_dict.items()): + for subkey, msgs in list(value.items()): + if len(msgs) > 1: + try: + await self._send_media_group(subkey, key, msgs) + except Exception as e: + LOGGER.info( + f"While sending media group at the end of task. Error: {e}", + ) + if self._listener.is_cancelled: + return + if self._total_files == 0: + await self._listener.on_upload_error( + "No files to upload. In case you have filled EXTENSION_FILTER, then check if all files have those extensions or not.", + ) + return + if self._total_files <= self._corrupted: + await self._listener.on_upload_error( + f"Files Corrupted or unable to upload. 
{self._error or 'Check logs!'}", + ) + return + LOGGER.info(f"Leech Completed: {self._listener.name}") + await self._listener.on_upload_complete( + None, + self._msgs_dict, + self._total_files, + self._corrupted, + ) + + @retry( + wait=wait_exponential(multiplier=2, min=4, max=8), + stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception), + ) + async def _upload_file(self, cap_mono, file, o_path, force_document=False): + if ( + self._thumb is not None + and not await aiopath.exists(self._thumb) + and self._thumb != "none" + ): + self._thumb = None + thumb = self._thumb + self._is_corrupted = False + try: + is_video, is_audio, is_image = await get_document_type(self._up_path) + + if not is_image and thumb is None: + file_name = ospath.splitext(file)[0] + thumb_path = f"{self._path}/yt-dlp-thumb/{file_name}.jpg" + if await aiopath.isfile(thumb_path): + thumb = thumb_path + elif is_audio and not is_video: + thumb = await get_audio_thumbnail(self._up_path) + + if ( + self._listener.as_doc + or force_document + or (not is_video and not is_audio and not is_image) + ): + key = "documents" + if is_video and thumb is None: + thumb = await get_video_thumbnail(self._up_path, None) + + if self._listener.is_cancelled: + return None + if thumb == "none": + thumb = None + self._sent_msg = await self._sent_msg.reply_document( + document=self._up_path, + quote=True, + thumb=thumb, + caption=cap_mono, + force_document=True, + disable_notification=True, + progress=self._upload_progress, + ) + elif is_video: + key = "videos" + duration = (await get_media_info(self._up_path))[0] + if thumb is None and self._listener.thumbnail_layout: + thumb = await get_multiple_frames_thumbnail( + self._up_path, + self._listener.thumbnail_layout, + self._listener.screen_shots, + ) + if thumb is None: + thumb = await get_video_thumbnail(self._up_path, duration) + if thumb is not None and thumb != "none": + with Image.open(thumb) as img: + width, height = img.size + else: + width = 480 + height = 320 + if self._listener.is_cancelled: + return None + if thumb == "none": + thumb = None + self._sent_msg = await self._sent_msg.reply_video( + video=self._up_path, + quote=True, + caption=cap_mono, + duration=duration, + width=width, + height=height, + thumb=thumb, + supports_streaming=True, + disable_notification=True, + progress=self._upload_progress, + ) + elif is_audio: + key = "audios" + duration, artist, title = await get_media_info(self._up_path) + if self._listener.is_cancelled: + return None + self._sent_msg = await self._sent_msg.reply_audio( + audio=self._up_path, + quote=True, + caption=cap_mono, + duration=duration, + performer=artist, + title=title, + thumb=thumb, + disable_notification=True, + progress=self._upload_progress, + ) + else: + key = "photos" + if self._listener.is_cancelled: + return None + self._sent_msg = await self._sent_msg.reply_photo( + photo=self._up_path, + quote=True, + caption=cap_mono, + disable_notification=True, + progress=self._upload_progress, + ) + + await self._copy_message() + + if ( + not self._listener.is_cancelled + and self._media_group + and (self._sent_msg.video or self._sent_msg.document) + ): + key = "documents" if self._sent_msg.document else "videos" + if match := re_match(r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+$)", o_path): + pname = match.group(0) + if pname in self._media_dict[key]: + self._media_dict[key][pname].append( + [self._sent_msg.chat.id, self._sent_msg.id], + ) + else: + self._media_dict[key][pname] = [ + [self._sent_msg.chat.id, self._sent_msg.id], + ] + 
msgs = self._media_dict[key][pname] + if len(msgs) == 10: + await self._send_media_group(pname, key, msgs) + else: + self._last_msg_in_group = True + + if ( + self._thumb is None + and thumb is not None + and await aiopath.exists(thumb) + ): + await remove(thumb) + except (FloodWait, FloodPremiumWait) as f: + LOGGER.warning(str(f)) + await sleep(f.value * 1.3) + if ( + self._thumb is None + and thumb is not None + and await aiopath.exists(thumb) + ): + await remove(thumb) + return await self._upload_file(cap_mono, file, o_path) + except Exception as err: + if ( + self._thumb is None + and thumb is not None + and await aiopath.exists(thumb) + ): + await remove(thumb) + err_type = "RPCError: " if isinstance(err, RPCError) else "" + LOGGER.error(f"{err_type}{err}. Path: {self._up_path}") + if isinstance(err, BadRequest) and key != "documents": + LOGGER.error(f"Retrying As Document. Path: {self._up_path}") + return await self._upload_file(cap_mono, file, o_path, True) + raise err + + async def _copy_message(self): + await sleep(1) + + async def _copy(target, retries=3): + for attempt in range(retries): + try: + msg = await TgClient.bot.get_messages( + self._sent_msg.chat.id, + self._sent_msg.id, + ) + await msg.copy(target) + return + except Exception as e: + LOGGER.error(f"Attempt {attempt + 1} failed: {e} {msg.id}") + if attempt < retries - 1: + await sleep(0.5) + LOGGER.error(f"Failed to copy message after {retries} attempts") + + # TODO if self.dm_mode: + if self._sent_msg.chat.id != self._user_id: + await _copy(self._user_id) + + if self._user_dump: + with contextlib.suppress(Exception): + await _copy(int(self._user_dump)) + + @property + def speed(self): + try: + return self._processed_bytes / (time() - self._start_time) + except Exception: + return 0 + + @property + def processed_bytes(self): + return self._processed_bytes + + async def cancel_task(self): + self._listener.is_cancelled = True + LOGGER.info(f"Cancelling Upload: {self._listener.name}") + await self._listener.on_upload_error("your upload has been stopped!") diff --git a/bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py b/bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py deleted file mode 100644 index 0e999f1dc..000000000 --- a/bot/helper/mirror_leech_utils/upload_utils/gdriveTools.py +++ /dev/null @@ -1,971 +0,0 @@ -import contextlib -from io import FileIO -from os import path as ospath -from os import remove as osremove -from os import listdir, makedirs -from re import search as re_search -from time import time -from pickle import load as pload -from random import randrange -from logging import ERROR, getLogger -from urllib.parse import quote as rquote -from urllib.parse import parse_qs, urlparse - -from tenacity import ( - RetryError, - retry, - wait_exponential, - stop_after_attempt, - retry_if_exception_type, -) -from google.oauth2 import service_account -from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload -from googleapiclient.errors import HttpError -from googleapiclient.discovery import build - -from bot import GLOBAL_EXTENSION_FILTER, config_dict, list_drives_dict -from bot.helper.aeon_utils.metadata import add_attachment -from bot.helper.ext_utils.bot_utils import ( - SetInterval, - is_mkv, - async_to_sync, - get_readable_file_size, -) -from bot.helper.ext_utils.files_utils import process_file, get_mime_type - -LOGGER = getLogger(__name__) -getLogger("googleapiclient.discovery").setLevel(ERROR) - - -class GoogleDriveHelper: - def __init__(self, name=None, path=None, listener=None): 
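Both generations of uploaders share the same retry discipline: the new TelegramUploader above and this removed GoogleDriveHelper wrap their transfer calls in tenacity's retry decorator, so transient failures back off exponentially and only the final failure surfaces as a RetryError, which upload() unwraps via err.last_attempt. A minimal self-contained sketch of that pattern, using a hypothetical stand-in for _upload_file:

    from tenacity import (
        retry,
        retry_if_exception_type,
        stop_after_attempt,
        wait_exponential,
    )

    @retry(
        wait=wait_exponential(multiplier=2, min=4, max=8),  # waits 4s, then 8s
        stop=stop_after_attempt(3),                         # then raises RetryError
        retry=retry_if_exception_type(Exception),
    )
    async def upload_once():  # hypothetical stand-in for _upload_file
        ...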
- self.__OAUTH_SCOPE = ["https://www.googleapis.com/auth/drive"] - self.__G_DRIVE_DIR_MIME_TYPE = "application/vnd.google-apps.folder" - self.__G_DRIVE_BASE_DOWNLOAD_URL = ( - "https://drive.google.com/uc?id={}&export=download" - ) - self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL = ( - "https://drive.google.com/drive/folders/{}" - ) - self.__listener = listener - self.__user_id = listener.message.from_user.id if listener else None - self.__path = path - self.__total_bytes = 0 - self.__total_files = 0 - self.__total_folders = 0 - self.__processed_bytes = 0 - self.__total_time = 0 - self.__start_time = 0 - self.__alt_auth = False - self.__is_uploading = False - self.__is_downloading = False - self.__is_cloning = False - self.__is_cancelled = False - self.__is_errored = False - self.__status = None - self.__updater = None - self.__update_interval = 3 - self.__sa_index = 0 - self.__sa_count = 1 - self.__sa_number = 100 - self.__service = self.__authorize() - self.__file_processed_bytes = 0 - self.__processed_bytes = 0 - self.name = name - - @property - def speed(self): - try: - return self.__processed_bytes / self.__total_time - except Exception: - return 0 - - @property - def processed_bytes(self): - return self.__processed_bytes - - def __authorize(self): - credentials = None - if config_dict["USE_SERVICE_ACCOUNTS"]: - json_files = listdir("accounts") - self.__sa_number = len(json_files) - self.__sa_index = randrange(self.__sa_number) - LOGGER.info( - f"Authorizing with {json_files[self.__sa_index]} service account" - ) - credentials = service_account.Credentials.from_service_account_file( - f"accounts/{json_files[self.__sa_index]}", scopes=self.__OAUTH_SCOPE - ) - elif ospath.exists("token.pickle"): - LOGGER.info("Authorize with token.pickle") - with open("token.pickle", "rb") as f: - credentials = pload(f) - else: - LOGGER.error("token.pickle not found!") - return build("drive", "v3", credentials=credentials, cache_discovery=False) - - def __alt_authorize(self): - if not self.__alt_auth: - self.__alt_auth = True - if ospath.exists("token.pickle"): - LOGGER.info("Authorize with token.pickle") - with open("token.pickle", "rb") as f: - credentials = pload(f) - return build( - "drive", "v3", credentials=credentials, cache_discovery=False - ) - LOGGER.error("token.pickle not found!") - return None - - def __switchServiceAccount(self): - if self.__sa_index == self.__sa_number - 1: - self.__sa_index = 0 - else: - self.__sa_index += 1 - self.__sa_count += 1 - LOGGER.info(f"Switching to {self.__sa_index} index") - self.__service = self.__authorize() - - @staticmethod - def getIdFromUrl(link): - if "folders" in link or "file" in link: - regex = r"https:\/\/drive\.google\.com\/(?:drive(.*?)\/folders\/|file(.*?)?\/d\/)([-\w]+)" - res = re_search(regex, link) - if res is None: - raise IndexError("G-Drive ID not found.") - return res.group(3) - parsed = urlparse(link) - return parse_qs(parsed.query)["id"][0] - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def getFolderData(self, file_id): - try: - meta = ( - self.__service.files() - .get(fileId=file_id, supportsAllDrives=True) - .execute() - ) - if meta.get("mimeType", "") == self.__G_DRIVE_DIR_MIME_TYPE: - return meta.get("name") - except Exception: - return None - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def __getFileMetadata(self, file_id): - return ( - 
self.__service.files() - .get( - fileId=file_id, - supportsAllDrives=True, - fields="name, id, mimeType, size", - ) - .execute() - ) - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def getFilesByFolderId(self, folder_id): - page_token = None - files = [] - while True: - response = ( - self.__service.files() - .list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - q=f"'{folder_id}' in parents and trashed = false", - spaces="drive", - pageSize=200, - fields="nextPageToken, files(id, name, mimeType, size, shortcutDetails)", - orderBy="folder, name", - pageToken=page_token, - ) - .execute() - ) - files.extend(response.get("files", [])) - page_token = response.get("nextPageToken") - if page_token is None: - break - return files - - async def __progress(self): - if self.__status is not None: - chunk_size = ( - self.__status.total_size * self.__status.progress() - - self.__file_processed_bytes - ) - self.__file_processed_bytes = ( - self.__status.total_size * self.__status.progress() - ) - self.__processed_bytes += chunk_size - self.__total_time += self.__update_interval - - def deletefile(self, link: str): - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - return "Google Drive ID could not be found in the provided link" - msg = "" - try: - self.__service.files().delete( - fileId=file_id, supportsAllDrives=True - ).execute() - msg = "Successfully deleted" - LOGGER.info(f"Delete Result: {msg}") - except HttpError as err: - if "File not found" in str(err) or "insufficientFilePermissions" in str( - err - ): - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - return self.deletefile(link) - err = "File not found or insufficientFilePermissions!" - LOGGER.error(f"Delete Result: {err}") - msg = str(err) - return msg - - def upload(self, file_name, size, gdrive_id): - if not gdrive_id: - gdrive_id = config_dict["GDRIVE_ID"] - self.__is_uploading = True - item_path = f"{self.__path}/{file_name}" - LOGGER.info(f"Uploading: {item_path}") - self.__updater = SetInterval(self.__update_interval, self.__progress) - try: - if ospath.isfile(item_path): - if item_path.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - raise Exception( - "This file extension is excluded by extension filter!" 
- ) - mime_type = get_mime_type(item_path) - link = self.__upload_file( - item_path, file_name, mime_type, gdrive_id, is_dir=False - ) - if self.__is_cancelled: - return - if link is None: - raise Exception("Upload has been manually cancelled") - LOGGER.info(f"Uploaded To G-Drive: {item_path}") - else: - mime_type = "Folder" - dir_id = self.__create_directory( - ospath.basename(ospath.abspath(file_name)), gdrive_id - ) - result = self.__upload_dir(item_path, dir_id) - if result is None: - raise Exception("Upload has been manually cancelled!") - link = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - if self.__is_cancelled: - return - LOGGER.info(f"Uploaded To G-Drive: {file_name}") - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - async_to_sync(self.__listener.onUploadError, err) - self.__is_errored = True - finally: - self.__updater.cancel() - if self.__is_cancelled and not self.__is_errored: - if mime_type == "Folder": - LOGGER.info("Deleting uploaded data from Drive...") - link = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - self.deletefile(link) - return - if self.__is_errored: - return - async_to_sync( - self.__listener.onUploadComplete, - link, - size, - self.__total_files, - self.__total_folders, - mime_type, - file_name, - ) - - def __upload_dir(self, input_directory, dest_id): - list_dirs = listdir(input_directory) - if len(list_dirs) == 0: - return dest_id - new_id = None - for item in list_dirs: - current_file_name = ospath.join(input_directory, item) - if ospath.isdir(current_file_name): - current_dir_id = self.__create_directory(item, dest_id) - new_id = self.__upload_dir(current_file_name, current_dir_id) - self.__total_folders += 1 - elif not item.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - mime_type = get_mime_type(current_file_name) - file_name = current_file_name.split("/")[-1] - self.__upload_file(current_file_name, file_name, mime_type, dest_id) - self.__total_files += 1 - new_id = dest_id - else: - osremove(current_file_name) - new_id = "filter" - if self.__is_cancelled: - break - return new_id - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def __create_directory(self, directory_name, dest_id): - directory_name, _ = async_to_sync( - process_file, directory_name, self.__user_id, is_mirror=True - ) - file_metadata = { - "name": directory_name, - "description": "Uploaded by Aeon", - "mimeType": self.__G_DRIVE_DIR_MIME_TYPE, - } - if dest_id is not None: - file_metadata["parents"] = [dest_id] - file = ( - self.__service.files() - .create(body=file_metadata, supportsAllDrives=True) - .execute() - ) - file_id = file.get("id") - LOGGER.info( - f'Created G-Drive Folder:\nName: {file.get("name")}\nID: {file_id}' - ) - return file_id - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=(retry_if_exception_type(Exception)), - ) - def __upload_file(self, file_path, file_name, mime_type, dest_id, is_dir=True): - location = ospath.dirname(file_path) - file_name, _ = async_to_sync( - process_file, file_name, self.__user_id, location, True - ) - if (atc := self.__listener.attachment) and is_mkv(file_name): - file_name = async_to_sync(add_attachment, file_name, location, atc) - file_metadata = { - "name": file_name, - "description": "Uploaded by Aeon", - 
"mimeType": mime_type, - } - if dest_id is not None: - file_metadata["parents"] = [dest_id] - - if ospath.getsize(file_path) == 0: - media_body = MediaFileUpload( - file_path, mimetype=mime_type, resumable=False - ) - response = ( - self.__service.files() - .create( - body=file_metadata, media_body=media_body, supportsAllDrives=True - ) - .execute() - ) - drive_file = ( - self.__service.files() - .get(fileId=response["id"], supportsAllDrives=True) - .execute() - ) - return self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) - media_body = MediaFileUpload( - file_path, - mimetype=mime_type, - resumable=True, - chunksize=100 * 1024 * 1024, - ) - - drive_file = self.__service.files().create( - body=file_metadata, media_body=media_body, supportsAllDrives=True - ) - response = None - retries = 0 - while response is None and not self.__is_cancelled: - try: - self.__status, response = drive_file.next_chunk() - except HttpError as err: - if err.resp.status in [500, 502, 503, 504] and retries < 10: - retries += 1 - continue - if err.resp.get("content-type", "").startswith("application/json"): - reason = ( - eval(err.content).get("error").get("errors")[0].get("reason") - ) - if reason not in [ - "userRateLimitExceeded", - "dailyLimitExceeded", - ]: - raise err - if config_dict["USE_SERVICE_ACCOUNTS"]: - if self.__sa_count >= self.__sa_number: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" - ) - raise err - if self.__is_cancelled: - return None - self.__switchServiceAccount() - LOGGER.info(f"Got: {reason}, Trying Again.") - return self.__upload_file( - file_path, file_name, mime_type, dest_id - ) - LOGGER.error(f"Got: {reason}") - raise err - if self.__is_cancelled: - return None - if not self.__listener.seed or self.__listener.newDir: - with contextlib.suppress(Exception): - osremove(file_path) - self.__file_processed_bytes = 0 - if not is_dir: - drive_file = ( - self.__service.files() - .get(fileId=response["id"], supportsAllDrives=True) - .execute() - ) - return self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get("id")) - return None - - def clone(self, link, gdrive_id): - if not gdrive_id: - gdrive_id = config_dict["GDRIVE_ID"] - self.__is_cloning = True - self.__start_time = time() - self.__total_files = 0 - self.__total_folders = 0 - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - return "Google Drive ID could not be found in the provided link" - msg = "" - LOGGER.info(f"File ID: {file_id}") - try: - meta = self.__getFileMetadata(file_id) - mime_type = meta.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - dir_id = self.__create_directory(meta.get("name"), gdrive_id) - self.__cloneFolder( - meta.get("name"), meta.get("name"), meta.get("id"), dir_id - ) - durl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format(dir_id) - if self.__is_cancelled: - LOGGER.info("Deleting cloned data from Drive...") - self.deletefile(durl) - return None, None, None, None, None - mime_type = "Folder" - size = self.__processed_bytes - else: - file = self.__copyFile(meta.get("id"), gdrive_id, meta.get("name")) - msg += f'Name: {file.get("name")}' - durl = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id")) - if mime_type is None: - mime_type = "File" - size = int(meta.get("size", 0)) - return durl, size, mime_type, self.__total_files, self.__total_folders - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = 
err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - if "User rate limit exceeded" in err: - msg = "User rate limit exceeded." - elif "File not found" in err: - if not self.__alt_auth: - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - return self.clone(link) - msg = "File not found." - else: - msg = f"Error.\n{err}" - async_to_sync(self.__listener.onUploadError, msg) - return None, None, None, None, None - - def __cloneFolder(self, name, local_path, folder_id, dest_id): - LOGGER.info(f"Syncing: {local_path}") - files = self.getFilesByFolderId(folder_id) - if len(files) == 0: - return dest_id - for file in files: - if file.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE: - self.__total_folders += 1 - file_path = ospath.join(local_path, file.get("name")) - current_dir_id = self.__create_directory(file.get("name"), dest_id) - self.__cloneFolder( - file.get("name"), file_path, file.get("id"), current_dir_id - ) - elif ( - not file.get("name").lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)) - ): - self.__total_files += 1 - self.__copyFile(file.get("id"), dest_id, file.get("name")) - self.__processed_bytes += int(file.get("size", 0)) - self.__total_time = int(time() - self.__start_time) - if self.__is_cancelled: - break - return None - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - def __copyFile(self, file_id, dest_id, file_name): - file_name, _ = async_to_sync( - process_file, file_name, self.__user_id, is_mirror=True - ) - body = {"name": file_name, "parents": [dest_id]} - try: - return ( - self.__service.files() - .copy(fileId=file_id, body=body, supportsAllDrives=True) - .execute() - ) - except HttpError as err: - if err.resp.get("content-type", "").startswith("application/json"): - reason = ( - eval(err.content).get("error").get("errors")[0].get("reason") - ) - if reason not in [ - "userRateLimitExceeded", - "dailyLimitExceeded", - "cannotCopyFile", - ]: - raise err - if reason == "cannotCopyFile": - LOGGER.error(err) - elif config_dict["USE_SERVICE_ACCOUNTS"]: - if self.__sa_count >= self.__sa_number: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" - ) - raise err - if self.__is_cancelled: - return None - self.__switchServiceAccount() - return self.__copyFile(file_id, dest_id, file_name) - else: - LOGGER.error(f"Got: {reason}") - raise err - - def __escapes(self, estr): - chars = ["\\", "'", '"', r"\a", r"\b", r"\f", r"\n", r"\r", r"\t"] - for char in chars: - estr = estr.replace(char, f"\\{char}") - return estr.strip() - - def __get_recursive_list(self, file, rootid): - rtnlist = [] - if rootid == "root": - rootid = ( - self.__service.files() - .get(fileId="root", fields="id") - .execute() - .get("id") - ) - x = file.get("name") - y = file.get("id") - while y != rootid: - rtnlist.append(x) - file = ( - self.__service.files() - .get( - fileId=file.get("parents")[0], - supportsAllDrives=True, - fields="id, name, parents", - ) - .execute() - ) - x = file.get("name") - y = file.get("id") - rtnlist.reverse() - return rtnlist - - def __drive_query(self, dir_id, fileName, stopDup, isRecursive, itemType): - try: - if isRecursive: - if stopDup: - query = f"name = '{fileName}' and " - else: - fileName = fileName.split() - query = "".join( - f"name contains '{name}' and " - for name in fileName - if name 
!= "" - ) - if itemType == "files": - query += ( - "mimeType != 'application/vnd.google-apps.folder' and " - ) - elif itemType == "folders": - query += ( - "mimeType = 'application/vnd.google-apps.folder' and " - ) - query += "trashed = false" - if dir_id == "root": - return ( - self.__service.files() - .list( - q=f"{query} and 'me' in owners", - pageSize=200, - spaces="drive", - fields="files(id, name, mimeType, size, parents)", - orderBy="folder, name asc", - ) - .execute() - ) - return ( - self.__service.files() - .list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - driveId=dir_id, - q=query, - spaces="drive", - pageSize=150, - fields="files(id, name, mimeType, size, teamDriveId, parents)", - corpora="drive", - orderBy="folder, name asc", - ) - .execute() - ) - if stopDup: - query = f"'{dir_id}' in parents and name = '{fileName}' and " - else: - query = f"'{dir_id}' in parents and " - fileName = fileName.split() - for name in fileName: - if name != "": - query += f"name contains '{name}' and " - if itemType == "files": - query += "mimeType != 'application/vnd.google-apps.folder' and " - elif itemType == "folders": - query += "mimeType = 'application/vnd.google-apps.folder' and " - query += "trashed = false" - return ( - self.__service.files() - .list( - supportsAllDrives=True, - includeItemsFromAllDrives=True, - q=query, - spaces="drive", - pageSize=150, - fields="files(id, name, mimeType, size)", - orderBy="folder, name asc", - ) - .execute() - ) - except Exception as err: - err = str(err).replace(">", "").replace("<", "") - LOGGER.error(err) - return {"files": []} - - def drive_list( - self, fileName, stopDup=False, noMulti=False, isRecursive=True, itemType="" - ): - msg = "" - fileName = self.__escapes(str(fileName)) - contents_no = 0 - telegraph_content = [] - Title = False - if len(list_drives_dict) > 1: - token_service = self.__alt_authorize() - if token_service is not None: - self.__service = token_service - for drive_name, drives_dict in list_drives_dict.items(): - dir_id = drives_dict["drive_id"] - index_url = drives_dict["index_link"] - isRecur = False if isRecursive and len(dir_id) > 23 else isRecursive - response = self.__drive_query( - dir_id, fileName, stopDup, isRecur, itemType - ) - if not response["files"]: - if noMulti: - break - continue - if not Title: - msg += f"
<h4>Search Result For {fileName}</h4>" - Title = True - if drive_name: - msg += f"╾────────────╼<br><b>{drive_name}</b><br>╾────────────╼<br>" - for file in response.get("files", []): - mime_type = file.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - furl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format( - file.get("id") - ) - msg += f"<code>{file.get('name')}<br>(folder)</code><br>" - msg += f"<b><a href={furl}>Drive Link</a></b>" - if index_url: - if isRecur: - url_path = "/".join( - [ - rquote(n, safe="") - for n in self.__get_recursive_list(file, dir_id) - ] - ) - else: - url_path = rquote(f'{file.get("name")}', safe="") - url = f"{index_url}/{url_path}/" - msg += f' <b><a href="{url}">Index Link</a></b>' - elif mime_type == "application/vnd.google-apps.shortcut": - furl = self.__G_DRIVE_DIR_BASE_DOWNLOAD_URL.format( - file.get("id") - ) - msg += f"⁍<a href='{furl}'>{file.get('name')}</a> (shortcut)" - else: - furl = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(file.get("id")) - msg += f"<code>{file.get('name')}<br>({get_readable_file_size(int(file.get('size', 0)))})</code><br>" - msg += f"<b><a href={furl}>Drive Link</a></b>" - if index_url: - if isRecur: - url_path = "/".join( - rquote(n, safe="") - for n in self.__get_recursive_list(file, dir_id) - ) - else: - url_path = rquote(f'{file.get("name")}') - url = f"{index_url}/{url_path}" - msg += f' <b><a href="{url}">Index Link</a></b>' - msg += "<br><br>
" - contents_no += 1 - if len(msg.encode("utf-8")) > 39000: - telegraph_content.append(msg) - msg = "" - if noMulti: - break - - if msg != "": - telegraph_content.append(msg) - - return telegraph_content, contents_no - - def count(self, link): - try: - file_id = self.getIdFromUrl(link) - except (KeyError, IndexError): - return ( - "Google Drive ID could not be found in the provided link", - None, - None, - None, - None, - ) - LOGGER.info(f"File ID: {file_id}") - try: - return self.__proceed_count(file_id) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - if "File not found" in err: - if not self.__alt_auth: - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - return self.count(link) - msg = "File not found." - else: - msg = f"Error.\n{err}" - return msg, None, None, None, None - - def __proceed_count(self, file_id): - meta = self.__getFileMetadata(file_id) - name = meta["name"] - LOGGER.info(f"Counting: {name}") - mime_type = meta.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.__gDrive_directory(meta) - mime_type = "Folder" - else: - if mime_type is None: - mime_type = "File" - self.__total_files += 1 - self.__gDrive_file(meta) - return ( - name, - mime_type, - self.__total_bytes, - self.__total_files, - self.__total_folders, - ) - - def __gDrive_file(self, filee): - size = int(filee.get("size", 0)) - self.__total_bytes += size - - def __gDrive_directory(self, drive_folder): - files = self.getFilesByFolderId(drive_folder["id"]) - if len(files) == 0: - return - for filee in files: - shortcut_details = filee.get("shortcutDetails") - if shortcut_details is not None: - mime_type = shortcut_details["targetMimeType"] - file_id = shortcut_details["targetId"] - filee = self.__getFileMetadata(file_id) - else: - mime_type = filee.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.__total_folders += 1 - self.__gDrive_directory(filee) - else: - self.__total_files += 1 - self.__gDrive_file(filee) - - def download(self, link): - self.__is_downloading = True - file_id = self.getIdFromUrl(link) - self.__updater = SetInterval(self.__update_interval, self.__progress) - try: - meta = self.__getFileMetadata(file_id) - if meta.get("mimeType") == self.__G_DRIVE_DIR_MIME_TYPE: - self.__download_folder(file_id, self.__path, self.name) - else: - makedirs(self.__path, exist_ok=True) - self.__download_file( - file_id, self.__path, self.name, meta.get("mimeType") - ) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info(f"Total Attempts: {err.last_attempt.attempt_number}") - err = err.last_attempt.exception() - err = str(err).replace(">", "").replace("<", "") - if "downloadQuotaExceeded" in err: - err = "Download Quota Exceeded." - elif "File not found" in err: - if not self.__alt_auth: - token_service = self.__alt_authorize() - if token_service is not None: - LOGGER.error("File not found. Trying with token.pickle...") - self.__service = token_service - self.__updater.cancel() - return self.download(link) - err = "File not found!" 
- async_to_sync(self.__listener.onDownloadError, err) - self.__is_cancelled = True - finally: - self.__updater.cancel() - if self.__is_cancelled: - return None - async_to_sync(self.__listener.on_download_complete) - - def __download_folder(self, folder_id, path, folder_name): - folder_name = folder_name.replace("/", "") - if not ospath.exists(f"{path}/{folder_name}"): - makedirs(f"{path}/{folder_name}") - path += f"/{folder_name}" - result = self.getFilesByFolderId(folder_id) - if len(result) == 0: - return - result = sorted(result, key=lambda k: k["name"]) - for item in result: - file_id = item["id"] - filename = item["name"] - shortcut_details = item.get("shortcutDetails") - if shortcut_details is not None: - file_id = shortcut_details["targetId"] - mime_type = shortcut_details["targetMimeType"] - else: - mime_type = item.get("mimeType") - if mime_type == self.__G_DRIVE_DIR_MIME_TYPE: - self.__download_folder(file_id, path, filename) - elif not ospath.isfile( - f"{path}{filename}" - ) and not filename.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - self.__download_file(file_id, path, filename, mime_type) - if self.__is_cancelled: - break - - @retry( - wait=wait_exponential(multiplier=2, min=3, max=6), - stop=stop_after_attempt(3), - retry=(retry_if_exception_type(Exception)), - ) - def __download_file(self, file_id, path, filename, mime_type): - request = self.__service.files().get_media( - fileId=file_id, supportsAllDrives=True - ) - filename = filename.replace("/", "") - if len(filename.encode()) > 255: - ext = ospath.splitext(filename)[1] - filename = f"{filename[:245]}{ext}" - if self.name.endswith(ext): - self.name = filename - if self.__is_cancelled: - return None - fh = FileIO(f"{path}/{filename}", "wb") - downloader = MediaIoBaseDownload(fh, request, chunksize=100 * 1024 * 1024) - done = False - retries = 0 - while not done: - if self.__is_cancelled: - fh.close() - break - try: - self.__status, done = downloader.next_chunk() - except HttpError as err: - if err.resp.status in [500, 502, 503, 504] and retries < 10: - retries += 1 - continue - if err.resp.get("content-type", "").startswith("application/json"): - reason = ( - eval(err.content).get("error").get("errors")[0].get("reason") - ) - if reason not in [ - "downloadQuotaExceeded", - "dailyLimitExceeded", - ]: - raise err - if config_dict["USE_SERVICE_ACCOUNTS"]: - if self.__sa_count >= self.__sa_number: - LOGGER.info( - f"Reached maximum number of service accounts switching, which is {self.__sa_count}" - ) - raise err - if self.__is_cancelled: - return None - self.__switchServiceAccount() - LOGGER.info(f"Got: {reason}, Trying Again...") - return self.__download_file( - file_id, path, filename, mime_type - ) - LOGGER.error(f"Got: {reason}") - raise err - self.__file_processed_bytes = 0 - return None - - async def cancel_download(self): - self.__is_cancelled = True - if self.__is_downloading: - LOGGER.info(f"Cancelling Download: {self.name}") - await self.__listener.onDownloadError("Download stopped by user!") - elif self.__is_cloning: - LOGGER.info(f"Cancelling Clone: {self.name}") - await self.__listener.onUploadError( - "your clone has been stopped and cloned data has been deleted!" 
- ) - elif self.__is_uploading: - LOGGER.info(f"Cancelling Upload: {self.name}") - await self.__listener.onUploadError("Cancelled by user!") diff --git a/bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py b/bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py deleted file mode 100644 index 982ff2a56..000000000 --- a/bot/helper/mirror_leech_utils/upload_utils/telegramEngine.py +++ /dev/null @@ -1,688 +0,0 @@ -import contextlib -from os import path as ospath -from os import walk -from re import match as re_match -from time import time -from asyncio import sleep -from logging import ERROR, getLogger -from traceback import format_exc - -from PIL import Image -from natsort import natsorted -from tenacity import ( - RetryError, - retry, - wait_exponential, - stop_after_attempt, - retry_if_exception_type, -) -from aioshutil import copy -from aiofiles.os import path as aiopath -from aiofiles.os import mkdir, makedirs -from aiofiles.os import remove as aioremove -from aiofiles.os import rename as aiorename -from pyrogram.types import InputMediaVideo, InputMediaDocument -from pyrogram.errors import ( - FloodWait, - PeerIdInvalid, - ChannelInvalid, - MessageNotModified, -) - -from bot import ( - IS_PREMIUM_USER, - GLOBAL_EXTENSION_FILTER, - bot, - user, - user_data, - config_dict, -) -from bot.helper.aeon_utils.metadata import add_attachment -from bot.helper.ext_utils.bot_utils import ( - is_mkv, - is_url, - sync_to_async, - is_telegram_link, - download_image_url, -) -from bot.helper.ext_utils.files_utils import ( - get_ss, - take_ss, - is_archive, - process_file, - get_base_name, - clean_unwanted, - get_media_info, - get_audio_thumb, - get_document_type, - get_mediainfo_link, -) -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - chat_info, - delete_message, - sendMultiMessage, - get_tg_link_content, -) - -LOGGER = getLogger(__name__) -getLogger("pyrogram").setLevel(ERROR) - - -class TgUploader: - def __init__(self, name=None, path=None, listener=None): - self.name = name - self.__last_uploaded = 0 - self.__processed_bytes = 0 - self.__listener = listener - self.__path = path - self.__start_time = time() - self.__total_files = 0 - self.__is_cancelled = False - self.__sent_msg = None - self.__has_buttons = False - self.__msgs_dict = {} - self.__corrupted = 0 - self.__is_corrupted = False - self.__media_dict = {"videos": {}, "documents": {}} - self.__last_msg_in_group = False - self.__prm_media = False - self.__client = bot - self.__up_path = "" - self.__ldump = "" - self.__mediainfo = False - self.__as_doc = False - self.__media_group = False - self.__bot_pm = False - self.__user_id = listener.message.from_user.id - self.__leechmsg = {} - self.__files_utils = self.__listener.files_utils - self.__thumb = f"Thumbnails/{listener.message.from_user.id}.jpg" - - async def get_custom_thumb(self, thumb): - if is_telegram_link(thumb): - try: - msg, client = await get_tg_link_content(thumb) - except Exception as e: - LOGGER.error(f"Thumb Access Error: {e}") - return None - if msg and not msg.photo: - LOGGER.error("Thumb TgLink Invalid: Provide Link to Photo Only !") - return None - _client = bot if client == "bot" else user - photo_dir = await _client.download_media(msg) - elif is_url(thumb): - photo_dir = await download_image_url(thumb) - else: - LOGGER.error("Custom Thumb Invalid") - return None - if await aiopath.exists(photo_dir): - path = "Thumbnails" - if not await aiopath.isdir(path): - await mkdir(path) - des_dir = 
ospath.join(path, f"{time()}.jpg") - await sync_to_async( - Image.open(photo_dir).convert("RGB").save, des_dir, "JPEG" - ) - await aioremove(photo_dir) - return des_dir - return None - - async def __buttons(self, up_path, is_video=False): - buttons = ButtonMaker() - try: - if is_video and bool(self.__files_utils["screenshots"]): - buttons.url( - "SCREENSHOTS", - await get_ss(up_path, self.__files_utils["screenshots"]), - ) - except Exception as e: - LOGGER.error(f"ScreenShots Error: {e}") - try: - if self.__mediainfo: - m = await get_mediainfo_link(up_path) - buttons.url("MediaInfo", m) - LOGGER.info(m) - except Exception as e: - LOGGER.error(f"MediaInfo Error: {e!s}") - return buttons.column(1) if self.__has_buttons else None - - async def __copy_file(self): - try: - if self.__bot_pm and (self.__leechmsg or self.__listener.isSuperGroup): - destination = "Bot PM" - copied = await bot.copy_message( - chat_id=self.__user_id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - reply_to_message_id=self.__listener.botpmmsg.id - if self.__listener.botpmmsg - else None, - ) - if self.__has_buttons: - rply = self.__sent_msg.reply_markup - with contextlib.suppress(MessageNotModified): - await copied.edit_reply_markup(rply) - if len(self.__leechmsg) > 1: - for chat_id, msg in list(self.__leechmsg.items())[1:]: - destination = f"Leech Log: {chat_id}" - self.__leechmsg[chat_id] = await bot.copy_message( - chat_id=chat_id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - reply_to_message_id=msg.id, - ) - if msg.text: - await delete_message(msg) - if self.__has_buttons: - with contextlib.suppress(MessageNotModified): - await self.__leechmsg[chat_id].edit_reply_markup( - self.__sent_msg.reply_markup - ) - - if self.__ldump: - destination = "User Dump" - for channel_id in self.__ldump.split(): - chat = await chat_info(channel_id) - try: - dump_copy = await bot.copy_message( - chat_id=chat.id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - ) - if self.__has_buttons: - rply = self.__sent_msg.reply_markup - with contextlib.suppress(MessageNotModified): - await dump_copy.edit_reply_markup(rply) - except (ChannelInvalid, PeerIdInvalid) as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - continue - except Exception as err: - if not self.__is_cancelled: - LOGGER.error(f"Failed To Send in {destination}:\n{err!s}") - - async def __upload_progress(self, current, total): - if self.__is_cancelled: - if IS_PREMIUM_USER: - user.stop_transmission() - bot.stop_transmission() - chunk_size = current - self.__last_uploaded - self.__last_uploaded = current - self.__processed_bytes += chunk_size - - async def __user_settings(self): - user_dict = user_data.get(self.__user_id, {}) - self.__as_doc = user_dict.get("as_doc") or config_dict["AS_DOCUMENT"] - self.__media_group = ( - user_dict.get("media_group") or config_dict["MEDIA_GROUP"] - ) - self.__bot_pm = True - self.__mediainfo = config_dict["SHOW_MEDIAINFO"] or user_dict.get( - "mediainfo" - ) - self.__ldump = user_dict.get("ldump", "") or "" - self.__has_buttons = bool( - self.__mediainfo or self.__files_utils["screenshots"] - ) - if not await aiopath.exists(self.__thumb): - self.__thumb = None - - async def __msg_to_reply(self): - msg_user = self.__listener.message.from_user - if config_dict["LEECH_DUMP_ID"]: - try: - mention = msg_user.mention(style="HTML") - uid = msg_user.id - msg = f"Task started\n\n• User: {mention}\n• ID: {uid}" - self.__leechmsg = await sendMultiMessage( - 
config_dict["LEECH_DUMP_ID"], msg - ) - except Exception as er: - await self.__listener.onUploadError(str(er)) - return False - self.__sent_msg = next(iter(self.__leechmsg.values())) - elif IS_PREMIUM_USER: - if not self.__listener.isSuperGroup: - await self.__listener.onUploadError( - "Use SuperGroup to leech with User Client! or Set LEECH_DUMP_ID to Leech in PM" - ) - return False - self.__sent_msg = self.__listener.message - else: - self.__sent_msg = self.__listener.message - return True - - async def __prepare_file(self, prefile_, dirpath): - file_, cap_mono = await process_file(prefile_, self.__user_id, dirpath) - if (atc := self.__listener.attachment) and is_mkv(prefile_): - file_ = await add_attachment(prefile_, dirpath, atc) - if prefile_ != file_: - if ( - self.__listener.seed - and not self.__listener.newDir - and not dirpath.endswith("/splited_files") - ): - dirpath = f"{dirpath}/copied" - await makedirs(dirpath, exist_ok=True) - new_path = ospath.join(dirpath, file_) - self.__up_path = await copy(self.__up_path, new_path) - else: - new_path = ospath.join(dirpath, file_) - await aiorename(self.__up_path, new_path) - self.__up_path = new_path - if len(file_) > 64: - if is_archive(file_): - name = get_base_name(file_) - ext = file_.split(name, 1)[1] - elif match := re_match(r".+(?=\..+\.0*\d+$)|.+(?=\.part\d+\..+)", file_): - name = match.group(0) - ext = file_.split(name, 1)[1] - elif len(fsplit := ospath.splitext(file_)) > 1: - name = fsplit[0] - ext = fsplit[1] - else: - name = file_ - ext = "" - extn = len(ext) - remain = 64 - extn - name = name[:remain] - if ( - self.__listener.seed - and not self.__listener.newDir - and not dirpath.endswith("/splited_files") - ): - dirpath = f"{dirpath}/copied" - await makedirs(dirpath, exist_ok=True) - new_path = ospath.join(dirpath, f"{name}{ext}") - self.__up_path = await copy(self.__up_path, new_path) - else: - new_path = ospath.join(dirpath, f"{name}{ext}") - await aiorename(self.__up_path, new_path) - self.__up_path = new_path - return cap_mono, file_ - - def __get_input_media(self, subkey, key): - rlist = [] - for msg in self.__media_dict[key][subkey]: - if key == "videos": - input_media = InputMediaVideo( - media=msg.video.file_id, caption=msg.caption - ) - else: - input_media = InputMediaDocument( - media=msg.document.file_id, caption=msg.caption - ) - rlist.append(input_media) - return rlist - - async def __switching_client(self): - LOGGER.info( - f'Uploading Media {">" if self.__prm_media else "<"} 2GB by {"User" if self.__prm_media else "Bot"} Client' - ) - self.__client = user if (self.__prm_media and IS_PREMIUM_USER) else bot - - async def __send_media_group(self, subkey, key, msgs): - msgs_list = await msgs[0].reply_to_message.reply_media_group( - media=self.__get_input_media(subkey, key), - quote=True, - disable_notification=True, - ) - for msg in msgs: - if msg.link in self.__msgs_dict: - del self.__msgs_dict[msg.link] - await delete_message(msg) - del self.__media_dict[key][subkey] - if self.__listener.isSuperGroup or config_dict["LEECH_DUMP_ID"]: - for m in msgs_list: - self.__msgs_dict[m.link] = m.caption - self.__sent_msg = msgs_list[-1] - try: - if self.__bot_pm and (self.__leechmsg or self.__listener.isSuperGroup): - destination = "Bot PM" - await bot.copy_media_group( - chat_id=self.__user_id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - ) - if self.__ldump: - destination = "Dump" - for channel_id in self.__ldump.split(): - dump_chat = await chat_info(channel_id) - try: - await 
bot.copy_media_group( - chat_id=dump_chat.id, - from_chat_id=self.__sent_msg.chat.id, - message_id=self.__sent_msg.id, - ) - except (ChannelInvalid, PeerIdInvalid) as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - continue - except Exception as err: - if not self.__is_cancelled: - LOGGER.error(f"Failed To Send in {destination}:\n{err!s}") - - async def upload(self, o_files, m_size, size): - await self.__user_settings() - res = await self.__msg_to_reply() - if not res: - return - isDeleted = False - for dirpath, _, files in sorted(await sync_to_async(walk, self.__path)): - if dirpath.endswith("/yt-dlp-thumb"): - continue - for file_ in natsorted(files): - self.__up_path = ospath.join(dirpath, file_) - if file_.lower().endswith(tuple(GLOBAL_EXTENSION_FILTER)): - await aioremove(self.__up_path) - continue - try: - f_size = await aiopath.getsize(self.__up_path) - if ( - self.__listener.seed - and file_ in o_files - and f_size in m_size - ): - continue - self.__total_files += 1 - if f_size == 0: - LOGGER.error( - f"{self.__up_path} size is zero, telegram don't upload zero size files" - ) - self.__corrupted += 1 - continue - if self.__is_cancelled: - return - self.__prm_media = f_size > 2097152000 - cap_mono, file_ = await self.__prepare_file(file_, dirpath) - if self.__last_msg_in_group: - group_lists = [ - x for v in self.__media_dict.values() for x in v - ] - if ( - match := re_match( - r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+)", self.__up_path - ) - ) and match.group(0) not in group_lists: - for key, value in list(self.__media_dict.items()): - for subkey, msgs in list(value.items()): - if len(msgs) > 1: - await self.__send_media_group( - subkey, key, msgs - ) - self.__last_msg_in_group = False - self.__last_uploaded = 0 - await self.__switching_client() - await self.__upload_file(cap_mono, file_) - if not isDeleted: - values_list = list(self.__leechmsg.values()) - if values_list: - await delete_message(values_list[0]) - isDeleted = True - if self.__is_cancelled: - return - if not self.__is_corrupted and ( - self.__listener.isSuperGroup or config_dict["LEECH_DUMP_ID"] - ): - self.__msgs_dict[self.__sent_msg.link] = file_ - await sleep(1) - except Exception as err: - if isinstance(err, RetryError): - LOGGER.info( - f"Total Attempts: {err.last_attempt.attempt_number}" - ) - else: - LOGGER.error(f"{format_exc()}. Path: {self.__up_path}") - if self.__is_cancelled: - return - continue - finally: - if ( - not self.__is_cancelled - and await aiopath.exists(self.__up_path) - and ( - not self.__listener.seed - or self.__listener.newDir - or dirpath.endswith("/splited_files") - or "/copied/" in self.__up_path - ) - ): - await aioremove(self.__up_path) - for key, value in list(self.__media_dict.items()): - for subkey, msgs in list(value.items()): - if len(msgs) > 1: - await self.__send_media_group(subkey, key, msgs) - if self.__is_cancelled: - return - if self.__listener.seed and not self.__listener.newDir: - await clean_unwanted(self.__path) - if self.__total_files == 0: - await self.__listener.onUploadError( - "No files to upload. In case you have filled EXTENSION_FILTER, then check if all files have those extensions or not." - ) - return - if self.__total_files <= self.__corrupted: - await self.__listener.onUploadError( - "Files Corrupted or unable to upload. Check logs!" 
- ) - return - LOGGER.info(f"Leech Completed: {self.name}") - await self.__listener.onUploadComplete( - None, - size, - self.__msgs_dict, - self.__total_files, - self.__corrupted, - self.name, - ) - - @retry( - wait=wait_exponential(multiplier=2, min=4, max=8), - stop=stop_after_attempt(3), - retry=retry_if_exception_type(Exception), - ) - async def __upload_file(self, cap_mono, file, force_document=False): - if self.__thumb is not None and not await aiopath.exists(self.__thumb): - self.__thumb = None - thumb = self.__thumb - self.__is_corrupted = False - try: - is_video, is_audio, is_image = await get_document_type(self.__up_path) - - if self.__files_utils["thumb"]: - thumb = await self.get_custom_thumb(self.__files_utils["thumb"]) - if not is_image and thumb is None: - file_name = ospath.splitext(file)[0] - thumb_path = f"{self.__path}/yt-dlp-thumb/{file_name}.jpg" - if await aiopath.isfile(thumb_path): - thumb = thumb_path - elif is_audio and not is_video: - thumb = await get_audio_thumb(self.__up_path) - - if ( - self.__as_doc - or force_document - or (not is_video and not is_audio and not is_image) - ): - key = "documents" - if is_video and thumb is None: - thumb = await take_ss(self.__up_path, None) - if self.__is_cancelled: - return None - buttons = await self.__buttons(self.__up_path, is_video) - nrml_media = await self.__client.send_document( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - document=self.__up_path, - thumb=thumb, - caption=cap_mono, - force_document=True, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=buttons, - ) - - if self.__prm_media and (self.__has_buttons or not self.__leechmsg): - try: - self.__sent_msg = await bot.copy_message( - nrml_media.chat.id, - nrml_media.chat.id, - nrml_media.id, - reply_to_message_id=self.__sent_msg.id, - reply_markup=buttons, - ) - if self.__sent_msg: - await delete_message(nrml_media) - except Exception: - self.__sent_msg = nrml_media - else: - self.__sent_msg = nrml_media - elif is_video: - key = "videos" - duration = (await get_media_info(self.__up_path))[0] - if thumb is None: - thumb = await take_ss(self.__up_path, duration) - if thumb is not None: - with Image.open(thumb) as img: - width, height = img.size - else: - width = 480 - height = 320 - if not self.__up_path.upper().endswith(("MKV", "MP4")): - dirpath, file_ = self.__up_path.rsplit("/", 1) - if ( - self.__listener.seed - and not self.__listener.newDir - and not dirpath.endswith("/splited_files") - ): - dirpath = f"{dirpath}/copied" - await makedirs(dirpath, exist_ok=True) - new_path = ospath.join( - dirpath, f"{ospath.splitext(file_)[0]}.mkv" - ) - self.__up_path = await copy(self.__up_path, new_path) - else: - new_path = f"{ospath.splitext(self.__up_path)[0]}.mkv" - await aiorename(self.__up_path, new_path) - self.__up_path = new_path - if self.__is_cancelled: - return None - buttons = await self.__buttons(self.__up_path, is_video) - nrml_media = await self.__client.send_video( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - video=self.__up_path, - caption=cap_mono, - duration=duration, - width=width, - height=height, - thumb=thumb, - supports_streaming=True, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=buttons, - ) - if self.__prm_media and (self.__has_buttons or not self.__leechmsg): - try: - self.__sent_msg = await bot.copy_message( - nrml_media.chat.id, - nrml_media.chat.id, - nrml_media.id, - reply_to_message_id=self.__sent_msg.id, - 
reply_markup=buttons, - ) - if self.__sent_msg: - await delete_message(nrml_media) - except Exception: - self.__sent_msg = nrml_media - else: - self.__sent_msg = nrml_media - elif is_audio: - key = "audios" - duration, artist, title = await get_media_info(self.__up_path) - if self.__is_cancelled: - return None - self.__sent_msg = await self.__client.send_audio( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - audio=self.__up_path, - caption=cap_mono, - duration=duration, - performer=artist, - title=title, - thumb=thumb, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=await self.__buttons(self.__up_path), - ) - else: - key = "photos" - if self.__is_cancelled: - return None - self.__sent_msg = await self.__client.send_photo( - chat_id=self.__sent_msg.chat.id, - reply_to_message_id=self.__sent_msg.id, - photo=self.__up_path, - caption=cap_mono, - disable_notification=True, - progress=self.__upload_progress, - reply_markup=await self.__buttons(self.__up_path), - ) - - if ( - not self.__is_cancelled - and self.__media_group - and (self.__sent_msg.video or self.__sent_msg.document) - ): - key = "documents" if self.__sent_msg.document else "videos" - if match := re_match( - r".+(?=\.0*\d+$)|.+(?=\.part\d+\..+)", self.__up_path - ): - pname = match.group(0) - if pname in self.__media_dict[key]: - self.__media_dict[key][pname].append(self.__sent_msg) - else: - self.__media_dict[key][pname] = [self.__sent_msg] - msgs = self.__media_dict[key][pname] - if len(msgs) == 10: - await self.__send_media_group(pname, key, msgs) - else: - self.__last_msg_in_group = True - await self.__copy_file() - - if ( - self.__thumb is None - and thumb is not None - and await aiopath.exists(thumb) - ): - await aioremove(thumb) - except FloodWait as f: - LOGGER.warning(str(f)) - await sleep(f.value) - except Exception as err: - if ( - self.__thumb is None - and thumb is not None - and await aiopath.exists(thumb) - ): - await aioremove(thumb) - LOGGER.error(f"{format_exc()}. Path: {self.__up_path}") - if "Telegram says: [400" in str(err) and key != "documents": - LOGGER.error(f"Retrying As Document. 
Path: {self.__up_path}") - return await self.__upload_file(cap_mono, file, True) - raise err - - @property - def speed(self): - try: - return self.__processed_bytes / (time() - self.__start_time) - except Exception: - return 0 - - @property - def processed_bytes(self): - return self.__processed_bytes - - async def cancel_download(self): - self.__is_cancelled = True - LOGGER.info(f"Cancelling Upload: {self.name}") - await self.__listener.onUploadError("Cancelled by user!") diff --git a/bot/helper/telegram_helper/bot_commands.py b/bot/helper/telegram_helper/bot_commands.py index 913b83790..818d3dbac 100644 --- a/bot/helper/telegram_helper/bot_commands.py +++ b/bot/helper/telegram_helper/bot_commands.py @@ -1,40 +1,42 @@ -from bot import CMD_SUFFIX as i +# ruff: noqa: RUF012 +from bot.core.config_manager import Config +i = Config.CMD_SUFFIX -class _BotCommands: - def __init__(self): - self.StartCommand = "start" - self.MirrorCommand = [f"mirror{i}", f"m{i}"] - self.YtdlCommand = [f"ytdl{i}", f"y{i}"] - self.LeechCommand = [f"leech{i}", f"l{i}"] - self.YtdlLeechCommand = [f"ytdlleech{i}", f"yl{i}"] - self.CloneCommand = [f"clone{i}", f"c{i}"] - self.CountCommand = f"count{i}" - self.DeleteCommand = f"del{i}" - self.StopAllCommand = [f"stopall{i}", "stopallbot"] - self.ListCommand = f"list{i}" - self.SearchCommand = f"search{i}" - self.StatusCommand = [f"status{i}", "statusall"] - self.UsersCommand = f"users{i}" - self.AuthorizeCommand = f"authorize{i}" - self.UnAuthorizeCommand = f"unauthorize{i}" - self.AddSudoCommand = f"addsudo{i}" - self.RmSudoCommand = f"rmsudo{i}" - self.PingCommand = "ping" - self.RestartCommand = [f"restart{i}", "restartall"] - self.StatsCommand = [f"stats{i}", "statsall"] - self.HelpCommand = f"help{i}" - self.LogCommand = f"log{i}" - self.ShellCommand = f"shell{i}" - self.EvalCommand = f"eval{i}" - self.ExecCommand = f"exec{i}" - self.BotSetCommand = f"botsettings{i}" - self.UserSetCommand = f"settings{i}" - self.SpeedCommand = f"speedtest{i}" - self.AddImageCommand = f"addimg{i}" - self.ImagesCommand = f"images{i}" - self.MediaInfoCommand = f"mediainfo{i}" - self.BroadcastCommand = [f"broadcast{i}", "broadcastall"] - -BotCommands = _BotCommands() +class BotCommands: + StartCommand = f"start{i}" + MirrorCommand = [f"mirror{i}", f"m{i}"] + YtdlCommand = [f"ytdl{i}", f"y{i}"] + LeechCommand = [f"leech{i}", f"l{i}"] + YtdlLeechCommand = [f"ytdlleech{i}", f"yl{i}"] + CloneCommand = f"clone{i}" + MediaInfoCommand = f"mediainfo{i}" + CountCommand = f"count{i}" + DeleteCommand = f"del{i}" + CancelAllCommand = f"cancelall{i}" + ForceStartCommand = [f"forcestart{i}", f"fs{i}"] + ListCommand = f"list{i}" + SearchCommand = f"search{i}" + StatusCommand = f"status{i}" + UsersCommand = f"users{i}" + AuthorizeCommand = f"authorize{i}" + UnAuthorizeCommand = f"unauthorize{i}" + AddSudoCommand = f"addsudo{i}" + RmSudoCommand = f"rmsudo{i}" + PingCommand = f"ping{i}" + RestartCommand = f"restart{i}" + RestartSessionsCommand = f"restartses{i}" + StatsCommand = f"stats{i}" + HelpCommand = f"help{i}" + LogCommand = f"log{i}" + ShellCommand = f"shell{i}" + AExecCommand = f"aexec{i}" + ExecCommand = f"exec{i}" + ClearLocalsCommand = f"clearlocals{i}" + BotSetCommand = f"botsettings{i}" + UserSetCommand = f"settings{i}" + SpeedTest = f"speedtest{i}" + BroadcastCommand = [f"broadcast{i}", "broadcastall"] + SelectCommand = f"sel{i}" + RssCommand = f"rss{i}" diff --git a/bot/helper/telegram_helper/button_build.py b/bot/helper/telegram_helper/button_build.py index 6b760c15f..27c187d93 100644 
--- a/bot/helper/telegram_helper/button_build.py +++ b/bot/helper/telegram_helper/button_build.py @@ -3,52 +3,55 @@ class ButtonMaker: def __init__(self): - self.main_buttons = [] - self.header_buttons = [] - self.footer_buttons = [] - - def url(self, text, url, position=None): - button = InlineKeyboardButton(text=text, url=url) - if position == "header": - self.header_buttons.append(button) + self._button = [] + self._header_button = [] + self._footer_button = [] + + def url_button(self, key, link, position=None): + if not position: + self._button.append(InlineKeyboardButton(text=key, url=link)) + elif position == "header": + self._header_button.append(InlineKeyboardButton(text=key, url=link)) elif position == "footer": - self.footer_buttons.append(button) - else: - self.main_buttons.append(button) - - def callback(self, text, callback_data, position=None): - button = InlineKeyboardButton(text=text, callback_data=callback_data) - if position == "header": - self.header_buttons.append(button) + self._footer_button.append(InlineKeyboardButton(text=key, url=link)) + + def data_button(self, key, data, position=None): + if not position: + self._button.append(InlineKeyboardButton(text=key, callback_data=data)) + elif position == "header": + self._header_button.append( + InlineKeyboardButton(text=key, callback_data=data), + ) elif position == "footer": - self.footer_buttons.append(button) - else: - self.main_buttons.append(button) + self._footer_button.append( + InlineKeyboardButton(text=key, callback_data=data), + ) - def column(self, main_columns=1, header_columns=8, footer_columns=8): - keyboard = [ - self.main_buttons[i : i + main_columns] - for i in range(0, len(self.main_buttons), main_columns) + def build_menu(self, b_cols=1, h_cols=8, f_cols=8): + menu = [ + self._button[i : i + b_cols] for i in range(0, len(self._button), b_cols) ] - - if self.header_buttons: - if len(self.header_buttons) > header_columns: - header_chunks = [ - self.header_buttons[i : i + header_columns] - for i in range(0, len(self.header_buttons), header_columns) + if self._header_button: + h_cnt = len(self._header_button) + if h_cnt > h_cols: + header_buttons = [ + self._header_button[i : i + h_cols] + for i in range(0, len(self._header_button), h_cols) ] - keyboard = header_chunks + keyboard + menu = header_buttons + menu else: - keyboard.insert(0, self.header_buttons) - - if self.footer_buttons: - if len(self.footer_buttons) > footer_columns: - footer_chunks = [ - self.footer_buttons[i : i + footer_columns] - for i in range(0, len(self.footer_buttons), footer_columns) + menu.insert(0, self._header_button) + if self._footer_button: + if len(self._footer_button) > f_cols: + [ + menu.append(self._footer_button[i : i + f_cols]) + for i in range(0, len(self._footer_button), f_cols) ] - keyboard += footer_chunks else: - keyboard.append(self.footer_buttons) + menu.append(self._footer_button) + return InlineKeyboardMarkup(menu) - return InlineKeyboardMarkup(keyboard) + def reset(self): + self._button = [] + self._header_button = [] + self._footer_button = [] diff --git a/bot/helper/telegram_helper/filters.py b/bot/helper/telegram_helper/filters.py index ca41e25fb..de858b253 100644 --- a/bot/helper/telegram_helper/filters.py +++ b/bot/helper/telegram_helper/filters.py @@ -1,24 +1,24 @@ -from pyrogram.enums import ChatType from pyrogram.filters import create -from bot import OWNER_ID, user_data -from bot.helper.telegram_helper.message_utils import chat_info +from bot import user_data +from bot.core.config_manager import 
Config class CustomFilters: - async def owner_filter(self, _, message): - user = message.from_user or message.sender_chat + async def owner_filter(self, _, update): + user = update.from_user or update.sender_chat uid = user.id - return uid == OWNER_ID + return uid == Config.OWNER_ID owner = create(owner_filter) - async def authorized_user(self, _, message): - user = message.from_user or message.sender_chat + async def authorized_user(self, _, update): + user = update.from_user or update.sender_chat uid = user.id - chat_id = message.chat.id + chat_id = update.chat.id + thread_id = update.message_thread_id if update.is_topic_message else None return bool( - uid == OWNER_ID + uid == Config.OWNER_ID or ( uid in user_data and ( @@ -26,49 +26,24 @@ async def authorized_user(self, _, message): or user_data[uid].get("is_sudo", False) ) ) - or (chat_id in user_data and user_data[chat_id].get("is_auth", False)) - ) - - authorized = create(authorized_user) - - async def authorized_usetting(self, _, message): - uid = (message.from_user or message.sender_chat).id - chat_id = message.chat.id - isExists = False - if ( - uid == OWNER_ID or ( - uid in user_data + chat_id in user_data + and user_data[chat_id].get("is_auth", False) and ( - user_data[uid].get("is_auth", False) - or user_data[uid].get("is_sudo", False) + thread_id is None + or thread_id in user_data[chat_id].get("thread_ids", []) ) - ) - or (chat_id in user_data and user_data[chat_id].get("is_auth", False)) - ): - isExists = True - elif message.chat.type == ChatType.PRIVATE: - for channel_id in user_data: - if not ( - user_data[channel_id].get("is_auth") - and str(channel_id).startswith("-100") - ): - continue - try: - if await (await chat_info(str(channel_id))).get_member(uid): - isExists = True - break - except Exception: - continue - return isExists + ), + ) - authorized_uset = create(authorized_usetting) + authorized = create(authorized_user) - async def sudo_user(self, _, message): - user = message.from_user or message.sender_chat + async def sudo_user(self, _, update): + user = update.from_user or update.sender_chat uid = user.id return bool( - uid == OWNER_ID or uid in user_data and user_data[uid].get("is_sudo") + uid == Config.OWNER_ID + or (uid in user_data and user_data[uid].get("is_sudo")), ) sudo = create(sudo_user) diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py index 9576765d3..3abcc5f95 100644 --- a/bot/helper/telegram_helper/message_utils.py +++ b/bot/helper/telegram_helper/message_utils.py @@ -1,229 +1,153 @@ +from asyncio import sleep from re import match as re_match from time import time -from random import choice -from asyncio import sleep -from traceback import format_exc -from aiofiles.os import remove as aioremove -from pyrogram.types import InputMediaPhoto +from cachetools import TTLCache +from pyrogram import Client, enums from pyrogram.errors import ( - RPCError, + FloodPremiumWait, FloodWait, - MediaEmpty, MessageEmpty, - PeerIdInvalid, - WebpageCurlFailed, MessageNotModified, - ReplyMarkupInvalid, - UserNotParticipant, - PhotoInvalidDimensions, ) +from pyrogram.types import InputMediaPhoto from bot import ( - IMAGES, LOGGER, - DELETE_LINKS, - Interval, - bot, - user, - status_reply_dict, - download_dict_lock, - status_reply_dict_lock, + intervals, + status_dict, + task_dict_lock, + user_data, ) -from bot.helper.ext_utils.bot_utils import ( - SetInterval, - sync_to_async, - download_image_url, - get_readable_message, -) -from bot.helper.ext_utils.exceptions 
import TgLinkError -from bot.helper.telegram_helper.button_build import ButtonMaker - - -async def send_message(message, text, buttons=None, photo=None): +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import SetInterval +from bot.helper.ext_utils.exceptions import TgLinkException +from bot.helper.ext_utils.status_utils import get_readable_message + +session_cache = TTLCache(maxsize=1000, ttl=36000) + + +async def send_message( + message, + text, + buttons=None, + photo=None, + markdown=False, + block=True, +): + parse_mode = enums.ParseMode.MARKDOWN if markdown else enums.ParseMode.HTML try: + if isinstance(message, int): + return await TgClient.bot.send_message( + chat_id=message, + text=text, + disable_web_page_preview=True, + disable_notification=True, + reply_markup=buttons, + parse_mode=parse_mode, + ) if photo: - try: - if photo == "Random": - photo = choice(IMAGES) - return await message.reply_photo( - photo=photo, - reply_to_message_id=message.id, - caption=text, - reply_markup=buttons, - disable_notification=True, - ) - except IndexError: - pass - except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): - des_dir = await download_image_url(photo) - await send_message(message, text, buttons, des_dir) - await aioremove(des_dir) - return None - except Exception: - LOGGER.error(format_exc()) + return await message.reply_photo( + photo=photo, + reply_to_message_id=message.id, + caption=text, + reply_markup=buttons, + disable_notification=True, + parse_mode=parse_mode, + ) return await message.reply( text=text, quote=True, disable_web_page_preview=True, disable_notification=True, reply_markup=buttons, + parse_mode=parse_mode, ) except FloodWait as f: + if not block: + return message LOGGER.warning(str(f)) await sleep(f.value * 1.2) - return await send_message(message, text, buttons, photo) - except ReplyMarkupInvalid: - return await send_message(message, text, None, photo) + return await send_message(message, text, buttons, photo, markdown) except Exception as e: - LOGGER.error(format_exc()) + LOGGER.error(str(e)) return str(e) -async def sendCustomMsg(chat_id, text, buttons=None, photo=None): +async def edit_message( + message, + text, + buttons=None, + photo=None, + markdown=False, + block=True, +): + parse_mode = enums.ParseMode.MARKDOWN if markdown else enums.ParseMode.HTML try: - if photo: - try: - if photo == "Random": - photo = choice(IMAGES) - return await bot.send_photo( - chat_id=chat_id, - photo=photo, - caption=text, + if message.media: + if photo: + return await message.edit_media( + InputMediaPhoto(photo, text), reply_markup=buttons, - disable_notification=True, + parse_mode=parse_mode, ) - except IndexError: - pass - except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): - des_dir = await download_image_url(photo) - await sendCustomMsg(chat_id, text, buttons, des_dir) - await aioremove(des_dir) - return None - except Exception: - LOGGER.error(format_exc()) - return await bot.send_message( - chat_id=chat_id, + return await message.edit_caption( + caption=text, + reply_markup=buttons, + parse_mode=parse_mode, + ) + await message.edit( text=text, disable_web_page_preview=True, - disable_notification=True, reply_markup=buttons, + parse_mode=parse_mode, ) except FloodWait as f: + if not block: + return message LOGGER.warning(str(f)) await sleep(f.value * 1.2) - return await sendCustomMsg(chat_id, text, buttons, photo) - except ReplyMarkupInvalid: - return await sendCustomMsg(chat_id, 
text, None, photo) + return await edit_message(message, text, buttons, photo, markdown) + except (MessageNotModified, MessageEmpty): + pass except Exception as e: - LOGGER.error(format_exc()) + LOGGER.error(str(e)) return str(e) -async def chat_info(channel_id): - if channel_id.startswith("-100"): - channel_id = int(channel_id) - elif channel_id.startswith("@"): - channel_id = channel_id.replace("@", "") - else: - return None - try: - return await bot.get_chat(channel_id) - except PeerIdInvalid as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - return None - - -async def isAdmin(message, user_id=None): - if message.chat.type == message.chat.type.PRIVATE: - return None - if user_id: - member = await message.chat.get_member(user_id) - else: - member = await message.chat.get_member(message.from_user.id) - return member.status in [member.status.ADMINISTRATOR, member.status.OWNER] - - -async def sendMultiMessage(chat_ids, text, buttons=None, photo=None): - msg_dict = {} - for channel_id in chat_ids.split(): - chat = await chat_info(channel_id) - try: - if photo: - try: - if photo == "Random": - photo = choice(IMAGES) - sent = await bot.send_photo( - chat_id=chat.id, - photo=photo, - caption=text, - reply_markup=buttons, - disable_notification=True, - ) - msg_dict[chat.id] = sent - continue - except IndexError: - pass - except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): - des_dir = await download_image_url(photo) - await sendMultiMessage(chat_ids, text, buttons, des_dir) - await aioremove(des_dir) - return None - except Exception as e: - LOGGER.error(str(e)) - sent = await bot.send_message( - chat_id=chat.id, - text=text, - disable_web_page_preview=True, - disable_notification=True, - reply_markup=buttons, - ) - msg_dict[chat.id] = sent - except FloodWait as f: - LOGGER.warning(str(f)) - await sleep(f.value * 1.2) - return await sendMultiMessage(chat_ids, text, buttons, photo) - except Exception as e: - LOGGER.error(str(e)) - return str(e) - return msg_dict - - -async def edit_message(message, text, buttons=None, photo=None): +async def send_file(message, file, caption="", buttons=None): try: - if message.media: - if photo: - return await message.edit_media( - InputMediaPhoto(photo, text), reply_markup=buttons - ) - return await message.edit_caption(caption=text, reply_markup=buttons) - await message.edit( - text=text, disable_web_page_preview=True, reply_markup=buttons + return await message.reply_document( + document=file, + quote=True, + caption=caption, + disable_notification=True, + reply_markup=buttons, ) except FloodWait as f: LOGGER.warning(str(f)) await sleep(f.value * 1.2) - return await edit_message(message, text, buttons, photo) - except (MessageNotModified, MessageEmpty): - pass + return await send_file(message, file, caption, buttons) except Exception as e: LOGGER.error(str(e)) return str(e) -async def sendFile(message, file, caption=None, buttons=None): +async def send_rss(text, chat_id, thread_id): try: - return await message.reply_document( - document=file, - quote=True, - caption=caption, + app = TgClient.user or TgClient.bot + return await app.send_message( + chat_id=chat_id, + text=text, + disable_web_page_preview=True, + message_thread_id=thread_id, disable_notification=True, - reply_markup=buttons, ) - except FloodWait as f: + except (FloodWait, FloodPremiumWait) as f: LOGGER.warning(str(f)) await sleep(f.value * 1.2) - return await sendFile(message, file, caption) + return await send_rss(text) except Exception as e: LOGGER.error(str(e)) return 
str(e) @@ -247,156 +171,242 @@ async def five_minute_del(message): async def delete_links(message): - if DELETE_LINKS: - if reply_to := message.reply_to_message: - await delete_message(reply_to) - await delete_message(message) + if not Config.DELETE_LINKS: + return + if reply_to := message.reply_to_message: + await delete_message(reply_to) + await delete_message(message) -async def delete_all_messages(): - async with status_reply_dict_lock: - try: - for key, data in list(status_reply_dict.items()): - del status_reply_dict[key] - await delete_message(data[0]) - except Exception as e: - LOGGER.error(str(e)) +async def auto_delete_message(cmd_message=None, bot_message=None): + await sleep(60) + if cmd_message is not None: + await delete_message(cmd_message) + if bot_message is not None: + await delete_message(bot_message) + + +async def delete_status(): + async with task_dict_lock: + for key, data in list(status_dict.items()): + try: + await delete_message(data["message"]) + del status_dict[key] + except Exception as e: + LOGGER.error(str(e)) -async def get_tg_link_content(link): +async def get_tg_link_message(link, user_id=""): message = None + links = [] + user_session = None + + if user_id: + if user_id in session_cache: + user_session = session_cache[user_id] + else: + user_dict = user_data.get(user_id, {}) + session_string = user_dict.get("session_string") + if session_string: + user_session = Client( + f"session_{user_id}", + Config.TELEGRAM_API, + Config.TELEGRAM_HASH, + session_string=session_string, + no_updates=True, + ) + await user_session.start() + session_cache[user_id] = user_session + else: + user_session = TgClient.user + if link.startswith("https://t.me/"): private = False msg = re_match( - r"https:\/\/t\.me\/(?:c\/)?([^\/]+)(?:\/[^\/]+)?\/([0-9]+)", link + r"https:\/\/t\.me\/(?:c\/)?([^\/]+)(?:\/[^\/]+)?\/([0-9-]+)", + link, ) else: private = True msg = re_match( - r"tg:\/\/openmessage\?user_id=([0-9]+)&message_id=([0-9]+)", link + r"tg:\/\/openmessage\?user_id=([0-9]+)&message_id=([0-9-]+)", + link, ) - if not user: - raise TgLinkError("USER_SESSION_STRING required for this private link!") + if not user_session: + raise TgLinkException( + "USER_SESSION_STRING required for this private link!", + ) + + chat = msg[1] + msg_id = msg[2] + if "-" in msg_id: + start_id, end_id = map(int, msg_id.split("-")) + msg_id = start_id + btw = end_id - start_id + if private: + link = link.split("&message_id=")[0] + links.append(f"{link}&message_id={start_id}") + for _ in range(btw): + start_id += 1 + links.append(f"{link}&message_id={start_id}") + else: + link = link.rsplit("/", 1)[0] + links.append(f"{link}/{start_id}") + for _ in range(btw): + start_id += 1 + links.append(f"{link}/{start_id}") + else: + msg_id = int(msg_id) - chat = msg.group(1) - msg_id = int(msg.group(2)) if chat.isdigit(): chat = int(chat) if private else int(f"-100{chat}") if not private: try: - message = await bot.get_messages(chat_id=chat, message_ids=msg_id) + message = await TgClient.bot.get_messages( + chat_id=chat, + message_ids=msg_id, + ) if message.empty: private = True except Exception as e: private = True - if not user: + if not user_session: raise e - if private and user: + if not private: + return (links, TgClient.bot) if links else (message, TgClient.bot) + if user_session: try: - user_message = await user.get_messages(chat_id=chat, message_ids=msg_id) + user_message = await user_session.get_messages( + chat_id=chat, + message_ids=msg_id, + ) except Exception as e: - raise TgLinkError( - f"You don't 
have access to this chat!. ERROR: {e}" - ) from e + raise TgLinkException("We don't have access to this chat!") from e if not user_message.empty: - return user_message, "user" - raise TgLinkError("Private: Please report!") - if not private: - return message, "bot" - raise TgLinkError("Bot can't download from GROUPS without joining!") + return (links, user_session) if links else (user_message, user_session) + return None + raise TgLinkException("Private: Please report!") -async def update_all_messages(force=False): - async with status_reply_dict_lock: - if ( - not status_reply_dict - or not Interval - or (not force and time() - next(iter(status_reply_dict.values()))[1] < 3) - ): - return - for chat_id in list(status_reply_dict.keys()): - status_reply_dict[chat_id][1] = time() - async with download_dict_lock: - msg, buttons = await sync_to_async(get_readable_message) - if msg is None: - return - async with status_reply_dict_lock: - for chat_id in list(status_reply_dict.keys()): - if ( - status_reply_dict[chat_id] - and msg != status_reply_dict[chat_id][0].text - ): - rmsg = await edit_message( - status_reply_dict[chat_id][0], msg, buttons - ) - if isinstance(rmsg, str) and rmsg.startswith("Telegram says: [400"): - del status_reply_dict[chat_id] - continue - status_reply_dict[chat_id][0].text = msg - status_reply_dict[chat_id][1] = time() +async def check_permission(_, chat, uploader_id, __): + member = await chat.get_member(uploader_id) + if ( + not member.privileges.can_manage_chat + or not member.privileges.can_delete_messages + ): + raise ValueError( + "You don't have enough privileges in this chat!", + ) -async def sendStatusMessage(msg): - async with download_dict_lock: - progress, buttons = await sync_to_async(get_readable_message) - if progress is None: +async def update_status_message(sid, force=False): + if intervals["stopAll"]: return - async with status_reply_dict_lock: - chat_id = msg.chat.id - if chat_id in list(status_reply_dict.keys()): - message = status_reply_dict[chat_id][0] - await delete_message(message) - del status_reply_dict[chat_id] - message = await send_message(msg, progress, buttons) - message.text = progress - status_reply_dict[chat_id] = [message, time()] - if not Interval: - Interval.append(SetInterval(1, update_all_messages)) - - -async def forcesub(message, ids, button=None): - join_button = {} - _msg = "" - for channel_id in ids.split(): - chat = await chat_info(channel_id) - try: - await chat.get_member(message.from_user.id) - except UserNotParticipant: - if username := chat.username: - invite_link = f"https://t.me/{username}" - else: - invite_link = chat.invite_link - join_button[chat.title] = invite_link - except RPCError as e: - LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") - except Exception as e: - LOGGER.error(f"{e} for {channel_id}") - if join_button: - if button is None: - button = ButtonMaker() - _msg = "You haven't joined our channel/group yet!" 
-        for key, value in join_button.items():
-            button.url(f"Join {key}", value, "footer")
-    return _msg, button
-
-
-async def user_info(client, userId):
-    return await client.get_users(userId)
+    async with task_dict_lock:
+        if not status_dict.get(sid):
+            if obj := intervals["status"].get(sid):
+                obj.cancel()
+                del intervals["status"][sid]
+            return
+        if not force and time() - status_dict[sid]["time"] < 3:
+            return
+        status_dict[sid]["time"] = time()
+        page_no = status_dict[sid]["page_no"]
+        status = status_dict[sid]["status"]
+        is_user = status_dict[sid]["is_user"]
+        page_step = status_dict[sid]["page_step"]
+        text, buttons = await get_readable_message(
+            sid,
+            is_user,
+            page_no,
+            status,
+            page_step,
+        )
+        if text is None:
+            del status_dict[sid]
+            if obj := intervals["status"].get(sid):
+                obj.cancel()
+                del intervals["status"][sid]
+            return
+        if text != status_dict[sid]["message"].text:
+            message = await edit_message(
+                status_dict[sid]["message"],
+                text,
+                buttons,
+                block=False,
+            )
+            if isinstance(message, str):
+                if message.startswith("Telegram says: [40"):
+                    del status_dict[sid]
+                    if obj := intervals["status"].get(sid):
+                        obj.cancel()
+                        del intervals["status"][sid]
+                else:
+                    LOGGER.error(
+                        f"Status with id: {sid} hasn't been updated. Error: {message}",
+                    )
+                return
+            status_dict[sid]["message"].text = text
+            status_dict[sid]["time"] = time()


-async def BotPm_check(message, button=None):
-    user_id = message.from_user.id
-    try:
-        temp_msg = await message._client.send_message(
-            chat_id=message.from_user.id, text="Checking Access..."
-        )
-        await temp_msg.delete()
-        return None, button
-    except Exception:
-        if button is None:
-            button = ButtonMaker()
-        _msg = "You haven't initiated the bot in a private message!"
-        button.callback("Start", f"aeon {user_id} private", "header")
-        return _msg, button
+async def send_status_message(msg, user_id=0):
+    if intervals["stopAll"]:
+        return
+    sid = user_id or msg.chat.id
+    is_user = bool(user_id)
+    async with task_dict_lock:
+        if sid in status_dict:
+            page_no = status_dict[sid]["page_no"]
+            status = status_dict[sid]["status"]
+            page_step = status_dict[sid]["page_step"]
+            text, buttons = await get_readable_message(
+                sid,
+                is_user,
+                page_no,
+                status,
+                page_step,
+            )
+            if text is None:
+                del status_dict[sid]
+                if obj := intervals["status"].get(sid):
+                    obj.cancel()
+                    del intervals["status"][sid]
+                return
+            message = status_dict[sid]["message"]
+            await delete_message(message)
+            message = await send_message(msg, text, buttons, block=False)
+            if isinstance(message, str):
+                LOGGER.error(
+                    f"Status with id: {sid} hasn't been sent. Error: {message}",
+                )
+                return
+            message.text = text
+            status_dict[sid].update({"message": message, "time": time()})
+        else:
+            text, buttons = await get_readable_message(sid, is_user)
+            if text is None:
+                return
+            message = await send_message(msg, text, buttons, block=False)
+            if isinstance(message, str):
+                LOGGER.error(
+                    f"Status with id: {sid} hasn't been sent. 
Error: {message}", + ) + return + message.text = text + status_dict[sid] = { + "message": message, + "time": time(), + "page_no": 1, + "page_step": 1, + "status": "All", + "is_user": is_user, + } + if not intervals["status"].get(sid) and not is_user: + intervals["status"][sid] = SetInterval( + 1, + update_status_message, + sid, + ) diff --git a/bot/modules/__init__.py b/bot/modules/__init__.py index e69de29bb..8ae6442d8 100644 --- a/bot/modules/__init__.py +++ b/bot/modules/__init__.py @@ -0,0 +1,90 @@ +from .bot_settings import edit_bot_settings, send_bot_settings +from .broadcast import broadcast +from .cancel_task import cancel, cancel_all_buttons, cancel_all_update, cancel_multi +from .chat_permission import add_sudo, authorize, remove_sudo, unauthorize +from .clone import clone_node +from .exec import aioexecute, clear, execute +from .file_selector import confirm_selection, select +from .force_start import remove_from_queue +from .gd_count import count_node +from .gd_delete import delete_file +from .gd_search import gdrive_search, select_type +from .help import arg_usage, bot_help +from .mediainfo import mediainfo +from .mirror_leech import ( + leech, + mirror, +) +from .restart import ( + confirm_restart, + restart_bot, + restart_notification, + restart_sessions, +) +from .rss import get_rss_menu, rss_listener +from .search import initiate_search_tools, torrent_search, torrent_search_update +from .services import log, log_callback, ping, start +from .shell import run_shell +from .speedtest import speedtest +from .stats import bot_stats, get_packages_version +from .status import status_pages, task_status +from .users_settings import ( + edit_user_settings, + get_users_settings, + send_user_settings, +) +from .ytdlp import ytdl, ytdl_leech + +__all__ = [ + "add_sudo", + "aioexecute", + "arg_usage", + "authorize", + "bot_help", + "bot_stats", + "broadcast", + "cancel", + "cancel_all_buttons", + "cancel_all_update", + "cancel_multi", + "clear", + "clone_node", + "confirm_restart", + "confirm_selection", + "count_node", + "delete_file", + "edit_bot_settings", + "edit_user_settings", + "execute", + "gdrive_search", + "get_packages_version", + "get_rss_menu", + "get_users_settings", + "initiate_search_tools", + "leech", + "log", + "log_callback", + "mediainfo", + "mirror", + "ping", + "remove_from_queue", + "remove_sudo", + "restart_bot", + "restart_notification", + "restart_sessions", + "rss_listener", + "run_shell", + "select", + "select_type", + "send_bot_settings", + "send_user_settings", + "speedtest", + "start", + "status_pages", + "task_status", + "torrent_search", + "torrent_search_update", + "unauthorize", + "ytdl", + "ytdl_leech", +] diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py deleted file mode 100644 index ca9c02ea4..000000000 --- a/bot/modules/authorize.py +++ /dev/null @@ -1,107 +0,0 @@ -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler - -from bot import DATABASE_URL, bot, user_data -from bot.helper.ext_utils.bot_utils import update_user_ldata -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import send_message - - -async def change_authorization(message, is_authorize): - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - else: - id_ = 
message.chat.id - if is_authorize: - success_message = "Authorized" - if id_ in user_data and user_data[id_].get("is_auth"): - success_message = "Already authorized!" - else: - update_user_ldata(id_, "is_auth", True) - if DATABASE_URL: - await DbManager().update_user_data(id_) - else: - success_message = "Unauthorized" - if id_ not in user_data or user_data[id_].get("is_auth"): - update_user_ldata(id_, "is_auth", False) - if DATABASE_URL: - await DbManager().update_user_data(id_) - else: - success_message = "Already unauthorized!" - await send_message(message, success_message) - - -async def change_sudo(message, is_sudo): - id_ = "" - msg = message.text.split() - if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: - id_ = reply_to.from_user.id - if is_sudo: - if id_: - if id_ in user_data and user_data[id_].get("is_sudo"): - success_message = "Already Sudo!" - else: - update_user_ldata(id_, "is_sudo", True) - if DATABASE_URL: - await DbManager().update_user_data(id_) - success_message = "Promoted as Sudo" - else: - success_message = ( - "Give ID or Reply To message of whom you want to Promote." - ) - elif id_ and id_ in user_data and user_data[id_].get("is_sudo"): - update_user_ldata(id_, "is_sudo", False) - if DATABASE_URL: - await DbManager().update_user_data(id_) - success_message = "Demoted" - else: - success_message = ( - "Give ID or Reply To message of whom you want to remove from Sudo" - ) - await send_message(message, success_message) - - -async def authorize(_, message): - await change_authorization(message, True) - - -async def unauthorize(_, message): - await change_authorization(message, False) - - -async def addSudo(_, message): - await change_sudo(message, True) - - -async def removeSudo(_, message): - await change_sudo(message, False) - - -bot.add_handler( - MessageHandler( - authorize, filters=command(BotCommands.AuthorizeCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - MessageHandler( - unauthorize, - filters=command(BotCommands.UnAuthorizeCommand) & CustomFilters.sudo, - ) -) -bot.add_handler( - MessageHandler( - addSudo, filters=command(BotCommands.AddSudoCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - MessageHandler( - removeSudo, filters=command(BotCommands.RmSudoCommand) & CustomFilters.sudo - ) -) diff --git a/bot/modules/bot_settings.py b/bot/modules/bot_settings.py index c355f54f8..7bcf66621 100644 --- a/bot/modules/bot_settings.py +++ b/bot/modules/bot_settings.py @@ -1,482 +1,155 @@ +from asyncio import ( + create_subprocess_exec, + create_subprocess_shell, + gather, + sleep, +) +from functools import partial from io import BytesIO -from os import getcwd, environ +from os import getcwd from time import time -from asyncio import sleep, create_subprocess_exec, create_subprocess_shell -from functools import partial -from collections import OrderedDict -from dotenv import load_dotenv from aiofiles import open as aiopen -from aioshutil import rmtree as aiormtree from aiofiles.os import path as aiopath from aiofiles.os import remove, rename -from pyrogram.enums import ChatType -from pyrogram.filters import regex, create, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler +from aioshutil import rmtree +from pyrogram.filters import create +from pyrogram.handlers import MessageHandler from bot import ( LOGGER, - DATABASE_URL, - GLOBAL_EXTENSION_FILTER, - Interval, - bot, aria2, - config_dict, aria2_options, - download_dict, - extra_buttons, - shorteners_list, - list_drives_dict, - 
status_reply_dict_lock, + drives_ids, + drives_names, + extension_filter, + index_urls, + intervals, + task_dict, ) -from bot.modules.torrent_search import initiate_search_tools -from bot.helper.ext_utils.bot_utils import SetInterval, new_thread, sync_to_async -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.ext_utils.help_strings import bset_display_dict +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.core.startup import update_variables +from bot.helper.ext_utils.bot_utils import SetInterval, new_task, sync_to_async +from bot.helper.ext_utils.db_handler import database from bot.helper.ext_utils.task_manager import start_from_queued -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.mirror_leech_utils.rclone_utils.serve import rclone_serve_booter from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( - sendFile, + delete_message, edit_message, + send_file, send_message, - update_all_messages, + update_status_message, ) -START = 0 -STATE = "view" +from .rss import add_job +from .search import initiate_search_tools + +start = 0 +state = "view" handler_dict = {} -default_values = { - "DEFAULT_UPLOAD": "gd", +DEFAULT_VALUES = { + "DOWNLOAD_DIR": "/usr/src/app/downloads/", + "LEECH_SPLIT_SIZE": TgClient.MAX_SPLIT_SIZE, + "RSS_DELAY": 600, "SEARCH_LIMIT": 0, "UPSTREAM_BRANCH": "main", - "TORRENT_TIMEOUT": 3000, + "DEFAULT_UPLOAD": "rc", } -bool_vars = [ - "AS_DOCUMENT", - "DELETE_LINKS", - "STOP_DUPLICATE", - "SET_COMMANDS", - "SHOW_MEDIAINFO", - "USE_SERVICE_ACCOUNTS", -] - - -async def load_config(): - BOT_TOKEN = environ.get("BOT_TOKEN", "") - if len(BOT_TOKEN) == 0: - BOT_TOKEN = config_dict["BOT_TOKEN"] - - TELEGRAM_API = environ.get("TELEGRAM_API", "") - if len(TELEGRAM_API) == 0: - TELEGRAM_API = config_dict["TELEGRAM_API"] - else: - TELEGRAM_API = int(TELEGRAM_API) - - TELEGRAM_HASH = environ.get("TELEGRAM_HASH", "") - if len(TELEGRAM_HASH) == 0: - TELEGRAM_HASH = config_dict["TELEGRAM_HASH"] - - BOT_MAX_TASKS = environ.get("BOT_MAX_TASKS", "") - BOT_MAX_TASKS = int(BOT_MAX_TASKS) if BOT_MAX_TASKS.isdigit() else "" - - OWNER_ID = environ.get("OWNER_ID", "") - OWNER_ID = config_dict["OWNER_ID"] if len(OWNER_ID) == 0 else int(OWNER_ID) - - GROUPS_EMAIL = environ.get("GROUPS_EMAIL", "") - if len(GROUPS_EMAIL) != 0: - GROUPS_EMAIL = GROUPS_EMAIL.lower() - - DATABASE_URL = environ.get("DATABASE_URL", "") - if len(DATABASE_URL) == 0: - DATABASE_URL = "" - - GDRIVE_ID = environ.get("GDRIVE_ID", "") - if len(GDRIVE_ID) == 0: - GDRIVE_ID = "" - - RCLONE_PATH = environ.get("RCLONE_PATH", "") - if len(RCLONE_PATH) == 0: - RCLONE_PATH = "" - - DEFAULT_UPLOAD = environ.get("DEFAULT_UPLOAD", "") - if DEFAULT_UPLOAD != "rc": - DEFAULT_UPLOAD = "gd" - - RCLONE_FLAGS = environ.get("RCLONE_FLAGS", "") - if len(RCLONE_FLAGS) == 0: - RCLONE_FLAGS = "" - - EXTENSION_FILTER = environ.get("EXTENSION_FILTER", "") - if len(EXTENSION_FILTER) > 0: - fx = EXTENSION_FILTER.split() - GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append("aria2") - for x in fx: - if x.strip().startswith("."): - clean_x = x.lstrip(".") - GLOBAL_EXTENSION_FILTER.append(clean_x.strip().lower()) - - METADATA_KEY = environ.get("METADATA_KEY", "") - if len(METADATA_KEY) == 0: - METADATA_KEY = "" - - MEGA_EMAIL = environ.get("MEGA_EMAIL", "") - MEGA_PASSWORD = environ.get("MEGA_PASSWORD", "") - if 
len(MEGA_EMAIL) == 0 or len(MEGA_PASSWORD) == 0: - MEGA_EMAIL = "" - MEGA_PASSWORD = "" - - INDEX_URL = environ.get("INDEX_URL", "").rstrip("/") - if len(INDEX_URL) == 0: - INDEX_URL = "" - - SEARCH_API_LINK = environ.get("SEARCH_API_LINK", "").rstrip("/") - if len(SEARCH_API_LINK) == 0: - SEARCH_API_LINK = "" - - FILELION_API = environ.get("FILELION_API", "") - if len(FILELION_API) == 0: - FILELION_API = "" - - LEECH_LOG_ID = environ.get("LEECH_LOG_ID", "") - LEECH_LOG_ID = "" if len(LEECH_LOG_ID) == 0 else int(LEECH_LOG_ID) - - if len(download_dict) != 0: - async with status_reply_dict_lock: - if Interval: - Interval[0].cancel() - Interval.clear() - Interval.append(SetInterval(1, update_all_messages)) - - YT_DLP_OPTIONS = environ.get("YT_DLP_OPTIONS", "") - if len(YT_DLP_OPTIONS) == 0: - YT_DLP_OPTIONS = "" - - SEARCH_LIMIT = environ.get("SEARCH_LIMIT", "") - SEARCH_LIMIT = 0 if len(SEARCH_LIMIT) == 0 else int(SEARCH_LIMIT) - - LEECH_DUMP_ID = environ.get("LEECH_DUMP_ID", "") - if len(LEECH_DUMP_ID) == 0: - LEECH_DUMP_ID = "" - - CMD_SUFFIX = environ.get("CMD_SUFFIX", "") - - USER_SESSION_STRING = environ.get("USER_SESSION_STRING", "") - - TORRENT_TIMEOUT = environ.get("TORRENT_TIMEOUT", "") - TORRENT_TIMEOUT = 3000 if len(TORRENT_TIMEOUT) == 0 else int(TORRENT_TIMEOUT) - downloads = aria2.get_downloads() - if len(TORRENT_TIMEOUT) == 0: - for download in downloads: - if not download.is_complete: - try: - await sync_to_async( - aria2.client.change_option, - download.gid, - {"bt-stop-timeout": "0"}, - ) - except Exception as e: - LOGGER.error(e) - aria2_options["bt-stop-timeout"] = "0" - if DATABASE_URL: - await DbManager().update_aria2("bt-stop-timeout", "0") - TORRENT_TIMEOUT = "" - else: - for download in downloads: - if not download.is_complete: - try: - await sync_to_async( - aria2.client.change_option, - download.gid, - {"bt-stop-timeout": TORRENT_TIMEOUT}, - ) - except Exception as e: - LOGGER.error(e) - aria2_options["bt-stop-timeout"] = TORRENT_TIMEOUT - if DATABASE_URL: - await DbManager().update_aria2("bt-stop-timeout", TORRENT_TIMEOUT) - TORRENT_TIMEOUT = int(TORRENT_TIMEOUT) - - QUEUE_ALL = environ.get("QUEUE_ALL", "") - QUEUE_ALL = "" if len(QUEUE_ALL) == 0 else int(QUEUE_ALL) - - QUEUE_DOWNLOAD = environ.get("QUEUE_DOWNLOAD", "") - QUEUE_DOWNLOAD = "" if len(QUEUE_DOWNLOAD) == 0 else int(QUEUE_DOWNLOAD) - - QUEUE_UPLOAD = environ.get("QUEUE_UPLOAD", "") - QUEUE_UPLOAD = "" if len(QUEUE_UPLOAD) == 0 else int(QUEUE_UPLOAD) - - STREAMWISH_API = environ.get("STREAMWISH_API", "") - if len(STREAMWISH_API) == 0: - STREAMWISH_API = "" - - STOP_DUPLICATE = environ.get("STOP_DUPLICATE", "") - STOP_DUPLICATE = STOP_DUPLICATE.lower() == "true" - - USE_SERVICE_ACCOUNTS = environ.get("USE_SERVICE_ACCOUNTS", "") - USE_SERVICE_ACCOUNTS = USE_SERVICE_ACCOUNTS.lower() == "true" - - AS_DOCUMENT = environ.get("AS_DOCUMENT", "") - AS_DOCUMENT = AS_DOCUMENT.lower() == "true" - - SHOW_MEDIAINFO = environ.get("SHOW_MEDIAINFO", "") - SHOW_MEDIAINFO = SHOW_MEDIAINFO.lower() == "true" - - MEDIA_GROUP = environ.get("MEDIA_GROUP", "") - MEDIA_GROUP = MEDIA_GROUP.lower() == "true" - - await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait() - BASE_URL = environ.get("BASE_URL", "").rstrip("/") - if len(BASE_URL) == 0: - BASE_URL = "" - else: - await create_subprocess_shell( - "gunicorn web.wserver:app --bind 0.0.0.0:80 --worker-class gevent" - ) - - UPSTREAM_REPO = environ.get("UPSTREAM_REPO", "") - if len(UPSTREAM_REPO) == 0: - UPSTREAM_REPO = "" - - UPSTREAM_BRANCH = 
environ.get("UPSTREAM_BRANCH", "") - if len(UPSTREAM_BRANCH) == 0: - UPSTREAM_BRANCH = "main" - - TORRENT_LIMIT = environ.get("TORRENT_LIMIT", "") - TORRENT_LIMIT = "" if len(TORRENT_LIMIT) == 0 else float(TORRENT_LIMIT) - - DIRECT_LIMIT = environ.get("DIRECT_LIMIT", "") - DIRECT_LIMIT = "" if len(DIRECT_LIMIT) == 0 else float(DIRECT_LIMIT) - - YTDLP_LIMIT = environ.get("YTDLP_LIMIT", "") - YTDLP_LIMIT = "" if len(YTDLP_LIMIT) == 0 else float(YTDLP_LIMIT) - - GDRIVE_LIMIT = environ.get("GDRIVE_LIMIT", "") - GDRIVE_LIMIT = "" if len(GDRIVE_LIMIT) == 0 else float(GDRIVE_LIMIT) - CLONE_LIMIT = environ.get("CLONE_LIMIT", "") - CLONE_LIMIT = "" if len(CLONE_LIMIT) == 0 else float(CLONE_LIMIT) - MEGA_LIMIT = environ.get("MEGA_LIMIT", "") - MEGA_LIMIT = "" if len(MEGA_LIMIT) == 0 else float(MEGA_LIMIT) - - LEECH_LIMIT = environ.get("LEECH_LIMIT", "") - LEECH_LIMIT = "" if len(LEECH_LIMIT) == 0 else float(LEECH_LIMIT) - - DELETE_LINKS = environ.get("DELETE_LINKS", "") - DELETE_LINKS = DELETE_LINKS.lower() == "true" - - FSUB_IDS = environ.get("FSUB_IDS", "") - if len(FSUB_IDS) == 0: - FSUB_IDS = "" - - MIRROR_LOG_ID = environ.get("MIRROR_LOG_ID", "") - if len(MIRROR_LOG_ID) == 0: - MIRROR_LOG_ID = "" - - ATTACHMENT_URL = environ.get("ATTACHMENT_URL", "") - if len(ATTACHMENT_URL) == 0: - ATTACHMENT_URL = "" - - USER_MAX_TASKS = environ.get("USER_MAX_TASKS", "") - USER_MAX_TASKS = "" if len(USER_MAX_TASKS) == 0 else int(USER_MAX_TASKS) - - PLAYLIST_LIMIT = environ.get("PLAYLIST_LIMIT", "") - PLAYLIST_LIMIT = "" if len(PLAYLIST_LIMIT) == 0 else int(PLAYLIST_LIMIT) - - IMAGES = environ.get("IMAGES", "") - IMAGES = ( - IMAGES.replace("'", "") - .replace('"', "") - .replace("[", "") - .replace("]", "") - .replace(",", "") - ).split() - - SET_COMMANDS = environ.get("SET_COMMANDS", "") - SET_COMMANDS = SET_COMMANDS.lower() == "true" - - TOKEN_TIMEOUT = environ.get("TOKEN_TIMEOUT", "") - TOKEN_TIMEOUT = int(TOKEN_TIMEOUT) if TOKEN_TIMEOUT.isdigit() else "" - - list_drives_dict.clear() - - if GDRIVE_ID: - list_drives_dict["Main"] = {"drive_id": GDRIVE_ID, "index_link": INDEX_URL} - - if await aiopath.exists("list_drives.txt"): - async with aiopen("list_drives.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - sep = 2 if line.strip().split()[-1].startswith("http") else 1 - temp = line.strip().rsplit(maxsplit=sep) - name = "Main Custom" if temp[0].casefold() == "Main" else temp[0] - list_drives_dict[name] = { - "drive_id": temp[1], - "index_link": (temp[2] if sep == 2 else ""), - } - - extra_buttons.clear() - if await aiopath.exists("buttons.txt"): - async with aiopen("buttons.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - temp = line.strip().split() - if len(extra_buttons.keys()) == 4: - break - if len(temp) == 2: - extra_buttons[temp[0].replace("_", " ")] = temp[1] - - shorteners_list.clear() - if await aiopath.exists("shorteners.txt"): - async with aiopen("shorteners.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - temp = line.strip().split() - if len(temp) == 2: - shorteners_list.append({"domain": temp[0], "api_key": temp[1]}) - - config_dict.update( - { - "AS_DOCUMENT": AS_DOCUMENT, - "BASE_URL": BASE_URL, - "BOT_TOKEN": BOT_TOKEN, - "BOT_MAX_TASKS": BOT_MAX_TASKS, - "CMD_SUFFIX": CMD_SUFFIX, - "DATABASE_URL": DATABASE_URL, - "DEFAULT_UPLOAD": DEFAULT_UPLOAD, - "DELETE_LINKS": DELETE_LINKS, - "TORRENT_LIMIT": TORRENT_LIMIT, - "DIRECT_LIMIT": DIRECT_LIMIT, - "YTDLP_LIMIT": YTDLP_LIMIT, - "GDRIVE_LIMIT": GDRIVE_LIMIT, - "CLONE_LIMIT": 
CLONE_LIMIT, - "MEGA_LIMIT": MEGA_LIMIT, - "LEECH_LIMIT": LEECH_LIMIT, - "FSUB_IDS": FSUB_IDS, - "FILELION_API": FILELION_API, - "USER_MAX_TASKS": USER_MAX_TASKS, - "PLAYLIST_LIMIT": PLAYLIST_LIMIT, - "MIRROR_LOG_ID": MIRROR_LOG_ID, - "LEECH_DUMP_ID": LEECH_DUMP_ID, - "IMAGES": IMAGES, - "EXTENSION_FILTER": EXTENSION_FILTER, - "ATTACHMENT_URL": ATTACHMENT_URL, - "GDRIVE_ID": GDRIVE_ID, - "INDEX_URL": INDEX_URL, - "LEECH_LOG_ID": LEECH_LOG_ID, - "TOKEN_TIMEOUT": TOKEN_TIMEOUT, - "MEDIA_GROUP": MEDIA_GROUP, - "MEGA_EMAIL": MEGA_EMAIL, - "MEGA_PASSWORD": MEGA_PASSWORD, - "METADATA_KEY": METADATA_KEY, - "OWNER_ID": OWNER_ID, - "QUEUE_ALL": QUEUE_ALL, - "QUEUE_DOWNLOAD": QUEUE_DOWNLOAD, - "QUEUE_UPLOAD": QUEUE_UPLOAD, - "RCLONE_FLAGS": RCLONE_FLAGS, - "RCLONE_PATH": RCLONE_PATH, - "SEARCH_API_LINK": SEARCH_API_LINK, - "SEARCH_LIMIT": SEARCH_LIMIT, - "SET_COMMANDS": SET_COMMANDS, - "SHOW_MEDIAINFO": SHOW_MEDIAINFO, - "STOP_DUPLICATE": STOP_DUPLICATE, - "STREAMWISH_API": STREAMWISH_API, - "TELEGRAM_API": TELEGRAM_API, - "TELEGRAM_HASH": TELEGRAM_HASH, - "TORRENT_TIMEOUT": TORRENT_TIMEOUT, - "UPSTREAM_REPO": UPSTREAM_REPO, - "UPSTREAM_BRANCH": UPSTREAM_BRANCH, - "USER_SESSION_STRING": USER_SESSION_STRING, - "GROUPS_EMAIL": GROUPS_EMAIL, - "USE_SERVICE_ACCOUNTS": USE_SERVICE_ACCOUNTS, - "YT_DLP_OPTIONS": YT_DLP_OPTIONS, - } - ) - - if DATABASE_URL: - await DbManager().update_config(config_dict) - await initiate_search_tools() - await start_from_queued() - - -async def get_buttons(key=None, edit_type=None, edit_mode=None, mess=None): +async def get_buttons(key=None, edit_type=None): buttons = ButtonMaker() if key is None: - buttons.callback("Config Variables", "botset var") - buttons.callback("Private Files", "botset private") - buttons.callback("Close", "botset close") + buttons.data_button("Config Variables", "botset var") + buttons.data_button("Private Files", "botset private") + buttons.data_button("Close", "botset close") msg = "Bot Settings:" + elif edit_type is not None: + if edit_type == "botvar": + msg = "" + buttons.data_button("Back", "botset var") + if key not in ["TELEGRAM_HASH", "TELEGRAM_API", "OWNER_ID", "BOT_TOKEN"]: + buttons.data_button("Default", f"botset resetvar {key}") + buttons.data_button("Close", "botset close") + if key in [ + "CMD_SUFFIX", + "OWNER_ID", + "USER_SESSION_STRING", + "TELEGRAM_HASH", + "TELEGRAM_API", + "BOT_TOKEN", + "DOWNLOAD_DIR", + "SUDO_USERS", + "AUTHORIZED_CHATS", + ]: + msg += "Restart required for this edit to take effect! You will not see the changes in bot vars, the edit will be in database only!\n\n" + msg += f"Send a valid value for {key}. Current value is '{Config.get(key)}'. 
Timeout: 60 sec" elif key == "var": - for k in list(OrderedDict(sorted(config_dict.items())).keys())[ - START : 10 + START - ]: - buttons.callback(k, f"botset editvar {k}") - buttons.callback("Back", "botset back") - buttons.callback("Close", "botset close") - for x in range(0, len(config_dict) - 1, 10): - buttons.callback( - f"{int(x/10)+1}", f"botset start var {x}", position="footer" + conf_dict = Config.get_all() + for k in list(conf_dict.keys())[start : 10 + start]: + if ( + key + in [ + "CMD_SUFFIX", + "OWNER_ID", + "USER_SESSION_STRING", + "TELEGRAM_HASH", + "TELEGRAM_API", + "BOT_TOKEN", + "DOWNLOAD_DIR", + "SUDO_USERS", + "AUTHORIZED_CHATS", + ] + and not Config.DATABASE_URL + ): + continue + if k == "DATABASE_URL" and state != "view": + continue + buttons.data_button(k, f"botset botvar {k}") + if state == "view": + buttons.data_button("Edit", "botset edit var") + else: + buttons.data_button("View", "botset view var") + buttons.data_button("Back", "botset back") + buttons.data_button("Close", "botset close") + for x in range(0, len(conf_dict), 10): + buttons.data_button( + f"{int(x / 10)}", + f"botset start var {x}", + position="footer", ) - msg = f"Config Variables | Page: {int(START/10)+1}" + msg = f"Config Variables | Page: {int(start / 10)} | State: {state}" elif key == "private": - buttons.callback("Back", "botset back") - buttons.callback("Close", "botset close") - msg = "Send private files: config.env, token.pickle, cookies.txt, accounts.zip, terabox.txt, .netrc, or any other files!\n\nTo delete a private file, send only the file name as a text message.\n\nPlease note: Changes to .netrc will not take effect for aria2c until it's restarted.\n\nTimeout: 60 seconds" - elif edit_type == "editvar": - msg = f"Variable: {key}\n\n" - msg += f'Description: {bset_display_dict.get(key, "No Description Provided")}\n\n' - if mess.chat.type == ChatType.PRIVATE: - msg += f'Value: {config_dict.get(key, "None")}\n\n' - elif key not in bool_vars: - buttons.callback( - "View value", f"botset showvar {key}", position="header" - ) - buttons.callback("Back", "botset back var", position="footer") - if key not in bool_vars: - if not edit_mode: - buttons.callback("Edit Value", f"botset editvar {key} edit") - else: - buttons.callback("Stop Edit", f"botset editvar {key}") - if ( - key not in ["TELEGRAM_HASH", "TELEGRAM_API", "OWNER_ID", "BOT_TOKEN"] - and key not in bool_vars - ): - buttons.callback("Reset", f"botset resetvar {key}") - buttons.callback("Close", "botset close", position="footer") - if edit_mode and key in [ - "CMD_SUFFIX", - "OWNER_ID", - "USER_SESSION_STRING", - "TELEGRAM_HASH", - "TELEGRAM_API", - "DATABASE_URL", - "BOT_TOKEN", - ]: - msg += "Note: Restart required for this edit to take effect!\n\n" - if edit_mode and key not in bool_vars: - msg += "Send a valid value for the above Var. Timeout: 60 sec" - if key in bool_vars: - if not config_dict.get(key): - buttons.callback("Make it True", f"botset boolvar {key} on") - else: - buttons.callback("Make it False", f"botset boolvar {key} off") - button = buttons.column(1) if key is None else buttons.column(2) + buttons.data_button("Back", "botset back") + buttons.data_button("Close", "botset close") + msg = """Send private file: config.py, token.pickle, rclone.conf, accounts.zip, list_drives.txt, cookies.txt, .netrc or any other private file! +To delete private file send only the file name as text message. +Note: Changing .netrc will not take effect for aria2c until restart. 
+Timeout: 60 sec""" + button = buttons.build_menu(1) if key is None else buttons.build_menu(2) return msg, button -async def update_buttons(message, key=None, edit_type=None, edit_mode=None): - msg, button = await get_buttons(key, edit_type, edit_mode, message) +async def update_buttons(message, key=None, edit_type=None): + msg, button = await get_buttons(key, edit_type) await edit_message(message, msg, button) +@new_task async def edit_variable(_, message, pre_message, key): handler_dict[message.chat.id] = False value = message.text - if key == "LEECH_LOG_ID": - value = int(value) + if value.lower() == "true": + value = True + elif value.lower() == "false": + value = False + if key == "INCOMPLETE_TASK_NOTIFIER" and Config.DATABASE_URL: + await database.trunc_table("tasks") + elif key == "DOWNLOAD_DIR": + if not value.endswith("/"): + value += "/" elif key == "TORRENT_TIMEOUT": value = int(value) downloads = await sync_to_async(aria2.get_downloads) @@ -491,113 +164,124 @@ async def edit_variable(_, message, pre_message, key): except Exception as e: LOGGER.error(e) aria2_options["bt-stop-timeout"] = f"{value}" + elif key == "LEECH_SPLIT_SIZE": + value = min(int(value), TgClient.MAX_SPLIT_SIZE) elif key == "EXTENSION_FILTER": fx = value.split() - GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append(".aria2") + extension_filter.clear() + extension_filter.extend(["aria2", "!qB"]) for x in fx: - if x.strip().startswith("."): - clean_x = x.lstrip(".") - GLOBAL_EXTENSION_FILTER.append(clean_x.strip().lower()) + x = x.lstrip(".") + extension_filter.append(x.strip().lower()) elif key == "GDRIVE_ID": - list_drives_dict["Main"] = { - "drive_id": value, - "index_link": config_dict["INDEX_URL"], - } + if drives_names and drives_names[0] == "Main": + drives_ids[0] = value + else: + drives_ids.insert(0, value) elif key == "INDEX_URL": - list_drives_dict["Main"] = { - "drive_id": config_dict["GDRIVE_ID"], - "index_link": value, - } + if drives_names and drives_names[0] == "Main": + index_urls[0] = value + else: + index_urls.insert(0, value) elif value.isdigit(): value = int(value) - config_dict[key] = value - await update_buttons(pre_message, key, "editvar", False) - await message.delete() - if DATABASE_URL: - await DbManager().update_config({key: value}) - if key == "SEARCH_API_LINK": + elif (value.startswith("[") and value.endswith("]")) or ( + value.startswith("{") and value.endswith("}") + ): + value = eval(value) + if key not in [ + "CMD_SUFFIX", + "OWNER_ID", + "USER_SESSION_STRING", + "TELEGRAM_HASH", + "TELEGRAM_API", + "BOT_TOKEN", + "DOWNLOAD_DIR", + "SUDO_USERS", + "AUTHORIZED_CHATS", + ]: + Config.set(key, value) + await update_buttons(pre_message, "var") + await delete_message(message) + await database.update_config({key: value}) + if key in ["SEARCH_PLUGINS", "SEARCH_API_LINK"]: await initiate_search_tools() elif key in ["QUEUE_ALL", "QUEUE_DOWNLOAD", "QUEUE_UPLOAD"]: await start_from_queued() - - + elif key in [ + "RCLONE_SERVE_URL", + "RCLONE_SERVE_PORT", + "RCLONE_SERVE_USER", + "RCLONE_SERVE_PASS", + ]: + await rclone_serve_booter() + elif key == "RSS_DELAY": + add_job() + + +@new_task async def update_private_file(_, message, pre_message): handler_dict[message.chat.id] = False if not message.media and (file_name := message.text): fn = file_name.rsplit(".zip", 1)[0] - if await aiopath.isfile(fn) and file_name != "config.env": + if await aiopath.isfile(fn) and file_name != "config.py": await remove(fn) if fn == "accounts": if await aiopath.exists("accounts"): - await 
aiormtree("accounts") + await rmtree("accounts", ignore_errors=True) if await aiopath.exists("rclone_sa"): - await aiormtree("rclone_sa") - config_dict["USE_SERVICE_ACCOUNTS"] = False - if DATABASE_URL: - await DbManager().update_config({"USE_SERVICE_ACCOUNTS": False}) + await rmtree("rclone_sa", ignore_errors=True) + Config.USE_SERVICE_ACCOUNTS = False + await database.update_config({"USE_SERVICE_ACCOUNTS": False}) elif file_name in [".netrc", "netrc"]: await (await create_subprocess_exec("touch", ".netrc")).wait() await (await create_subprocess_exec("chmod", "600", ".netrc")).wait() await ( await create_subprocess_exec("cp", ".netrc", "/root/.netrc") ).wait() - elif file_name in ["buttons.txt", "buttons"]: - extra_buttons.clear() - await message.delete() + await delete_message(message) elif doc := message.document: file_name = doc.file_name - await message.download(file_name=f"{getcwd()}/{file_name}") + fpath = f"{getcwd()}/{file_name}" + if await aiopath.exists(fpath): + await remove(fpath) + await message.download(file_name=fpath) if file_name == "accounts.zip": if await aiopath.exists("accounts"): - await aiormtree("accounts") + await rmtree("accounts", ignore_errors=True) if await aiopath.exists("rclone_sa"): - await aiormtree("rclone_sa") + await rmtree("rclone_sa", ignore_errors=True) await ( await create_subprocess_exec( - "7z", "x", "-o.", "-aoa", "accounts.zip", "accounts/*.json" + "7z", + "x", + "-o.", + "-aoa", + "accounts.zip", + "accounts/*.json", ) ).wait() await ( await create_subprocess_exec("chmod", "-R", "777", "accounts") ).wait() elif file_name == "list_drives.txt": - list_drives_dict.clear() - if GDRIVE_ID := config_dict["GDRIVE_ID"]: - list_drives_dict["Main"] = { - "drive_id": GDRIVE_ID, - "index_link": config_dict["INDEX_URL"], - } + drives_ids.clear() + drives_names.clear() + index_urls.clear() + if Config.GDRIVE_ID: + drives_names.append("Main") + drives_ids.append(Config.GDRIVE_ID) + index_urls.append(Config.INDEX_URL) async with aiopen("list_drives.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - sep = 2 if line.strip().split()[-1].startswith("http") else 1 - temp = line.strip().rsplit(maxsplit=sep) - name = "Main Custom" if temp[0].casefold() == "Main" else temp[0] - list_drives_dict[name] = { - "drive_id": temp[1], - "index_link": (temp[2] if sep == 2 else ""), - } - elif file_name == "buttons.txt": - extra_buttons.clear() - async with aiopen("buttons.txt", "r+") as f: lines = await f.readlines() for line in lines: temp = line.strip().split() - if len(extra_buttons.keys()) == 4: - break - if len(temp) == 2: - extra_buttons[temp[0].replace("_", " ")] = temp[1] - elif file_name == "shorteners.txt": - shorteners_list.clear() - async with aiopen("shorteners.txt", "r+") as f: - lines = await f.readlines() - for line in lines: - temp = line.strip().split() - if len(temp) == 2: - shorteners_list.append( - {"domain": temp[0], "api_key": temp[1]} - ) + drives_ids.append(temp[1]) + drives_names.append(temp[0].replace("_", " ")) + if len(temp) > 2: + index_urls.append(temp[2]) + else: + index_urls.append("") elif file_name in [".netrc", "netrc"]: if file_name == "netrc": await rename("netrc", ".netrc") @@ -606,13 +290,13 @@ async def update_private_file(_, message, pre_message): await ( await create_subprocess_exec("cp", ".netrc", "/root/.netrc") ).wait() - elif file_name == "config.env": - load_dotenv("config.env", override=True) + elif file_name == "config.py": await load_config() - await message.delete() + await delete_message(message) + if 
file_name == "rclone.conf": + await rclone_serve_booter() await update_buttons(pre_message) - if DATABASE_URL: - await DbManager().update_private_file(file_name) + await database.update_private_file(file_name) if await aiopath.exists("accounts.zip"): await remove("accounts.zip") @@ -627,11 +311,12 @@ async def event_filter(_, __, event): return bool( user.id == query.from_user.id and event.chat.id == chat_id - and (event.text or event.document and document) + and (event.text or (event.document and document)), ) handler = client.add_handler( - MessageHandler(pfunc, filters=create(event_filter)), group=-1 + MessageHandler(pfunc, filters=create(event_filter)), + group=-1, ) while handler_dict[chat_id]: await sleep(0.5) @@ -641,34 +326,30 @@ async def event_filter(_, __, event): client.remove_handler(*handler) -@new_thread +@new_task async def edit_bot_settings(client, query): data = query.data.split() message = query.message + handler_dict[message.chat.id] = False if data[1] == "close": - handler_dict[message.chat.id] = False await query.answer() - await message.delete() - await message.reply_to_message.delete() + await delete_message(message.reply_to_message) + await delete_message(message) elif data[1] == "back": - handler_dict[message.chat.id] = False await query.answer() - key = data[2] if len(data) == 3 else None - if key is None: - globals()["START"] = 0 - await update_buttons(message, key) + globals()["start"] = 0 + await update_buttons(message, None) elif data[1] == "var": await query.answer() await update_buttons(message, data[1]) elif data[1] == "resetvar": - handler_dict[message.chat.id] = False - await query.answer("Reset done!", show_alert=True) + await query.answer() value = "" - if data[2] in default_values: - value = default_values[data[2]] + if data[2] in DEFAULT_VALUES: + value = DEFAULT_VALUES[data[2]] elif data[2] == "EXTENSION_FILTER": - GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append(".aria2") + extension_filter.clear() + extension_filter.extend(["aria2", "!qB"]) elif data[2] == "TORRENT_TIMEOUT": downloads = await sync_to_async(aria2.get_downloads) for download in downloads: @@ -682,86 +363,153 @@ async def edit_bot_settings(client, query): except Exception as e: LOGGER.error(e) aria2_options["bt-stop-timeout"] = "0" - if DATABASE_URL: - await DbManager().update_aria2("bt-stop-timeout", "0") + await database.update_aria2("bt-stop-timeout", "0") elif data[2] == "BASE_URL": await ( await create_subprocess_exec("pkill", "-9", "-f", "gunicorn") ).wait() - config_dict[data[2]] = value - await update_buttons(message, data[2], "editvar", False) - if DATABASE_URL: - await DbManager().update_config({data[2]: value}) - if data[2] == "SEARCH_API_LINK": + elif data[2] == "BASE_URL_PORT": + value = 80 + if Config.BASE_URL: + await ( + await create_subprocess_exec("pkill", "-9", "-f", "gunicorn") + ).wait() + await create_subprocess_shell( + "gunicorn web.wserver:app --bind 0.0.0.0:80 --worker-class gevent", + ) + elif data[2] == "GDRIVE_ID": + if drives_names and drives_names[0] == "Main": + drives_names.pop(0) + drives_ids.pop(0) + index_urls.pop(0) + elif data[2] == "INDEX_URL": + if drives_names and drives_names[0] == "Main": + index_urls[0] = "" + elif data[2] == "INCOMPLETE_TASK_NOTIFIER": + await database.trunc_table("tasks") + Config.set(data[2], value) + await update_buttons(message, "var") + if data[2] == "DATABASE_URL": + await database.disconnect() + await database.update_config({data[2]: value}) + if data[2] in ["SEARCH_PLUGINS", "SEARCH_API_LINK"]: 
await initiate_search_tools() elif data[2] in ["QUEUE_ALL", "QUEUE_DOWNLOAD", "QUEUE_UPLOAD"]: await start_from_queued() + elif data[2] in [ + "RCLONE_SERVE_URL", + "RCLONE_SERVE_PORT", + "RCLONE_SERVE_USER", + "RCLONE_SERVE_PASS", + ]: + await rclone_serve_booter() elif data[1] == "private": - handler_dict[message.chat.id] = False await query.answer() await update_buttons(message, data[1]) pfunc = partial(update_private_file, pre_message=message) rfunc = partial(update_buttons, message) await event_handler(client, query, pfunc, rfunc, True) - elif data[1] == "boolvar": - handler_dict[message.chat.id] = False - value = data[3] == "on" - await query.answer( - f"Successfully variable changed to {value}!", show_alert=True - ) - config_dict[data[2]] = value - await update_buttons(message, data[2], "editvar", False) - if DATABASE_URL: - await DbManager().update_config({data[2]: value}) - elif data[1] == "editvar": - handler_dict[message.chat.id] = False + elif data[1] == "botvar" and state == "edit": await query.answer() - edit_mode = len(data) == 4 - await update_buttons(message, data[2], data[1], edit_mode) - if data[2] in bool_vars or not edit_mode: - return + await update_buttons(message, data[2], data[1]) pfunc = partial(edit_variable, pre_message=message, key=data[2]) - rfunc = partial(update_buttons, message, data[2], data[1], edit_mode) + rfunc = partial(update_buttons, message, "var") await event_handler(client, query, pfunc, rfunc) - elif data[1] == "showvar": - value = config_dict[data[2]] - if len(str(value)) > 200: + elif data[1] == "botvar" and state == "view": + value = f"{Config.get(data[2])}" + if len(value) > 200: await query.answer() with BytesIO(str.encode(value)) as out_file: out_file.name = f"{data[2]}.txt" - await sendFile(message, out_file) + await send_file(message, out_file) return if value == "": value = None await query.answer(f"{value}", show_alert=True) elif data[1] == "edit": await query.answer() - globals()["STATE"] = "edit" + globals()["state"] = "edit" await update_buttons(message, data[2]) elif data[1] == "view": await query.answer() - globals()["STATE"] = "view" + globals()["state"] = "view" await update_buttons(message, data[2]) elif data[1] == "start": await query.answer() - if int(data[3]) != START: - globals()["START"] = int(data[3]) + if start != int(data[3]): + globals()["start"] = int(data[3]) await update_buttons(message, data[2]) -async def bot_settings(_, message): +@new_task +async def send_bot_settings(_, message): + handler_dict[message.chat.id] = False msg, button = await get_buttons() - globals()["START"] = 0 + globals()["start"] = 0 await send_message(message, msg, button) -bot.add_handler( - MessageHandler( - bot_settings, filters=command(BotCommands.BotSetCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - CallbackQueryHandler( - edit_bot_settings, filters=regex("^botset") & CustomFilters.sudo - ) -) +async def load_config(): + Config.load() + drives_ids.clear() + drives_names.clear() + index_urls.clear() + await update_variables() + + if not await aiopath.exists("accounts"): + Config.USE_SERVICE_ACCOUNTS = False + + if len(task_dict) != 0 and (st := intervals["status"]): + for key, intvl in list(st.items()): + intvl.cancel() + intervals["status"][key] = SetInterval( + 1, + update_status_message, + key, + ) + + downloads = aria2.get_downloads() + if not Config.TORRENT_TIMEOUT: + for download in downloads: + if not download.is_complete: + try: + await sync_to_async( + aria2.client.change_option, + download.gid, + {"bt-stop-timeout": 
"0"}, + ) + except Exception as e: + LOGGER.error(e) + aria2_options["bt-stop-timeout"] = "0" + await database.update_aria2("bt-stop-timeout", "0") + else: + for download in downloads: + if not download.is_complete: + try: + await sync_to_async( + aria2.client.change_option, + download.gid, + {"bt-stop-timeout": Config.TORRENT_TIMEOUT}, + ) + except Exception as e: + LOGGER.error(e) + aria2_options["bt-stop-timeout"] = Config.TORRENT_TIMEOUT + await database.update_aria2("bt-stop-timeout", Config.TORRENT_TIMEOUT) + + if not Config.INCOMPLETE_TASK_NOTIFIER: + await database.trunc_table("tasks") + + await (await create_subprocess_exec("pkill", "-9", "-f", "gunicorn")).wait() + if Config.BASE_URL: + await create_subprocess_shell( + f"gunicorn web.wserver:app --bind 0.0.0.0:{Config.BASE_URL_PORT} --worker-class gevent", + ) + + if Config.DATABASE_URL: + await database.connect() + config_dict = Config.get_all() + await database.update_config(config_dict) + else: + await database.disconnect() + await gather(initiate_search_tools(), start_from_queued(), rclone_serve_booter()) + add_job() diff --git a/bot/modules/broadcast.py b/bot/modules/broadcast.py index 262edc76c..7281c3901 100644 --- a/bot/modules/broadcast.py +++ b/bot/modules/broadcast.py @@ -1,27 +1,20 @@ import asyncio from time import time -from pyrogram.errors import FloodWait, UserIsBlocked, InputUserDeactivated -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler +from pyrogram.errors import FloodWait, InputUserDeactivated, UserIsBlocked -from bot import DATABASE_URL, bot -from bot.helper.ext_utils.bot_utils import new_task, get_readable_time -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.status_utils import get_readable_time from bot.helper.telegram_helper.message_utils import edit_message, send_message @new_task async def broadcast(_, message): - if not DATABASE_URL: - await send_message(message, "DATABASE_URL not provided!") - return - if not message.reply_to_message: await send_message( - message, "Reply to any message to broadcast messages to users in Bot PM." 
+ message, + "Reply to any message to broadcast messages to users in Bot PM.", ) return @@ -30,7 +23,7 @@ async def broadcast(_, message): updater = time() broadcast_message = await send_message(message, "Broadcast in progress...") - for uid in await DbManager().get_pm_uids(): + for uid in await database.get_pm_uids(): try: await message.reply_to_message.copy(uid) successful += 1 @@ -39,7 +32,7 @@ async def broadcast(_, message): await message.reply_to_message.copy(uid) successful += 1 except (UserIsBlocked, InputUserDeactivated): - await DbManager().rm_pm_user(uid) + await database.rm_pm_user(uid) blocked += 1 except Exception: unsuccessful += 1 @@ -65,11 +58,3 @@ def generate_status(total, successful, blocked, unsuccessful, elapsed_time=""): if elapsed_time: status += f"\n\nElapsed Time: {elapsed_time}" return status - - -bot.add_handler( - MessageHandler( - broadcast, - filters=command(BotCommands.BroadcastCommand) & CustomFilters.owner, - ) -) diff --git a/bot/modules/cancel_mirror.py b/bot/modules/cancel_mirror.py deleted file mode 100644 index 97f6268e9..000000000 --- a/bot/modules/cancel_mirror.py +++ /dev/null @@ -1,121 +0,0 @@ -from asyncio import sleep - -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import OWNER_ID, bot, bot_name, user_data, download_dict, download_dict_lock -from bot.helper.ext_utils.bot_utils import ( - MirrorStatus, - new_task, - get_all_task, - get_task_by_gid, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - send_message, - delete_message, - one_minute_del, -) - - -@new_task -async def cancel_mirror(_, message): - user_id = message.from_user.id - msg = message.text.split("_", maxsplit=1) - await delete_message(message) - - if len(msg) > 1: - cmd_data = msg[1].split("@", maxsplit=1) - if len(cmd_data) > 1 and cmd_data[1].strip() != bot_name: - return - gid = cmd_data[0] - dl = await get_task_by_gid(gid) - if dl is None: - await delete_message(message) - return - elif reply_to_id := message.reply_to_message_id: - async with download_dict_lock: - dl = download_dict.get(reply_to_id, None) - if dl is None: - await delete_message(message) - return - elif len(msg) == 1: - await delete_message(message) - return - - if user_id not in (OWNER_ID, dl.message.from_user.id) and ( - user_id not in user_data or not user_data[user_id].get("is_sudo") - ): - await delete_message(message) - return - - obj = dl.download() - await obj.cancel_download() - - -async def cancel_all(status): - matches = await get_all_task(status) - if not matches: - return False - for dl in matches: - obj = dl.download() - await obj.cancel_download() - await sleep(1) - return True - - -async def cancell_all_buttons(_, message): - async with download_dict_lock: - count = len(download_dict) - if count == 0: - await send_message(message, "No active tasks!") - return - - buttons = ButtonMaker() - buttons.callback("Downloading", f"stopall {MirrorStatus.STATUS_DOWNLOADING}") - buttons.callback("Uploading", f"stopall {MirrorStatus.STATUS_UPLOADING}") - buttons.callback("Seeding", f"stopall {MirrorStatus.STATUS_SEEDING}") - buttons.callback("Cloning", f"stopall {MirrorStatus.STATUS_CLONING}") - buttons.callback("Extracting", f"stopall {MirrorStatus.STATUS_EXTRACTING}") - buttons.callback("Archiving", f"stopall 
{MirrorStatus.STATUS_ARCHIVING}") - buttons.callback("QueuedDl", f"stopall {MirrorStatus.STATUS_QUEUEDL}") - buttons.callback("QueuedUp", f"stopall {MirrorStatus.STATUS_QUEUEUP}") - buttons.callback("Paused", f"stopall {MirrorStatus.STATUS_PAUSED}") - buttons.callback("All", "stopall all") - buttons.callback("Close", "stopall close") - button = buttons.column(2) - can_msg = await send_message(message, "Choose tasks to cancel.", button) - await delete_message(message) - await one_minute_del(can_msg) - - -@new_task -async def cancel_all_update(_, query): - data = query.data.split() - message = query.message - reply_to = message.reply_to_message - await query.answer() - if data[1] == "close": - await delete_message(reply_to) - await delete_message(message) - else: - res = await cancel_all(data[1]) - if not res: - await send_message(reply_to, f"No matching tasks for {data[1]}!") - - -bot.add_handler( - MessageHandler( - cancel_mirror, - filters=regex(r"^/stop(_\w+)?(?!all)") & CustomFilters.authorized, - ) -) -bot.add_handler( - MessageHandler( - cancell_all_buttons, - filters=command(BotCommands.StopAllCommand) & CustomFilters.sudo, - ) -) -bot.add_handler(CallbackQueryHandler(cancel_all_update, filters=regex(r"^stopall"))) diff --git a/bot/modules/cancel_task.py b/bot/modules/cancel_task.py new file mode 100644 index 000000000..de27327c2 --- /dev/null +++ b/bot/modules/cancel_task.py @@ -0,0 +1,192 @@ +from asyncio import sleep + +from bot import multi_tags, task_dict, task_dict_lock, user_data +from bot.core.aeon_client import Config +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.status_utils import ( + MirrorStatus, + get_all_tasks, + get_task_by_gid, +) +from bot.helper.telegram_helper import button_build +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.message_utils import ( + auto_delete_message, + delete_message, + edit_message, + send_message, +) + + +@new_task +async def cancel(_, message): + user_id = message.from_user.id if message.from_user else message.sender_chat.id + msg = message.text.split("_", maxsplit=1) + await delete_message(message) + if len(msg) > 1: + gid = msg[1].split("@", maxsplit=1) + gid = gid[0] + if len(gid) == 4: + multi_tags.discard(gid) + return + task = await get_task_by_gid(gid) + if task is None: + await delete_message(message) + return + elif reply_to_id := message.reply_to_message_id: + async with task_dict_lock: + task = task_dict.get(reply_to_id) + if task is None: + return + elif len(msg) == 1: + return + if user_id not in (Config.OWNER_ID, task.listener.user_id) and ( + user_id not in user_data or not user_data[user_id].get("is_sudo") + ): + return + obj = task.task() + await obj.cancel_task() + + +@new_task +async def cancel_multi(_, query): + data = query.data.split() + user_id = query.from_user.id + if user_id != int(data[1]) and not await CustomFilters.sudo("", query): + await query.answer("Not Yours!", show_alert=True) + return + tag = int(data[2]) + if tag in multi_tags: + multi_tags.discard(int(data[2])) + msg = "Stopped!" + else: + msg = "Already Stopped/Finished!" 
+    await query.answer(msg, show_alert=True)
+    await delete_message(query.message)
+
+
+async def cancel_all(status, user_id):
+    matches = await get_all_tasks(status.strip(), user_id)
+    if not matches:
+        return False
+    for task in matches:
+        obj = task.task()
+        await obj.cancel_task()
+        await sleep(2)
+    return True
+
+
+def create_cancel_buttons(is_sudo, user_id=""):
+    buttons = button_build.ButtonMaker()
+    buttons.data_button(
+        "Downloading",
+        f"canall ms {MirrorStatus.STATUS_DOWNLOAD} {user_id}",
+    )
+    buttons.data_button(
+        "Uploading",
+        f"canall ms {MirrorStatus.STATUS_UPLOAD} {user_id}",
+    )
+    buttons.data_button("Seeding", f"canall ms {MirrorStatus.STATUS_SEED} {user_id}")
+    buttons.data_button(
+        "Splitting",
+        f"canall ms {MirrorStatus.STATUS_SPLIT} {user_id}",
+    )
+    buttons.data_button(
+        "Cloning",
+        f"canall ms {MirrorStatus.STATUS_CLONE} {user_id}",
+    )
+    buttons.data_button(
+        "Extracting",
+        f"canall ms {MirrorStatus.STATUS_EXTRACT} {user_id}",
+    )
+    buttons.data_button(
+        "Archiving",
+        f"canall ms {MirrorStatus.STATUS_ARCHIVE} {user_id}",
+    )
+    buttons.data_button(
+        "QueuedDl",
+        f"canall ms {MirrorStatus.STATUS_QUEUEDL} {user_id}",
+    )
+    buttons.data_button(
+        "QueuedUp",
+        f"canall ms {MirrorStatus.STATUS_QUEUEUP} {user_id}",
+    )
+    buttons.data_button(
+        "SampleVideo",
+        f"canall ms {MirrorStatus.STATUS_SAMVID} {user_id}",
+    )
+    buttons.data_button(
+        "ConvertMedia",
+        f"canall ms {MirrorStatus.STATUS_CONVERT} {user_id}",
+    )
+    buttons.data_button(
+        "FFmpeg",
+        f"canall ms {MirrorStatus.STATUS_FFMPEG} {user_id}",
+    )
+    buttons.data_button(
+        "Paused",
+        f"canall ms {MirrorStatus.STATUS_PAUSED} {user_id}",
+    )
+    buttons.data_button("All", f"canall ms All {user_id}")
+    if is_sudo:
+        if user_id:
+            buttons.data_button("All Added Tasks", f"canall bot ms {user_id}")
+        else:
+            buttons.data_button("My Tasks", f"canall user ms {user_id}")
+    buttons.data_button("Close", f"canall close ms {user_id}")
+    return buttons.build_menu(2)
+
+
+@new_task
+async def cancel_all_buttons(_, message):
+    async with task_dict_lock:
+        count = len(task_dict)
+    if count == 0:
+        await send_message(message, "No active tasks!")
+        return
+    is_sudo = await CustomFilters.sudo("", message)
+    button = create_cancel_buttons(is_sudo, message.from_user.id)
+    can_msg = await send_message(message, "Choose tasks to cancel!", button)
+    await auto_delete_message(message, can_msg)
+
+
+@new_task
+async def cancel_all_update(_, query):
+    data = query.data.split()
+    message = query.message
+    reply_to = message.reply_to_message
+    user_id = int(data[3]) if len(data) > 3 else ""
+    is_sudo = await CustomFilters.sudo("", query)
+    if not is_sudo and user_id and user_id != query.from_user.id:
+        await query.answer("Not Yours!", show_alert=True)
+    else:
+        await query.answer()
+    if data[1] == "close":
+        await delete_message(reply_to)
+        await delete_message(message)
+    elif data[1] == "back":
+        button = create_cancel_buttons(is_sudo, user_id)
+        await edit_message(message, "Choose tasks to cancel!", button)
+    elif data[1] == "bot":
+        button = create_cancel_buttons(is_sudo, "")
+        await edit_message(message, "Choose tasks to cancel!", button)
+    elif data[1] == "user":
+        button = create_cancel_buttons(is_sudo, query.from_user.id)
+        await edit_message(message, "Choose tasks to cancel!", button)
+    elif data[1] == "ms":
+        buttons = button_build.ButtonMaker()
+        buttons.data_button("Yes!", f"canall {data[2]} confirm {user_id}")
+        buttons.data_button("Back", f"canall back confirm {user_id}")
+        buttons.data_button("Close", f"canall close confirm {user_id}")
confirm {user_id}") + button = buttons.build_menu(2) + await edit_message( + message, + f"Are you sure you want to cancel all {data[2]} tasks", + button, + ) + else: + button = create_cancel_buttons(is_sudo, user_id) + await edit_message(message, "Choose tasks to cancel.", button) + res = await cancel_all(data[1], user_id) + if not res: + await send_message(reply_to, f"No matching tasks for {data[1]}!") diff --git a/bot/modules/chat_permission.py b/bot/modules/chat_permission.py new file mode 100644 index 000000000..fb08fc926 --- /dev/null +++ b/bot/modules/chat_permission.py @@ -0,0 +1,115 @@ +from bot import user_data +from bot.helper.ext_utils.bot_utils import new_task, update_user_ldata +from bot.helper.ext_utils.db_handler import database +from bot.helper.telegram_helper.message_utils import send_message + + +@new_task +async def authorize(_, message): + msg = message.text.split() + thread_id = None + if len(msg) > 1: + if "|" in msg: + chat_id, thread_id = list(map(int, msg[1].split("|"))) + else: + chat_id = int(msg[1].strip()) + elif reply_to := message.reply_to_message: + chat_id = ( + reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id + ) + else: + if message.is_topic_message: + thread_id = message.message_thread_id + chat_id = message.chat.id + if chat_id in user_data and user_data[chat_id].get("is_auth"): + if ( + thread_id is not None + and thread_id in user_data[chat_id].get("thread_ids", []) + ) or thread_id is None: + msg = "Already Authorized!" + else: + if "thread_ids" in user_data[chat_id]: + user_data[chat_id]["thread_ids"].append(thread_id) + else: + user_data[chat_id]["thread_ids"] = [thread_id] + msg = "Authorized" + else: + update_user_ldata(chat_id, "is_auth", True) + if thread_id is not None: + update_user_ldata(chat_id, "thread_ids", [thread_id]) + await database.update_user_data(chat_id) + msg = "Authorized" + await send_message(message, msg) + + +@new_task +async def unauthorize(_, message): + msg = message.text.split() + thread_id = None + if len(msg) > 1: + if "|" in msg: + chat_id, thread_id = list(map(int, msg[1].split("|"))) + else: + chat_id = int(msg[1].strip()) + elif reply_to := message.reply_to_message: + chat_id = ( + reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id + ) + else: + if message.is_topic_message: + thread_id = message.message_thread_id + chat_id = message.chat.id + if chat_id in user_data and user_data[chat_id].get("is_auth"): + if thread_id is not None and thread_id in user_data[chat_id].get( + "thread_ids", + [], + ): + user_data[chat_id]["thread_ids"].remove(thread_id) + else: + update_user_ldata(chat_id, "is_auth", False) + await database.update_user_data(chat_id) + msg = "Unauthorized" + else: + msg = "Already Unauthorized!" + await send_message(message, msg) + + +@new_task +async def add_sudo(_, message): + id_ = "" + msg = message.text.split() + if len(msg) > 1: + id_ = int(msg[1].strip()) + elif reply_to := message.reply_to_message: + id_ = ( + reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id + ) + if id_: + if id_ in user_data and user_data[id_].get("is_sudo"): + msg = "Already Sudo!" + else: + update_user_ldata(id_, "is_sudo", True) + await database.update_user_data(id_) + msg = "Promoted as Sudo" + else: + msg = "Give ID or Reply To message of whom you want to Promote." 
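+    # is_sudo lives in user_data and is mirrored to the database above, so
+    # the promotion survives bot restarts.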
+ await send_message(message, msg) + + +@new_task +async def remove_sudo(_, message): + id_ = "" + msg = message.text.split() + if len(msg) > 1: + id_ = int(msg[1].strip()) + elif reply_to := message.reply_to_message: + id_ = ( + reply_to.from_user.id if reply_to.from_user else reply_to.sender_chat.id + ) + if (id_ and id_ not in user_data) or user_data[id_].get("is_sudo"): + update_user_ldata(id_, "is_sudo", False) + await database.update_user_data(id_) + msg = "Demoted" + else: + msg = "Give ID or Reply To message of whom you want to remove from Sudo" + await send_message(message, msg) diff --git a/bot/modules/clone.py b/bot/modules/clone.py index 17f19a783..1475931e0 100644 --- a/bot/modules/clone.py +++ b/bot/modules/clone.py @@ -1,365 +1,343 @@ +from asyncio import gather from json import loads -from asyncio import sleep, gather from secrets import token_hex -from aiofiles.os import path as aiopath -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler +from aiofiles.os import remove -from bot import LOGGER, bot, config_dict, download_dict, download_dict_lock +from bot import LOGGER, bot_loop, task_dict, task_dict_lock +from bot.helper.aeon_utils.access_check import error_check from bot.helper.ext_utils.bot_utils import ( - cmd_exec, - new_task, + COMMAND_USAGE, arg_parser, - is_share_link, + cmd_exec, sync_to_async, - fetch_user_tds, +) +from bot.helper.ext_utils.exceptions import DirectDownloadLinkException +from bot.helper.ext_utils.links_utils import ( + is_gdrive_id, is_gdrive_link, is_rclone_path, - get_telegraph_list, + is_share_link, +) +from bot.helper.ext_utils.task_manager import stop_duplicate_check +from bot.helper.listeners.task_listener import TaskListener +from bot.helper.mirror_leech_utils.download_utils.direct_link_generator import ( + direct_link_generator, +) +from bot.helper.mirror_leech_utils.gdrive_utils.clone import GoogleDriveClone +from bot.helper.mirror_leech_utils.gdrive_utils.count import GoogleDriveCount +from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper +from bot.helper.mirror_leech_utils.status_utils.gdrive_status import ( + GoogleDriveStatus, ) -from bot.helper.ext_utils.exceptions import DirectDownloadLinkError -from bot.helper.aeon_utils.nsfw_check import nsfw_precheck -from bot.helper.aeon_utils.send_react import send_react -from bot.helper.ext_utils.help_strings import CLONE_HELP_MESSAGE -from bot.helper.ext_utils.task_manager import task_utils, limit_checker -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.listeners.tasks_listener import MirrorLeechListener -from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus from bot.helper.telegram_helper.message_utils import ( delete_links, - edit_message, - send_message, delete_message, - one_minute_del, five_minute_del, - sendStatusMessage, -) -from bot.helper.mirror_leech_utils.rclone_utils.list import RcloneList -from bot.helper.mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper -from bot.helper.mirror_leech_utils.status_utils.gdrive_status import GdriveStatus -from bot.helper.mirror_leech_utils.status_utils.rclone_status import RcloneStatus -from bot.helper.mirror_leech_utils.download_utils.direct_link_generator import ( - direct_link_generator, + send_message, + send_status_message, ) -async def rcloneNode(client, 
message, link, dst_path, rcf, tag): - if link == "rcl": - link = await RcloneList(client, message).get_rclone_path("rcd") - if not is_rclone_path(link): - await send_message(message, link) - return +class Clone(TaskListener): + def __init__( + self, + client, + message, + _=None, + __=None, + ___=None, + ____=None, + _____=None, + bulk=None, + multi_tag=None, + options="", + ): + if bulk is None: + bulk = [] + self.message = message + self.client = client + self.multi_tag = multi_tag + self.options = options + self.same_dir = {} + self.bulk = bulk + super().__init__() + self.is_clone = True - if link.startswith("mrcc:"): - link = link.split("mrcc:", 1)[1] - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" + async def new_event(self): + text = self.message.text.split("\n") + input_list = text[0].split(" ") + error_msg, error_button = await error_check(self.message) + if error_msg: + await delete_links(self.message) + error = await send_message(self.message, error_msg, error_button) + return await five_minute_del(error) + args = { + "link": "", + "-i": 0, + "-b": False, + "-n": "", + "-up": "", + "-rcf": "", + "-sync": False, + } - if not await aiopath.exists(config_path): - await send_message(message, f"Rclone Config: {config_path} not Exists!") - return + arg_parser(input_list[1:], args) - if dst_path == "rcl" or config_dict["RCLONE_PATH"] == "rcl": - dst_path = await RcloneList(client, message).get_rclone_path( - "rcu", config_path - ) - if not is_rclone_path(dst_path): - await send_message(message, dst_path) - return + try: + self.multi = int(args["-i"]) + except Exception: + self.multi = 0 - dst_path = (dst_path or config_dict["RCLONE_PATH"]).strip("/") - if not is_rclone_path(dst_path): - await send_message(message, "Given Wrong RClone Destination!") - return - if dst_path.startswith("mrcc:"): - if config_path != f"tanha/{message.from_user.id}.conf": - await send_message( - message, "You should use same rcl.conf to clone between pathies!" - ) - return - dst_path = dst_path.lstrip("mrcc:") - elif config_path != "rcl.conf": - await send_message( - message, "You should use same rcl.conf to clone between pathies!" - ) - return + self.up_dest = args["-up"] + self.rc_flags = args["-rcf"] + self.link = args["link"] + self.name = args["-n"] - remote, src_path = link.split(":", 1) - src_path = src_path.strip("/") + is_bulk = args["-b"] + sync = args["-sync"] + bulk_start = 0 + bulk_end = 0 - cmd = [ - "xone", - "lsjson", - "--fast-list", - "--stat", - "--no-modtime", - "--config", - config_path, - f"{remote}:{src_path}", - ] - res = await cmd_exec(cmd) - if res[2] != 0: - if res[2] != -9: - msg = f"Error: While getting RClone Stats. Path: {remote}:{src_path}. 
Stderr: {res[1][:4000]}" - await send_message(message, msg) - return - rstat = loads(res[0]) - if rstat["IsDir"]: - name = src_path.rsplit("/", 1)[-1] if src_path else remote - dst_path += name if dst_path.endswith(":") else f"/{name}" - mime_type = "Folder" - else: - name = src_path.rsplit("/", 1)[-1] - mime_type = rstat["MimeType"] + if not isinstance(is_bulk, bool): + dargs = is_bulk.split(":") + bulk_start = dargs[0] or 0 + if len(dargs) == 2: + bulk_end = dargs[1] or 0 + is_bulk = True - listener = MirrorLeechListener(message, tag=tag) - await listener.on_download_start() + if is_bulk: + await self.init_bulk(input_list, bulk_start, bulk_end, Clone) + return None + + await self.get_tag(text) - RCTransfer = RcloneTransferHelper(listener, name) - LOGGER.info( - f"Clone Started: Name: {name} - Source: {link} - Destination: {dst_path}" - ) - gid = token_hex(4) - async with download_dict_lock: - download_dict[message.id] = RcloneStatus(RCTransfer, message, gid, "cl") - await sendStatusMessage(message) - link, destination = await RCTransfer.clone( - config_path, remote, src_path, dst_path, rcf, mime_type - ) - if not link: - return - LOGGER.info(f"Cloning Done: {name}") - cmd1 = [ - "xone", - "lsf", - "--fast-list", - "-R", - "--files-only", - "--config", - config_path, - destination, - ] - cmd2 = [ - "xone", - "lsf", - "--fast-list", - "-R", - "--dirs-only", - "--config", - config_path, - destination, - ] - cmd3 = [ - "xone", - "size", - "--fast-list", - "--json", - "--config", - config_path, - destination, - ] - res1, res2, res3 = await gather(cmd_exec(cmd1), cmd_exec(cmd2), cmd_exec(cmd3)) - if res1[2] != res2[2] != res3[2] != 0: - if res1[2] == -9: - return - files = None - folders = None - size = 0 - LOGGER.error( - f"Error: While getting RClone Stats. Path: {destination}. 
Stderr: {res1[1][:4000]}" - ) - else: - files = len(res1[0].split("\n")) - folders = len(res2[0].split("\n")) - rsize = loads(res3[0]) - size = rsize["bytes"] - await listener.onUploadComplete( - link, size, files, folders, mime_type, name, destination - ) + if not self.link and (reply_to := self.message.reply_to_message): + self.link = reply_to.text.split("\n", 1)[0].strip() + await self.run_multi(input_list, Clone) -async def gdcloneNode(message, link, listen_up): - if not is_gdrive_link(link) and is_share_link(link): - process_msg = await send_message( - message, f"Processing Link: {link}" - ) + if len(self.link) == 0: + await send_message( + self.message, + COMMAND_USAGE["clone"][0], + COMMAND_USAGE["clone"][1], + ) + return None + LOGGER.info(self.link) try: - link = await sync_to_async(direct_link_generator, link) - LOGGER.info(f"Generated link: {link}") - await edit_message( - process_msg, f"Generated Link: {link}" + await self.before_start() + except Exception as e: + await send_message(self.message, e) + return None + await self._proceed_to_clone(sync) + return None + + async def _proceed_to_clone(self, sync): + if is_share_link(self.link): + try: + self.link = await sync_to_async(direct_link_generator, self.link) + LOGGER.info(f"Generated link: {self.link}") + except DirectDownloadLinkException as e: + LOGGER.error(str(e)) + if str(e).startswith("ERROR:"): + await send_message(self.message, str(e)) + return + if is_gdrive_link(self.link) or is_gdrive_id(self.link): + self.name, mime_type, self.size, files, _ = await sync_to_async( + GoogleDriveCount().count, + self.link, + self.user_id, ) - except DirectDownloadLinkError as e: - LOGGER.error(str(e)) - if str(e).startswith("ERROR:"): - await edit_message(process_msg, str(e)) - await delete_links(message) - await one_minute_del(process_msg) + if mime_type is None: + await send_message(self.message, self.name) return - await delete_message(process_msg) - if is_gdrive_link(link): - gd = GoogleDriveHelper() - name, mime_type, size, files, _ = await sync_to_async(gd.count, link) - if mime_type is None: - await send_message(message, name) - return - if config_dict["STOP_DUPLICATE"]: - LOGGER.info("Checking File/Folder if already in Drive...") - telegraph_content, contents_no = await sync_to_async( - gd.drive_list, name, True, True - ) - if telegraph_content: - msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:" - button = await get_telegraph_list(telegraph_content) - await send_message(message, msg, button) + msg, button = await stop_duplicate_check(self) + if msg: + await send_message(self.message, msg, button) return - listener = MirrorLeechListener( - message, - tag=listen_up[0], - is_clone=True, - drive_id=listen_up[1], - index_link=listen_up[2], - ) - if limit_exceeded := await limit_checker(size, listener): - await listener.onUploadError(limit_exceeded) - return - await listener.on_download_start() - LOGGER.info(f"Clone Started: Name: {name} - Source: {link}") - drive = GoogleDriveHelper(name, listener=listener) - if files <= 20: - msg = await send_message(message, f"Cloning: {link}") - link, size, mime_type, files, folders = await sync_to_async( - drive.clone, link, listener.drive_id - ) - await delete_message(msg) - else: - gid = token_hex(4) - async with download_dict_lock: - download_dict[message.id] = GdriveStatus( - drive, size, message, gid, "cl" + await self.on_download_start() + LOGGER.info(f"Clone Started: Name: {self.name} - Source: {self.link}") + drive = GoogleDriveClone(self) + 
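+            # Small drive clones (10 files or fewer) finish quickly, so only
+            # a plain "Cloning" message is shown; larger ones are registered
+            # in task_dict below and tracked through the status message.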
if files <= 10: + msg = await send_message( + self.message, + f"Cloning: {self.link}", ) - await sendStatusMessage(message) - link, size, mime_type, files, folders = await sync_to_async( - drive.clone, link, listener.drive_id + else: + msg = "" + gid = token_hex(4) + async with task_dict_lock: + task_dict[self.mid] = GoogleDriveStatus(self, drive, gid, "cl") + if self.multi <= 1: + await send_status_message(self.message) + flink, mime_type, files, folders, dir_id = await sync_to_async( + drive.clone, + ) + if msg: + await delete_message(msg) + if not flink: + return + await self.on_upload_complete( + flink, + files, + folders, + mime_type, + dir_id=dir_id, ) - if not link: - return - LOGGER.info(f"Cloning Done: {name}") - await listener.onUploadComplete(link, size, files, folders, mime_type, name) - else: - reply_message = await send_message(message, CLONE_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) + LOGGER.info(f"Cloning Done: {self.name}") + elif is_rclone_path(self.link): + if self.link.startswith("mrcc:"): + self.link = self.link.replace("mrcc:", "", 1) + self.up_dest = self.up_dest.replace("mrcc:", "", 1) + config_path = f"rclone/{self.user_id}.conf" + else: + config_path = "rclone.conf" + remote, src_path = self.link.split(":", 1) + self.link = src_path.strip("/") + if self.link.startswith("rclone_select"): + mime_type = "Folder" + src_path = "" + if not self.name: + self.name = self.link + else: + src_path = self.link + cmd = [ + "xone", + "lsjson", + "--fast-list", + "--stat", + "--no-modtime", + "--config", + config_path, + f"{remote}:{src_path}", + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] + res = await cmd_exec(cmd) + if res[2] != 0: + if res[2] != -9: + msg = f"Error: While getting rclone stat. Path: {remote}:{src_path}. 
Stderr: {res[1][:4000]}" + await send_message(self.message, msg) + return + rstat = loads(res[0]) + if rstat["IsDir"]: + if not self.name: + self.name = ( + src_path.rsplit("/", 1)[-1] if src_path else remote + ) + self.up_dest += ( + self.name if self.up_dest.endswith(":") else f"/{self.name}" + ) -@new_task -async def clone(client, message): - await send_react(message) - input_list = message.text.split(" ") - arg_base = { - "link": "", - "-i": "0", - "-up": "", - "-rcf": "", - "-id": "", - "-index": "", - } - args = arg_parser(input_list[1:], arg_base) - i = args["-i"] - dst_path = args["-up"] - rcf = args["-rcf"] - link = args["link"] - drive_id = args["-id"] - index_link = args["-index"] - multi = int(i) if i.isdigit() else 0 + mime_type = "Folder" + else: + if not self.name: + self.name = src_path.rsplit("/", 1)[-1] + mime_type = rstat["MimeType"] - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - if not link and (reply_to := message.reply_to_message): - link = reply_to.text.split("\n", 1)[0].strip() + await self.on_download_start() - @new_task - async def __run_multi(): - if multi > 1: - await sleep(5) - msg = [s.strip() for s in input_list] - index = msg.index("-i") - msg[index + 1] = f"{multi - 1}" - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1 + RCTransfer = RcloneTransferHelper(self) + LOGGER.info( + f"Clone Started: Name: {self.name} - Source: {self.link} - Destination: {self.up_dest}", ) - nextmsg = await send_message(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id + gid = token_hex(4) + async with task_dict_lock: + task_dict[self.mid] = RcloneStatus(self, RCTransfer, gid, "cl") + if self.multi <= 1: + await send_status_message(self.message) + method = "sync" if sync else "copy" + flink, destination = await RCTransfer.clone( + config_path, + remote, + src_path, + mime_type, + method, + ) + if self.link.startswith("rclone_select"): + await remove(self.link) + if not destination: + return + LOGGER.info(f"Cloning Done: {self.name}") + cmd1 = [ + "xone", + "lsf", + "--fast-list", + "-R", + "--files-only", + "--config", + config_path, + destination, + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] + cmd2 = [ + "xone", + "lsf", + "--fast-list", + "-R", + "--dirs-only", + "--config", + config_path, + destination, + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] + cmd3 = [ + "xone", + "size", + "--fast-list", + "--json", + "--config", + config_path, + destination, + "-v", + "--log-systemd", + "--log-file", + "rlog.txt", + ] + res1, res2, res3 = await gather( + cmd_exec(cmd1), + cmd_exec(cmd2), + cmd_exec(cmd3), + ) + if res1[2] != res2[2] != res3[2] != 0: + if res1[2] == -9: + return + files = None + folders = None + self.size = 0 + LOGGER.error( + f"Error: While getting rclone stat. Path: {destination}. 
Stderr: {res1[1][:4000]}", + ) + else: + files = len(res1[0].split("\n")) + folders = len(res2[0].strip().split("\n")) if res2[0] else 0 + rsize = loads(res3[0]) + self.size = rsize["bytes"] + await self.on_upload_complete( + flink, + files, + folders, + mime_type, + destination, + ) + else: + await send_message( + self.message, + COMMAND_USAGE["clone"][0], + COMMAND_USAGE["clone"][1], ) - nextmsg.from_user = message.from_user - await sleep(5) - clone(client, nextmsg) - - __run_multi() - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if len(link) == 0: - reply_message = await send_message(message, CLONE_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) - return None - - error_msg = [] - error_button = None - if await nsfw_precheck(message): - error_msg.extend(["NSFW detected"]) - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - if error_msg: - final_msg = f"Hey, {tag}!\n" - for __i, __msg in enumerate(error_msg, 1): - final_msg += f"\n
{__i}: {__msg}
" - if error_button is not None: - error_button = error_button.column(2) - await delete_links(message) - force_m = await send_message(message, final_msg, error_button) - await five_minute_del(force_m) - return None - - if is_rclone_path(link): - if not await aiopath.exists("rcl.conf") and not await aiopath.exists( - f"tanha/{message.from_user.id}.conf" - ): - await send_message(message, "Rclone Config Not exists!") - return None - if not config_dict["RCLONE_PATH"] and not dst_path: - await send_message(message, "Destination not specified!") - await delete_links(message) - return None - await rcloneNode(client, message, link, dst_path, rcf, tag) - else: - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - if drive_id and not await sync_to_async( - GoogleDriveHelper().getFolderData, drive_id - ): - return await send_message(message, "Google Drive ID validation failed!!") - if not config_dict["GDRIVE_ID"] and not drive_id: - await send_message(message, "GDRIVE_ID not Provided!") - await delete_links(message) - return None - await gdcloneNode(message, link, [tag, drive_id, index_link]) - await delete_links(message) - return None -bot.add_handler( - MessageHandler( - clone, filters=command(BotCommands.CloneCommand) & CustomFilters.authorized - ) -) +async def clone_node(client, message): + bot_loop.create_task(Clone(client, message).new_event()) diff --git a/bot/modules/count.py b/bot/modules/count.py deleted file mode 100644 index b04468ed3..000000000 --- a/bot/modules/count.py +++ /dev/null @@ -1,63 +0,0 @@ -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler - -from bot import bot -from bot.helper.ext_utils.bot_utils import ( - new_task, - sync_to_async, - is_gdrive_link, - get_readable_file_size, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import ( - delete_links, - send_message, - delete_message, -) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper - - -@new_task -async def countNode(_, message): - async def format_node_count(name, mime_type, size, files, folders, tag): - msg = f"{name}\n\n" - msg += f"• Size: {get_readable_file_size(size)}\n" - if mime_type == "Folder": - msg += f"• SubFolders: {folders}\n" - msg += f"• Files: {files}\n" - msg += f"• Counted by: {tag}\n" - msg += f"• User ID: {message.from_user.id}\n" - return msg - - args = message.text.split() - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - - link = args[1] if len(args) > 1 else "" - if len(link) == 0 and (reply_to := message.reply_to_message): - link = reply_to.text.split(maxsplit=1)[0].strip() - - if is_gdrive_link(link): - msg = await send_message(message, f"Counting: {link}") - gd = GoogleDriveHelper() - name, mime_type, size, files, folders = await sync_to_async(gd.count, link) - if mime_type is None: - await send_message(message, name) - await delete_message(msg) - return - msg = await format_node_count(name, mime_type, size, files, folders, tag) - else: - msg = "Send a Google Drive link along with the command or reply to a link with the command." 
- await send_message(message, msg) - await delete_links(message) - - -bot.add_handler( - MessageHandler( - countNode, - filters=command(BotCommands.CountCommand) & CustomFilters.authorized, - ) -) diff --git a/bot/modules/delete.py b/bot/modules/delete.py deleted file mode 100644 index f70909c00..000000000 --- a/bot/modules/delete.py +++ /dev/null @@ -1,51 +0,0 @@ -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler - -from bot import LOGGER, bot -from bot.helper.ext_utils.bot_utils import new_task, sync_to_async, is_gdrive_link -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import ( - send_message, - delete_message, - one_minute_del, -) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper - - -async def delete_file(link): - try: - LOGGER.info(link) - drive = GoogleDriveHelper() - return await sync_to_async(drive.deletefile, link) - except Exception as e: - LOGGER.error(f"Error deleting Google Drive file: {e!s}") - return f"An error occurred: {e!s}" - - -@new_task -async def deletefile(_, message): - args = message.text.split() - if len(args) > 1: - link = args[1] - elif reply_to := message.reply_to_message: - link = reply_to.text.split(maxsplit=1)[0].strip() - else: - link = "" - - if is_gdrive_link(link): - msg = await delete_file(link) - else: - msg = "Send a Google Drive link along with the command or reply to the link with the command." - - reply_message = await send_message(message, msg) - await delete_message(message) - await one_minute_del(reply_message) - - -bot.add_handler( - MessageHandler( - deletefile, - filters=command(BotCommands.DeleteCommand) & CustomFilters.authorized, - ) -) diff --git a/bot/modules/exec.py b/bot/modules/exec.py new file mode 100644 index 000000000..bcd370d8b --- /dev/null +++ b/bot/modules/exec.py @@ -0,0 +1,114 @@ +from contextlib import redirect_stdout, suppress +from io import BytesIO, StringIO +from os import chdir, getcwd +from os import path as ospath +from textwrap import indent +from traceback import format_exc + +from aiofiles import open as aiopen + +from bot import LOGGER +from bot.core.aeon_client import TgClient +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.telegram_helper.message_utils import send_file, send_message + +namespaces = {} + + +def namespace_of(message): + if message.chat.id not in namespaces: + namespaces[message.chat.id] = { + "__builtins__": globals()["__builtins__"], + "bot": TgClient.bot, + "message": message, + "user": message.from_user or message.sender_chat, + "chat": message.chat, + } + + return namespaces[message.chat.id] + + +def log_input(message): + LOGGER.info( + f"IN: {message.text} (user={message.from_user.id if message.from_user else message.sender_chat.id}, chat={message.chat.id})", + ) + + +async def send(msg, message): + if len(str(msg)) > 2000: + with BytesIO(str.encode(msg)) as out_file: + out_file.name = "output.txt" + await send_file(message, out_file) + else: + LOGGER.info(f"OUT: '{msg}'") + await send_message(message, f"{msg}") + + +@new_task +async def aioexecute(_, message): + await send(await do("aexec", message), message) + + +@new_task +async def execute(_, message): + await send(await do("exec", message), message) + + +def cleanup_code(code): + if code.startswith("```") and code.endswith("```"): + return "\n".join(code.split("\n")[1:-1]) + return code.strip("` \n") + + 
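+# A rough sketch of the flow in `do` below; the command names are assumptions
+# (handler registration happens outside this hunk):
+#   /exec print(2 + 2)               -> body wrapped in `def func():`, run via
+#                                       sync_to_async with stdout captured,
+#                                       and "4" is sent back
+#   /aexec print(await bot.get_me()) -> wrapped in `async def func():` and
+#                                       awaited directly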
+async def do(func, message): + log_input(message) + content = message.text.split(maxsplit=1)[-1] + body = cleanup_code(content) + env = namespace_of(message) + + chdir(getcwd()) + async with aiopen(ospath.join(getcwd(), "bot/modules/temp.txt"), "w") as temp: + await temp.write(body) + + stdout = StringIO() + + try: + if func == "exec": + exec(f"def func():\n{indent(body, ' ')}", env) + else: + exec(f"async def func():\n{indent(body, ' ')}", env) + except Exception as e: + return f"{e.__class__.__name__}: {e}" + + rfunc = env["func"] + + try: + with redirect_stdout(stdout): + func_return = ( + await sync_to_async(rfunc) if func == "exec" else await rfunc() + ) + except Exception: + value = stdout.getvalue() + return f"{value}{format_exc()}" + else: + value = stdout.getvalue() + result = None + if func_return is None: + if value: + result = f"{value}" + else: + with suppress(Exception): + result = f"{await sync_to_async(eval, body, env)!r}" + else: + result = f"{value}{func_return}" + if result: + return result + + +@new_task +async def clear(_, message): + log_input(message) + global namespaces + if message.chat.id in namespaces: + del namespaces[message.chat.id] + await send("Locals Cleared.", message) diff --git a/bot/modules/executor.py b/bot/modules/executor.py deleted file mode 100644 index ffa794b94..000000000 --- a/bot/modules/executor.py +++ /dev/null @@ -1,126 +0,0 @@ -from io import BytesIO, StringIO -from os import path, chdir, getcwd -from re import match -from textwrap import indent -from traceback import format_exc -from contextlib import suppress, redirect_stdout - -from aiofiles import open as aiopen -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler, EditedMessageHandler - -from bot import LOGGER, bot, user -from bot.helper.ext_utils.bot_utils import new_task -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import sendFile, send_message - - -def create_execution_environment(message): - return { - "__builtins__": globals()["__builtins__"], - "bot": bot, - "message": message, - "user": user, - } - - -def log_input(message): - LOGGER.info( - f"INPUT: {message.text} (User ID={message.from_user.id} | Chat ID={message.chat.id})" - ) - - -async def send_response(msg, message): - if len(str(msg)) > 2000: - with BytesIO(str.encode(msg)) as out_file: - out_file.name = "output.txt" - await sendFile(message, out_file) - else: - LOGGER.info(f"OUTPUT: '{msg}'") - if not msg or msg == "\n": - msg = "MessageEmpty" - elif not bool(match(r"<(spoiler|b|i|code|s|u)>", msg)): - msg = f"
{msg}
" - await send_message(message, msg) - - -@new_task -async def evaluate(_, message): - content = message.text.split(maxsplit=1) - if len(content) == 1: - await send_response("No command to execute.", message) - else: - await send_response(await execute_code(eval, message), message) - - -@new_task -async def execute(_, message): - content = message.text.split(maxsplit=1) - if len(content) == 1: - await send_response("No command to execute.", message) - else: - await send_response(await execute_code(exec, message), message) - - -def cleanup_code(code): - if code.startswith("```") and code.endswith("```"): - return "\n".join(code.split("\n")[1:-1]) - return code.strip("` \n") - - -async def execute_code(func, message): - log_input(message) - content = message.text.split(maxsplit=1)[-1] - code = cleanup_code(content) - env = create_execution_environment(message) - - chdir(getcwd()) - async with aiopen(path.join(getcwd(), "bot/modules/temp.txt"), "w") as temp_file: - await temp_file.write(code) - - stdout = StringIO() - to_compile = f'async def func():\n{indent(code, " ")}' - - try: - exec(to_compile, env) - except Exception as e: - return f"{e.__class__.__name__}: {e}" - - func = env["func"] - - try: - with redirect_stdout(stdout): - func_return = await func() - except Exception: - return f"{stdout.getvalue()}{format_exc()}" - else: - result = stdout.getvalue() - if func_return is not None: - result += str(func_return) - elif not result: - with suppress(Exception): - result = repr(eval(code, env)) - return result - - -bot.add_handler( - MessageHandler( - evaluate, filters=command(BotCommands.EvalCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - MessageHandler( - execute, filters=command(BotCommands.ExecCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - EditedMessageHandler( - evaluate, filters=command(BotCommands.EvalCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - EditedMessageHandler( - execute, filters=command(BotCommands.ExecCommand) & CustomFilters.sudo - ) -) diff --git a/bot/modules/file_selector.py b/bot/modules/file_selector.py new file mode 100644 index 000000000..920bbaf1d --- /dev/null +++ b/bot/modules/file_selector.py @@ -0,0 +1,162 @@ +import contextlib + +from aiofiles.os import path as aiopath +from aiofiles.os import remove + +from bot import ( + LOGGER, + aria2, + task_dict, + task_dict_lock, + user_data, + xnox_client, +) +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import ( + bt_selection_buttons, + new_task, + sync_to_async, +) +from bot.helper.ext_utils.status_utils import MirrorStatus, get_task_by_gid +from bot.helper.telegram_helper.message_utils import ( + delete_message, + send_message, + send_status_message, +) + + +@new_task +async def select(_, message): + if not Config.BASE_URL: + await send_message(message, "Base URL not defined!") + return + user_id = message.from_user.id + msg = message.text.split() + if len(msg) > 1: + gid = msg[1] + task = await get_task_by_gid(gid) + if task is None: + await send_message(message, f"GID: {gid} Not Found.") + return + elif reply_to_id := message.reply_to_message_id: + async with task_dict_lock: + task = task_dict.get(reply_to_id) + if task is None: + await send_message(message, "This is not an active task!") + return + elif len(msg) == 1: + msg = ( + "Reply to an active /cmd which was used to start the download or add gid along with cmd\n\n" + + "This command mainly for selection incase you decided to select files from already added torrent. 
" + + "But you can always use /cmd with arg `s` to select files before download start." + ) + await send_message(message, msg) + return + + if user_id not in (Config.OWNER_ID, task.listener.user_id) and ( + user_id not in user_data or not user_data[user_id].get("is_sudo") + ): + await send_message(message, "This task is not for you!") + return + if await sync_to_async(task.status) not in [ + MirrorStatus.STATUS_DOWNLOAD, + MirrorStatus.STATUS_PAUSED, + MirrorStatus.STATUS_QUEUEDL, + ]: + await send_message( + message, + "Task should be in download or pause (incase message deleted by wrong) or queued status (incase you have used torrent file)!", + ) + return + if task.name().startswith("[METADATA]") or task.name().startswith("Trying"): + await send_message(message, "Try after downloading metadata finished!") + return + + try: + id_ = task.gid() + if task.listener.is_qbit: + if not task.queued: + await sync_to_async(task.update) + id_ = task.hash() + await sync_to_async( + xnox_client.torrents_stop, + torrent_hashes=id_, + ) + elif not task.queued: + await sync_to_async(task.update) + try: + await sync_to_async(aria2.client.force_pause, id_) + except Exception as e: + LOGGER.error( + f"{e} Error in pause, this mostly happens after abuse aria2", + ) + task.listener.select = True + except Exception: + await send_message(message, "This is not a bittorrent task!") + return + + SBUTTONS = bt_selection_buttons(id_) + msg = "Your download paused. Choose files then press Done Selecting button to resume downloading." + await send_message(message, msg, SBUTTONS) + + +@new_task +async def confirm_selection(_, query): + user_id = query.from_user.id + data = query.data.split() + message = query.message + task = await get_task_by_gid(data[2]) + if task is None: + await query.answer("This task has been cancelled!", show_alert=True) + await delete_message(message) + return + if user_id != task.listener.user_id: + await query.answer("This task is not for you!", show_alert=True) + elif data[1] == "pin": + await query.answer(data[3], show_alert=True) + elif data[1] == "done": + await query.answer() + id_ = data[3] + if hasattr(task, "seeding"): + if task.listener.is_qbit: + tor_info = ( + await sync_to_async( + xnox_client.torrents_info, + torrent_hash=id_, + ) + )[0] + path = tor_info.content_path.rsplit("/", 1)[0] + res = await sync_to_async( + xnox_client.torrents_files, + torrent_hash=id_, + ) + for f in res: + if f.priority == 0: + f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"] + for f_path in f_paths: + if await aiopath.exists(f_path): + with contextlib.suppress(Exception): + await remove(f_path) + if not task.queued: + await sync_to_async( + xnox_client.torrents_start, + torrent_hashes=id_, + ) + else: + res = await sync_to_async(aria2.client.get_files, id_) + for f in res: + if f["selected"] == "false" and await aiopath.exists(f["path"]): + with contextlib.suppress(Exception): + await remove(f["path"]) + if not task.queued: + try: + await sync_to_async(aria2.client.unpause, id_) + except Exception as e: + LOGGER.error( + f"{e} Error in resume, this mostly happens after abuse aria2. 
Try to use select cmd again!", + ) + await send_status_message(message) + await delete_message(message) + else: + await delete_message(message) + await task.cancel_task() diff --git a/bot/modules/force_start.py b/bot/modules/force_start.py new file mode 100644 index 000000000..52764684e --- /dev/null +++ b/bot/modules/force_start.py @@ -0,0 +1,84 @@ +from bot import ( + queue_dict_lock, + queued_dl, + queued_up, + task_dict, + task_dict_lock, + user_data, +) +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.status_utils import get_task_by_gid +from bot.helper.ext_utils.task_manager import ( + start_dl_from_queued, + start_up_from_queued, +) +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.message_utils import send_message + + +@new_task +async def remove_from_queue(_, message): + user_id = message.from_user.id if message.from_user else message.sender_chat.id + msg = message.text.split() + status = msg[1] if len(msg) > 1 and msg[1] in ["fd", "fu"] else "" + if (status and len(msg) > 2) or (not status and len(msg) > 1): + gid = msg[2] if status else msg[1] + task = await get_task_by_gid(gid) + if task is None: + await send_message(message, f"GID: {gid} Not Found.") + return + elif reply_to_id := message.reply_to_message_id: + async with task_dict_lock: + task = task_dict.get(reply_to_id) + if task is None: + await send_message(message, "This is not an active task!") + return + elif len(msg) in {1, 2}: + msg = f"""Reply to an active Command message which was used to start the download/upload. +/{BotCommands.ForceStartCommand[0]} fd (to remove it from download queue) or fu (to remove it from upload queue) or nothing to start remove it from both download and upload queue. +Also send /{BotCommands.ForceStartCommand[0]} GID fu|fd or obly gid to force start by removeing the task rom queue! +Examples: +/{BotCommands.ForceStartCommand[1]} GID fu (force upload) +/{BotCommands.ForceStartCommand[1]} GID (force download and upload) +By reply to task cmd: +/{BotCommands.ForceStartCommand[1]} (force download and upload) +/{BotCommands.ForceStartCommand[1]} fd (force download) +""" + await send_message(message, msg) + return + if user_id not in (Config.OWNER_ID, task.listener.user_id) and ( + user_id not in user_data or not user_data[user_id].get("is_sudo") + ): + await send_message(message, "This task is not for you!") + return + listener = task.listener + msg = "" + async with queue_dict_lock: + if status == "fu": + listener.force_upload = True + if listener.mid in queued_up: + await start_up_from_queued(listener.mid) + msg = "Task have been force started to upload!" + else: + msg = "Force upload enabled for this task!" + elif status == "fd": + listener.force_download = True + if listener.mid in queued_dl: + await start_dl_from_queued(listener.mid) + msg = "Task have been force started to download only!" + else: + msg = "This task not in download queue!" + else: + listener.force_download = True + listener.force_upload = True + if listener.mid in queued_up: + await start_up_from_queued(listener.mid) + msg = "Task have been force started to upload!" + elif listener.mid in queued_dl: + await start_dl_from_queued(listener.mid) + msg = "Task have been force started to download and upload will start once download finish!" + else: + msg = "This task not in queue!" 
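+    # Force start only flips the listener flags and pops the task out of
+    # queued_dl/queued_up; start_dl_from_queued/start_up_from_queued resume
+    # the actual transfer.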
+ if msg: + await send_message(message, msg) diff --git a/bot/modules/gd_count.py b/bot/modules/gd_count.py new file mode 100644 index 000000000..2ee42ee95 --- /dev/null +++ b/bot/modules/gd_count.py @@ -0,0 +1,42 @@ +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.links_utils import is_gdrive_link +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.mirror_leech_utils.gdrive_utils.count import GoogleDriveCount +from bot.helper.telegram_helper.message_utils import delete_message, send_message + + +@new_task +async def count_node(_, message): + args = message.text.split() + user = message.from_user or message.sender_chat + if username := user.username: + tag = f"@{username}" + else: + tag = message.from_user.mention + + link = args[1] if len(args) > 1 else "" + if len(link) == 0 and (reply_to := message.reply_to_message): + link = reply_to.text.split(maxsplit=1)[0].strip() + + if is_gdrive_link(link): + msg = await send_message(message, f"Counting: {link}") + name, mime_type, size, files, folders = await sync_to_async( + GoogleDriveCount().count, + link, + user.id, + ) + if mime_type is None: + await send_message(message, name) + return + await delete_message(msg) + msg = f"Name: {name}" + msg += f"\n\nSize: {get_readable_file_size(size)}" + msg += f"\n\nType: {mime_type}" + if mime_type == "Folder": + msg += f"\nSubFolders: {folders}" + msg += f"\nFiles: {files}" + msg += f"\n\ncc: {tag}" + else: + msg = "Send Gdrive link along with command or by replying to the link by command" + + await send_message(message, msg) diff --git a/bot/modules/gd_delete.py b/bot/modules/gd_delete.py new file mode 100644 index 000000000..9b816f33e --- /dev/null +++ b/bot/modules/gd_delete.py @@ -0,0 +1,27 @@ +from bot import LOGGER +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.links_utils import is_gdrive_link +from bot.helper.mirror_leech_utils.gdrive_utils.delete import GoogleDriveDelete +from bot.helper.telegram_helper.message_utils import ( + auto_delete_message, + send_message, +) + + +@new_task +async def delete_file(_, message): + args = message.text.split() + user = message.from_user or message.sender_chat + if len(args) > 1: + link = args[1] + elif reply_to := message.reply_to_message: + link = reply_to.text.split(maxsplit=1)[0].strip() + else: + link = "" + if is_gdrive_link(link): + LOGGER.info(link) + msg = await sync_to_async(GoogleDriveDelete().deletefile, link, user.id) + else: + msg = "Send Gdrive link along with command or by replying to the link by command" + reply_message = await send_message(message, msg) + await auto_delete_message(message, reply_message) diff --git a/bot/modules/gd_search.py b/bot/modules/gd_search.py new file mode 100644 index 000000000..1c85463da --- /dev/null +++ b/bot/modules/gd_search.py @@ -0,0 +1,101 @@ +from bot import LOGGER, user_data +from bot.helper.ext_utils.bot_utils import ( + get_telegraph_list, + new_task, + sync_to_async, +) +from bot.helper.mirror_leech_utils.gdrive_utils.search import GoogleDriveSearch +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import edit_message, send_message + + +async def list_buttons(user_id, is_recursive=True, user_token=False): + buttons = ButtonMaker() + buttons.data_button( + "Folders", + f"list_types {user_id} folders {is_recursive} {user_token}", + ) + buttons.data_button( + "Files", + f"list_types {user_id} files {is_recursive} 
{user_token}", + ) + buttons.data_button( + "Both", + f"list_types {user_id} both {is_recursive} {user_token}", + ) + buttons.data_button( + f"Recursive: {is_recursive}", + f"list_types {user_id} rec {is_recursive} {user_token}", + ) + buttons.data_button( + f"User Token: {user_token}", + f"list_types {user_id} ut {is_recursive} {user_token}", + ) + buttons.data_button("Cancel", f"list_types {user_id} cancel") + return buttons.build_menu(2) + + +async def _list_drive(key, message, item_type, is_recursive, user_token, user_id): + LOGGER.info(f"listing: {key}") + if user_token: + user_dict = user_data.get(user_id, {}) + target_id = user_dict.get("gdrive_id", "") or "" + LOGGER.info(target_id) + else: + target_id = "" + telegraph_content, contents_no = await sync_to_async( + GoogleDriveSearch(is_recursive=is_recursive, item_type=item_type).drive_list, + key, + target_id, + user_id, + ) + if telegraph_content: + try: + button = await get_telegraph_list(telegraph_content) + except Exception as e: + await edit_message(message, e) + return + msg = f"Found {contents_no} result for {key}" + await edit_message(message, msg, button) + else: + await edit_message(message, f"No result found for {key}") + + +@new_task +async def select_type(_, query): + user_id = query.from_user.id + message = query.message + key = message.reply_to_message.text.split(maxsplit=1)[1].strip() + data = query.data.split() + if user_id != int(data[1]): + return await query.answer(text="Not Yours!", show_alert=True) + if data[2] == "rec": + await query.answer() + is_recursive = not bool(eval(data[3])) + buttons = await list_buttons(user_id, is_recursive, eval(data[4])) + return await edit_message(message, "Choose list options:", buttons) + if data[2] == "ut": + await query.answer() + user_token = not bool(eval(data[4])) + buttons = await list_buttons(user_id, eval(data[3]), user_token) + return await edit_message(message, "Choose list options:", buttons) + if data[2] == "cancel": + await query.answer() + return await edit_message(message, "list has been canceled!") + await query.answer() + item_type = data[2] + is_recursive = eval(data[3]) + user_token = eval(data[4]) + await edit_message(message, f"Searching for {key}") + await _list_drive(key, message, item_type, is_recursive, user_token, user_id) + return None + + +@new_task +async def gdrive_search(_, message): + if len(message.text.split()) == 1: + return await send_message(message, "Send a search key along with command") + user_id = message.from_user.id + buttons = await list_buttons(user_id) + await send_message(message, "Choose list options:", buttons) + return None diff --git a/bot/modules/help.py b/bot/modules/help.py new file mode 100644 index 000000000..94fca8b49 --- /dev/null +++ b/bot/modules/help.py @@ -0,0 +1,60 @@ +from bot.helper.ext_utils.bot_utils import COMMAND_USAGE, new_task +from bot.helper.ext_utils.help_messages import ( + CLONE_HELP_DICT, + MIRROR_HELP_DICT, + YT_HELP_DICT, + help_string, +) +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + delete_message, + edit_message, + send_message, +) + + +@new_task +async def arg_usage(_, query): + data = query.data.split() + message = query.message + if data[1] == "close": + await delete_message(message) + elif data[1] == "back": + if data[2] == "m": + await edit_message( + message, + COMMAND_USAGE["mirror"][0], + COMMAND_USAGE["mirror"][1], + ) + elif data[2] == "y": + await edit_message( + message, + COMMAND_USAGE["yt"][0], + 
COMMAND_USAGE["yt"][1], + ) + elif data[2] == "c": + await edit_message( + message, + COMMAND_USAGE["clone"][0], + COMMAND_USAGE["clone"][1], + ) + elif data[1] == "mirror": + buttons = ButtonMaker() + buttons.data_button("Back", "help back m") + button = buttons.build_menu() + await edit_message(message, MIRROR_HELP_DICT[data[2]], button) + elif data[1] == "yt": + buttons = ButtonMaker() + buttons.data_button("Back", "help back y") + button = buttons.build_menu() + await edit_message(message, YT_HELP_DICT[data[2]], button) + elif data[1] == "clone": + buttons = ButtonMaker() + buttons.data_button("Back", "help back c") + button = buttons.build_menu() + await edit_message(message, CLONE_HELP_DICT[data[2]], button) + + +@new_task +async def bot_help(_, message): + await send_message(message, help_string) diff --git a/bot/modules/images.py b/bot/modules/images.py deleted file mode 100644 index b59a155ae..000000000 --- a/bot/modules/images.py +++ /dev/null @@ -1,174 +0,0 @@ -from asyncio import sleep as asleep - -from telegraph import upload_file -from aiofiles.os import remove as aioremove -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import IMAGES, LOGGER, DATABASE_URL, bot -from bot.helper.ext_utils.bot_utils import new_task, handle_index -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - edit_message, - send_message, - delete_message, -) - - -@new_task -async def picture_add(_, message): - reply = message.reply_to_message - msg = await send_message(message, "Fetching input...") - - if len(message.command) > 1 or (reply and reply.text): - msg_text = reply.text if reply else message.command[1] - if not msg_text.startswith("http"): - return await edit_message( - msg, "This is not a valid link. It must start with 'http'." - ) - graph_url = msg_text.strip() - await edit_message(msg, f"Adding your link: {graph_url}") - - elif reply and reply.photo: - if reply.photo.file_size > 5242880 * 2: - return await edit_message( - msg, "Media format is not supported. Only photos are allowed." - ) - - try: - photo_dir = await reply.download() - await edit_message( - msg, "Now, uploading to graph.org, Please Wait..." - ) - await asleep(1) - graph_url = f"https://graph.org{upload_file(photo_dir)[0]}" - LOGGER.info(f"Telegraph link : {graph_url}") - except Exception as e: - LOGGER.error(f"Images Error: {e!s}") - await edit_message(msg, str(e)) - finally: - await aioremove(photo_dir) - - else: - help_msg = f"Add an image using /{BotCommands.AddImageCommand} followed by IMAGE_LINK, or reply to an image with /{BotCommands.AddImageCommand}." - return await edit_message(msg, help_msg) - - IMAGES.append(graph_url) - - if DATABASE_URL: - await DbManager().update_config({"IMAGES": IMAGES}) - - await asleep(1.5) - await edit_message( - msg, - f"Successfully added to the images list!\n\nTotal images: {len(IMAGES)}", - ) - return None - - -async def pictures(_, message): - if not IMAGES: - await send_message( - message, - f"No images to display! 
Add images using /{BotCommands.AddImageCommand}.", - ) - else: - to_edit = await send_message(message, "Generating a grid of your images...") - buttons = ButtonMaker() - user_id = message.from_user.id - buttons.callback("<<", f"images {user_id} turn -1") - buttons.callback(">>", f"images {user_id} turn 1") - buttons.callback("Remove image", f"images {user_id} remove 0") - buttons.callback("Close", f"images {user_id} close") - buttons.callback("Remove all", f"images {user_id} removeall", "footer") - await delete_message(to_edit) - await send_message( - message, - f"Image No. : 1 / {len(IMAGES)}", - buttons.column(2), - IMAGES[0], - ) - - -@new_task -async def pics_callback(_, query): - message = query.message - user_id = query.from_user.id - data = query.data.split() - - if user_id != int(data[1]): - await query.answer(text="Not authorized user!", show_alert=True) - return - - if data[2] == "turn": - await query.answer() - ind = handle_index(int(data[3]), IMAGES) - no = len(IMAGES) - abs(ind + 1) if ind < 0 else ind + 1 - pic_info = f"Image No. : {no} / {len(IMAGES)}" - buttons = ButtonMaker() - buttons.callback("<<", f"images {data[1]} turn {ind-1}") - buttons.callback(">>", f"images {data[1]} turn {ind+1}") - buttons.callback("Remove Image", f"images {data[1]} remove {ind}") - buttons.callback("Close", f"images {data[1]} close") - buttons.callback("Remove all", f"images {data[1]} removeall", "footer") - await edit_message(message, pic_info, buttons.column(2), IMAGES[ind]) - - elif data[2] == "remove": - IMAGES.pop(int(data[3])) - if DATABASE_URL: - await DbManager().update_config({"IMAGES": IMAGES}) - query.answer("Image has been successfully deleted", show_alert=True) - - if len(IMAGES) == 0: - await query.message.delete() - await send_message( - message, - f"No images to display! Add images using /{BotCommands.AddImageCommand}.", - ) - return - - ind = int(data[3]) + 1 - ind = len(IMAGES) - abs(ind) if ind < 0 else ind - pic_info = f"Image No. : {ind+1} / {len(IMAGES)}" - buttons = ButtonMaker() - buttons.callback("<<", f"images {data[1]} turn {ind-1}") - buttons.callback(">>", f"images {data[1]} turn {ind+1}") - buttons.callback("Remove image", f"images {data[1]} remove {ind}") - buttons.callback("Close", f"images {data[1]} close") - buttons.callback("Remove all", f"images {data[1]} removeall", "footer") - await edit_message(message, pic_info, buttons.column(2), IMAGES[ind]) - - elif data[2] == "removeall": - IMAGES.clear() - if DATABASE_URL: - await DbManager().update_config({"IMAGES": IMAGES}) - await query.answer( - "All images have been successfully deleted.", show_alert=True - ) - await send_message( - message, - f"No images to display! 
Add images using /{BotCommands.AddImageCommand}.", - ) - await message.delete() - else: - await query.answer() - await message.delete() - await message.reply_to_message.delete() - - -bot.add_handler( - MessageHandler( - picture_add, - filters=command(BotCommands.AddImageCommand) & CustomFilters.authorized, - ) -) -bot.add_handler( - MessageHandler( - pictures, - filters=command(BotCommands.ImagesCommand) & CustomFilters.authorized, - ) -) -bot.add_handler(CallbackQueryHandler(pics_callback, filters=regex(r"^images"))) diff --git a/bot/modules/list.py b/bot/modules/list.py deleted file mode 100644 index 20cc18576..000000000 --- a/bot/modules/list.py +++ /dev/null @@ -1,111 +0,0 @@ -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import LOGGER, bot -from bot.helper.ext_utils.bot_utils import ( - new_task, - sync_to_async, - checking_access, - get_telegraph_list, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - isAdmin, - delete_links, - edit_message, - send_message, - one_minute_del, - five_minute_del, -) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper - - -async def list_buttons(user_id, isRecursive=True): - buttons = ButtonMaker() - buttons.callback("Folders", f"list_types {user_id} folders {isRecursive}") - buttons.callback("Files", f"list_types {user_id} files {isRecursive}") - buttons.callback("Both", f"list_types {user_id} both {isRecursive}") - buttons.callback( - f"Recursive: {isRecursive}", f"list_types {user_id} rec {isRecursive}" - ) - buttons.callback("Cancel", f"list_types {user_id} cancel") - return buttons.column(2) - - -async def _list_drive(key, message, item_type, isRecursive): - LOGGER.info(f"listing: {key}") - gdrive = GoogleDriveHelper() - telegraph_content, contents_no = await sync_to_async( - gdrive.drive_list, key, isRecursive=isRecursive, itemType=item_type - ) - if telegraph_content: - try: - button = await get_telegraph_list(telegraph_content) - except Exception as e: - await edit_message(message, e) - return - msg = f"Found {contents_no} result for {key}" - await edit_message(message, msg, button) - else: - await edit_message(message, f"No result found for {key}") - - -@new_task -async def select_type(_, query): - user_id = query.from_user.id - message = query.message - key = message.reply_to_message.text.split(maxsplit=1)[1].strip() - data = query.data.split() - if user_id != int(data[1]): - return await query.answer(text="Not Yours!", show_alert=True) - if data[2] == "rec": - await query.answer() - isRecursive = not bool(eval(data[3])) - buttons = await list_buttons(user_id, isRecursive) - return await edit_message(message, "Choose list options:", buttons) - if data[2] == "cancel": - await query.answer() - return await edit_message(message, "List has been canceled!") - await query.answer() - item_type = data[2] - isRecursive = eval(data[3]) - await edit_message(message, f"Searching for {key}...") - await _list_drive(key, message, item_type, isRecursive) - return None - - -@new_task -async def drive_list(_, message): - if len(message.text.split()) == 1: - reply_message = await send_message( - message, "Send a search key along with command" - ) - await delete_links(message) - await one_minute_del(reply_message) - return - user_id = message.from_user.id 
- if ( - not await isAdmin(message, user_id) - and message.chat.type != message.chat.type.PRIVATE - ): - msg, btn = await checking_access(user_id) - if msg is not None: - reply_message = await send_message(message, msg, btn.column(1)) - await delete_links(message) - await five_minute_del(reply_message) - return - buttons = await list_buttons(user_id) - reply_message = await send_message(message, "Choose list options:", buttons) - await five_minute_del(reply_message) - await delete_links(message) - - -bot.add_handler( - MessageHandler( - drive_list, - filters=command(BotCommands.ListCommand) & CustomFilters.authorized, - ) -) -bot.add_handler(CallbackQueryHandler(select_type, filters=regex("^list_types"))) diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py index d4dc87312..32e6cb4ba 100644 --- a/bot/modules/mediainfo.py +++ b/bot/modules/mediainfo.py @@ -1,24 +1,52 @@ -from os import path as ospath from os import getcwd +from os import path as ospath from re import search as re_search from shlex import split as ssplit import aiohttp from aiofiles import open as aiopen -from aiofiles.os import path as aiopath from aiofiles.os import mkdir +from aiofiles.os import path as aiopath from aiofiles.os import remove as aioremove -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler -from bot import LOGGER, bot +from bot import LOGGER +from bot.core.aeon_client import TgClient +from bot.helper.aeon_utils.access_check import token_check from bot.helper.ext_utils.bot_utils import cmd_exec -from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.ext_utils.telegraph_helper import telegraph from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import edit_message, send_message +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import ( + delete_links, + edit_message, + five_minute_del, + send_message, +) + +section_dict = {"General", "Video", "Audio", "Text", "Image"} -section_dict = {"General", "Video", "Audio", "Text", "Menu"} + +def parseinfo(out, file_size): + tc = "" + skip = False + file_size_line = f"File size : {file_size / (1024 * 1024):.2f} MiB" + + for line in out.split("\n"): + if line.startswith("Menu"): + skip = True + elif any(line.startswith(section) for section in section_dict): + skip = False + if not line.startswith("General"): + tc += "
" + tc += f"
{line.replace('Text', 'Subtitle')}
"
+        if not skip:
+            # Replace File size line
+            if line.startswith("File size"):
+                line = file_size_line
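+                # (MediaInfo runs on a partially downloaded file, so the size
+                # it reports is the truncated download; file_size_line swaps in
+                # the real size passed from gen_mediainfo, taken from the
+                # Content-Length header or media.file_size)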
+            key, sep, value = line.partition(":")
+            tc += f"{key.strip():<28}{sep} {value.strip()}\n"
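+            # e.g. "Duration : 1 h 30 min".partition(":") yields
+            # ("Duration ", ":", " 1 h 30 min"), re-emitted as
+            # "Duration                    : 1 h 30 min"; the key is
+            # left-aligned in a 28-column field so the values line up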
+    tc += "</pre><br>
" + return tc async def gen_mediainfo(message, link=None, media=None, msg=None): @@ -28,33 +56,40 @@ async def gen_mediainfo(message, link=None, media=None, msg=None): if not await aiopath.isdir(path): await mkdir(path) + file_size = 0 if link: filename = re_search(".+/(.+)", link).group(1) des_path = ospath.join(path, filename) headers = { - "user-agent": "Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36" + "user-agent": "Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36", } async with ( aiohttp.ClientSession() as session, session.get(link, headers=headers) as response, aiopen(des_path, "wb") as f, ): - async for chunk in response.content.iter_chunked(10000000): - await f.write(chunk) - break + file_size = int(response.headers.get("Content-Length", 0)) + async with aiopen(des_path, "wb") as f: + async for chunk in response.content.iter_chunked(10000000): + await f.write(chunk) + break elif media: des_path = ospath.join(path, media.file_name) - if media.file_size <= 50000000: + file_size = media.file_size + if file_size <= 30000000: await msg.download(ospath.join(getcwd(), des_path)) else: - async for chunk in bot.stream_media(media, limit=5): + async for chunk in TgClient.bot.stream_media(media, limit=3): async with aiopen(des_path, "ab") as f: await f.write(chunk) + # Get MediaInfo stdout, _, _ = await cmd_exec(ssplit(f'mediainfo "{des_path}"')) + + # Parse MediaInfo with updated file size tc = f"
<h4>{ospath.basename(des_path)}</h4><br><br>
" if stdout: - tc += parseinfo(stdout) + tc += parseinfo(stdout, file_size) except Exception as e: LOGGER.error(e) @@ -69,27 +104,16 @@ async def gen_mediainfo(message, link=None, media=None, msg=None): ) -def parseinfo(out): - tc = "" - trigger = False - for line in out.split("\n"): - for section in section_dict: - if line.startswith(section): - trigger = True - if not line.startswith("General"): - tc += "
" - tc += f"

{line.replace('Text', 'Subtitle')}

" - break - if trigger: - tc += "
"
-            trigger = False
-        else:
-            tc += line + "\n"
-    tc += "</pre><br>
" - return tc - - async def mediainfo(_, message): + user_id = message.from_user.id + buttons = ButtonMaker() + if message.chat.type != message.chat.type.PRIVATE: + msg, buttons = await token_check(user_id, buttons) + if msg is not None: + reply_message = await send_message(message, msg, buttons.build_menu(1)) + await delete_links(message) + await five_minute_del(reply_message) + return reply = message.reply_to_message help_msg = ( "By replying to media:" @@ -102,18 +126,11 @@ async def mediainfo(_, message): await gen_mediainfo(message, link) elif reply: if file := next( - (i for i in [reply.document, reply.video, reply.audio] if i), None + (i for i in [reply.document, reply.video, reply.audio] if i), + None, ): await gen_mediainfo(message, None, file, reply) else: await send_message(message, help_msg) else: await send_message(message, help_msg) - - -bot.add_handler( - MessageHandler( - mediainfo, - filters=command(BotCommands.MediaInfoCommand) & CustomFilters.authorized, - ) -) diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py index 0bbeafd61..6b6b4a4fd 100644 --- a/bot/modules/mirror_leech.py +++ b/bot/modules/mirror_leech.py @@ -1,477 +1,409 @@ -import contextlib -from re import match as re_match +# ruff: noqa: RUF006 +from asyncio import create_task from base64 import b64encode -from asyncio import sleep +from re import match as re_match from aiofiles.os import path as aiopath -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler -from bot import LOGGER, bot, user_data, config_dict +from bot import LOGGER, bot_loop, task_dict_lock +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.helper.aeon_utils.access_check import error_check from bot.helper.ext_utils.bot_utils import ( - is_url, - new_task, - is_magnet, + COMMAND_USAGE, arg_parser, - is_mega_link, + get_content_type, sync_to_async, - fetch_user_tds, +) +from bot.helper.ext_utils.exceptions import DirectDownloadLinkException +from bot.helper.ext_utils.links_utils import ( + is_gdrive_id, is_gdrive_link, + is_magnet, + is_mega_link, is_rclone_path, - get_content_type, is_telegram_link, + is_url, ) -from bot.helper.ext_utils.bulk_links import extract_bulk_links -from bot.helper.ext_utils.exceptions import DirectDownloadLinkError -from bot.helper.aeon_utils.nsfw_check import nsfw_precheck -from bot.helper.aeon_utils.send_react import send_react -from bot.helper.ext_utils.help_strings import MIRROR_HELP_MESSAGE -from bot.helper.ext_utils.task_manager import task_utils -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.listeners.tasks_listener import MirrorLeechListener -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import ( - delete_links, - edit_message, - send_message, - delete_message, - one_minute_del, - five_minute_del, - get_tg_link_content, +from bot.helper.listeners.task_listener import TaskListener +from bot.helper.mirror_leech_utils.download_utils.aria2_download import ( + add_aria2c_download, +) +from bot.helper.mirror_leech_utils.download_utils.direct_downloader import ( + add_direct_download, +) +from bot.helper.mirror_leech_utils.download_utils.direct_link_generator import ( + direct_link_generator, ) -from bot.helper.mirror_leech_utils.rclone_utils.list import RcloneList -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.mirror_leech_utils.download_utils.gd_download import 
add_gd_download from bot.helper.mirror_leech_utils.download_utils.mega_download import ( add_mega_download, ) from bot.helper.mirror_leech_utils.download_utils.qbit_download import add_qb_torrent -from bot.helper.mirror_leech_utils.download_utils.aria2_download import ( - add_aria2c_download, -) from bot.helper.mirror_leech_utils.download_utils.rclone_download import ( add_rclone_download, ) -from bot.helper.mirror_leech_utils.download_utils.direct_downloader import ( - add_direct_download, -) from bot.helper.mirror_leech_utils.download_utils.telegram_download import ( TelegramDownloadHelper, ) -from bot.helper.mirror_leech_utils.download_utils.direct_link_generator import ( - direct_link_generator, +from bot.helper.telegram_helper.message_utils import ( + delete_links, + five_minute_del, + get_tg_link_message, + send_message, ) -@new_task -async def _mirror_leech( - client, message, is_qbit=False, is_leech=False, same_dir=None, bulk=[] -): - await send_react(message) - user = message.from_user or message.sender_chat - user_id = user.id - user_dict = user_data.get(user_id, {}) - text = message.text.split("\n") - input_list = text[0].split(" ") - arg_base = { - "link": "", - "-t": "", - "-m": "", - "-n": "", - "-h": "", - "-u": "", - "-p": "", - "-up": "", - "-rcf": "", - "-id": "", - "-index": "", - "-d": False, - "-j": False, - "-s": False, - "-b": False, - "-e": False, - "-z": False, - "-i": "0", - "-ss": "0", - "-atc": "", - } - - args = arg_parser(input_list[1:], arg_base) - attachment = ( - args["-atc"] - or user_dict.get("attachment", "") - or config_dict["ATTACHMENT_URL"] - ) - i = args["-i"] - link = args["link"] - headers = args["-h"] - folder_name = args["-m"] - seed = args["-d"] - join = args["-j"] - select = args["-s"] - isBulk = args["-b"] - name = args["-n"] - extract = args["-e"] - compress = args["-z"] - up = args["-up"] - thumb = args["-t"] - rcf = args["-rcf"] - drive_id = args["-id"] - index_link = args["-index"] - ss = args["-ss"] - multi = int(i) if i.isdigit() else 0 - sshots = min(int(ss) if ss.isdigit() else 0, 10) - bulk_start = 0 - bulk_end = 0 - ratio = None - seed_time = None - reply_to = None - file_ = None - session = "" - - if link: - if is_magnet(link) or link.endswith(".torrent"): - is_qbit = True - elif not link and (reply_to := message.reply_to_message) and reply_to.text: - reply_text = reply_to.text.split("\n", 1)[0].strip() - if reply_text and is_magnet(reply_text): - is_qbit = True - if reply_to := message.reply_to_message: - file_ = ( - reply_to.document - or reply_to.photo - or reply_to.video - or reply_to.audio - or reply_to.voice - or reply_to.video_note - or reply_to.sticker - or reply_to.animation - or None - ) - if reply_to.document and ( - file_.mime_type == "application/x-bittorrent" - or file_.file_name.endswith(".torrent") - ): - is_qbit = True - if not isinstance(seed, bool): - dargs = seed.split(":") - ratio = dargs[0] or None - if len(dargs) == 2: - seed_time = dargs[1] or None - seed = True - - if not isinstance(isBulk, bool): - dargs = isBulk.split(":") - bulk_start = dargs[0] or None - if len(dargs) == 2: - bulk_end = dargs[1] or None - isBulk = True - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if folder_name and not isBulk: - seed = False +class Mirror(TaskListener): + def __init__( + self, + client, + message, + is_qbit=False, + is_leech=False, + same_dir=None, + bulk=None, + multi_tag=None, + options="", + ): + if same_dir is None: + same_dir = {} + if bulk is None: + bulk = 
[] + self.message = message + self.client = client + self.multi_tag = multi_tag + self.options = options + self.same_dir = same_dir + self.bulk = bulk + super().__init__() + self.is_qbit = is_qbit + self.is_leech = is_leech + + async def new_event(self): + text = self.message.text.split("\n") + input_list = text[0].split(" ") + error_msg, error_button = await error_check(self.message) + if error_msg: + await delete_links(self.message) + error = await send_message(self.message, error_msg, error_button) + return await five_minute_del(error) + user_id = self.message.from_user.id if self.message.from_user else "" + args = { + "-doc": False, + "-med": False, + "-d": False, + "-j": False, + "-s": False, + "-b": False, + "-e": False, + "-z": False, + "-sv": False, + "-ss": False, + "-f": False, + "-fd": False, + "-fu": False, + "-ml": False, + "-i": 0, + "-sp": 0, + "link": "", + "-n": "", + "-m": "", + "-up": "", + "-rcf": "", + "-au": "", + "-ap": "", + "-h": "", + "-t": "", + "-ca": "", + "-cv": "", + "-ns": "", + "-md": "", + "-tl": "", + "-ff": set(), + } + + arg_parser(input_list[1:], args) + + self.select = args["-s"] + self.seed = args["-d"] + self.name = args["-n"] + self.up_dest = args["-up"] + self.rc_flags = args["-rcf"] + self.link = args["link"] + self.compress = args["-z"] + self.extract = args["-e"] + self.join = args["-j"] + self.thumb = args["-t"] + self.split_size = args["-sp"] + self.sample_video = args["-sv"] + self.screen_shots = args["-ss"] + self.force_run = args["-f"] + self.force_download = args["-fd"] + self.force_upload = args["-fu"] + self.convert_audio = args["-ca"] + self.convert_video = args["-cv"] + self.name_sub = args["-ns"] + self.mixed_leech = args["-ml"] + self.thumbnail_layout = args["-tl"] + self.as_doc = args["-doc"] + self.as_med = args["-med"] + self.metadata = args["-md"] + self.folder_name = f"/{args['-m']}" if len(args["-m"]) > 0 else "" + + headers = args["-h"] + is_bulk = args["-b"] + + bulk_start = 0 + bulk_end = 0 ratio = None seed_time = None - folder_name = f"/{folder_name}" - if same_dir is None: - same_dir = {"total": multi, "tasks": set(), "name": folder_name} - same_dir["tasks"].add(message.id) + reply_to = None + file_ = None + session = TgClient.bot - if isBulk: try: - bulk = await extract_bulk_links(message, bulk_start, bulk_end) - if len(bulk) == 0: - raise ValueError("Bulk Empty!") + self.multi = int(args["-i"]) except Exception: - await send_message( - message, - "Reply to text file or tg message that have links seperated by new line!", - ) - return None - b_msg = input_list[:1] - b_msg.append(f"{bulk[0]} -i {len(bulk)}") - nextmsg = await send_message(message, " ".join(b_msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - nextmsg.from_user = message.from_user - _mirror_leech(client, nextmsg, is_qbit, is_leech, same_dir, bulk) - return None + self.multi = 0 - if len(bulk) != 0: - del bulk[0] - - @new_task - async def __run_multi(): - if multi <= 1: - return - await sleep(5) - if len(bulk) != 0: - msg = input_list[:1] - msg.append(f"{bulk[0]} -i {multi - 1}") - nextmsg = await send_message(message, " ".join(msg)) - else: - msg = [s.strip() for s in input_list] - index = msg.index("-i") - msg[index + 1] = f"{multi - 1}" - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1 - ) - nextmsg = await send_message(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - if 
folder_name: - same_dir["tasks"].add(nextmsg.id) - nextmsg.from_user = message.from_user - await sleep(5) - _mirror_leech(client, nextmsg, is_qbit, is_leech, same_dir, bulk) - - __run_multi() - - path = f"/usr/src/app/downloads/{message.id}{folder_name}" - - if len(text) > 1 and text[1].startswith("Tag: "): - tag, id_ = text[1].split("Tag: ")[1].split() - message.from_user = await client.get_users(id_) - with contextlib.suppress(Exception): - await message.unpin() - elif sender_chat := message.sender_chat: - tag = sender_chat.title - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention - if link and is_telegram_link(link): try: - reply_to, session = await get_tg_link_content(link) + if args["-ff"]: + if isinstance(args["-ff"], set): + self.ffmpeg_cmds = args["-ff"] + else: + self.ffmpeg_cmds = eval(args["-ff"]) except Exception as e: - await send_message(message, f"ERROR: {e}") - await delete_links(message) + self.ffmpeg_cmds = None + LOGGER.error(e) + + if not isinstance(self.seed, bool): + dargs = self.seed.split(":") + ratio = dargs[0] or None + if len(dargs) == 2: + seed_time = dargs[1] or None + self.seed = True + + if not isinstance(is_bulk, bool): + dargs = is_bulk.split(":") + bulk_start = dargs[0] or 0 + if len(dargs) == 2: + bulk_end = dargs[1] or 0 + is_bulk = True + + if not is_bulk: + if self.multi > 0: + if self.folder_name: + async with task_dict_lock: + if self.folder_name in self.same_dir: + self.same_dir[self.folder_name]["tasks"].add(self.mid) + for fd_name in self.same_dir: + if fd_name != self.folder_name: + self.same_dir[fd_name]["total"] -= 1 + elif self.same_dir: + self.same_dir[self.folder_name] = { + "total": self.multi, + "tasks": {self.mid}, + } + for fd_name in self.same_dir: + if fd_name != self.folder_name: + self.same_dir[fd_name]["total"] -= 1 + else: + self.same_dir = { + self.folder_name: { + "total": self.multi, + "tasks": {self.mid}, + }, + } + elif self.same_dir: + async with task_dict_lock: + for fd_name in self.same_dir: + self.same_dir[fd_name]["total"] -= 1 + else: + await self.init_bulk(input_list, bulk_start, bulk_end, Mirror) return None - elif not link and (reply_to := message.reply_to_message) and reply_to.text: - reply_text = reply_to.text.split("\n", 1)[0].strip() - if reply_text and is_telegram_link(reply_text): + + if len(self.bulk) != 0: + del self.bulk[0] + + await self.run_multi(input_list, Mirror) + + await self.get_tag(text) + + path = f"{Config.DOWNLOAD_DIR}{self.mid}{self.folder_name}" + + if ( + not self.link + and (reply_to := self.message.reply_to_message) + and reply_to.text + ): + self.link = reply_to.text.split("\n", 1)[0].strip() + if is_telegram_link(self.link): try: - reply_to, session = await get_tg_link_content(reply_text) + reply_to, session = await get_tg_link_message(self.link, user_id) except Exception as e: - await send_message(message, f"ERROR: {e}") - await delete_links(message) - return None - - if reply_to: - file_ = ( - reply_to.document - or reply_to.photo - or reply_to.video - or reply_to.audio - or reply_to.voice - or reply_to.video_note - or reply_to.sticker - or reply_to.animation - or None - ) - - if file_ is None: - reply_text = reply_to.text.split("\n", 1)[0].strip() - if ( - is_url(reply_text) - or is_magnet(reply_text) - or is_rclone_path(reply_text) - ): - link = reply_text - elif reply_to.document and ( - file_.mime_type == "application/x-bittorrent" - or file_.file_name.endswith(".torrent") - ): - link = await reply_to.download() - file_ = None 
- - if ( - not is_url(link) - and not is_magnet(link) - and not await aiopath.exists(link) - and not is_rclone_path(link) - and file_ is None - ): - reply_message = await send_message(message, MIRROR_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) - return None + x = await send_message(self.message, f"ERROR: {e}") + await self.remove_from_same_dir() + await delete_links(self.message) + return await five_minute_del(x) - error_msg = [] - error_button = None - if await nsfw_precheck(message): - error_msg.extend(["NSFW detected"]) - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - if error_msg: - final_msg = f"Hey, {tag}!\n" - for __i, __msg in enumerate(error_msg, 1): - final_msg += f"\n
<blockquote>{__i}: {__msg}</blockquote>
" - if error_button is not None: - error_button = error_button.column(2) - await delete_links(message) - force_m = await send_message(message, final_msg, error_button) - await five_minute_del(force_m) - return None + if isinstance(reply_to, list): + self.bulk = reply_to + b_msg = input_list[:1] + self.options = " ".join(input_list[1:]) + b_msg.append(f"{self.bulk[0]} -i {len(self.bulk)} {self.options}") + nextmsg = await send_message(self.message, " ".join(b_msg)) + nextmsg = await self.client.get_messages( + chat_id=self.message.chat.id, + message_ids=nextmsg.id, + ) + if self.message.from_user: + nextmsg.from_user = self.user + else: + nextmsg.sender_chat = self.user + await Mirror( + self.client, + nextmsg, + self.is_qbit, + self.is_leech, + self.same_dir, + self.bulk, + self.multi_tag, + self.options, + ).new_event() + return await delete_links(self.message) - if ( - not is_mega_link(link) - and not is_qbit - and not is_magnet(link) - and not is_rclone_path(link) - and not is_gdrive_link(link) - and not link.endswith(".torrent") - and file_ is None - ): - content_type = await get_content_type(link) - if content_type is None or re_match(r"text/html|text/plain", content_type): - process_msg = await send_message( - message, f"Processing: {link}" + if reply_to: + file_ = ( + reply_to.document + or reply_to.photo + or reply_to.video + or reply_to.audio + or reply_to.voice + or reply_to.video_note + or reply_to.sticker + or reply_to.animation + or None ) - try: - link = await sync_to_async(direct_link_generator, link) - if isinstance(link, tuple): - link, headers = link - elif isinstance(link, str): - LOGGER.info(f"Generated link: {link}") - except DirectDownloadLinkError as e: - LOGGER.info(str(e)) - if str(e).startswith("ERROR:"): - await edit_message(process_msg, str(e)) - await delete_links(message) - await one_minute_del(process_msg) - return None - await delete_message(process_msg) - - if not is_leech: - if config_dict["DEFAULT_UPLOAD"] == "rc" and not up or up == "rc": - up = config_dict["RCLONE_PATH"] - if not up and config_dict["DEFAULT_UPLOAD"] == "gd": - up = "gd" - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - if drive_id and not await sync_to_async( - GoogleDriveHelper().getFolderData, drive_id + + if file_ is None: + if reply_text := reply_to.text: + self.link = reply_text.split("\n", 1)[0].strip() + else: + reply_to = None + elif reply_to.document and ( + file_.mime_type == "application/x-bittorrent" + or file_.file_name.endswith((".torrent", ".dlc", ".nzb")) ): - return await send_message( - message, "Google Drive ID validation failed!!" - ) - if up == "gd" and not config_dict["GDRIVE_ID"] and not drive_id: - await send_message(message, "GDRIVE_ID not Provided!") - return None - if not up: - await send_message(message, "No Rclone Destination!") - return None - if up not in ["rcl", "gd"]: - if up.startswith("mrcc:"): - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" - if not await aiopath.exists(config_path): - await send_message( - message, f"Rclone Config: {config_path} not Exists!" 
- ) - return None - if up != "gd" and not is_rclone_path(up): - await send_message(message, "Wrong Rclone Upload Destination!") - await delete_links(message) - return None + self.link = await reply_to.download() + file_ = None - if link == "rcl": - link = await RcloneList(client, message).get_rclone_path("rcd") - if not is_rclone_path(link): - await send_message(message, link) - await delete_links(message) - return None + try: + if ( + self.link + and (is_magnet(self.link) or self.link.endswith(".torrent")) + ) or ( + file_ and file_.file_name and file_.file_name.endswith(".torrent") + ): + self.is_qbit = True + except Exception: + pass - if up == "rcl" and not is_leech: - up = await RcloneList(client, message).get_rclone_path("rcu") - if not is_rclone_path(up): - await send_message(message, up) - await delete_links(message) - return None + if ( + (not self.link and file_ is None) + or (is_telegram_link(self.link) and reply_to is None) + or ( + file_ is None + and not is_url(self.link) + and not is_magnet(self.link) + and not await aiopath.exists(self.link) + and not is_rclone_path(self.link) + and not is_gdrive_id(self.link) + and not is_gdrive_link(self.link) + ) + ): + x = await send_message( + self.message, + COMMAND_USAGE["mirror"][0], + COMMAND_USAGE["mirror"][1], + ) + await self.remove_from_same_dir() + await delete_links(self.message) + return await five_minute_del(x) - listener = MirrorLeechListener( - message, - compress, - extract, - is_qbit, - is_leech, - tag, - select, - seed, - same_dir, - rcf, - up, - join, - drive_id=drive_id, - index_link=index_link, - attachment=attachment, - files_utils={"screenshots": sshots, "thumb": thumb}, - ) - - if file_ is not None: - await delete_links(message) - await TelegramDownloadHelper(listener).add_download( - reply_to, f"{path}/", name, session - ) - elif isinstance(link, dict): - await add_direct_download(link, path, listener, name) - elif is_rclone_path(link): - if link.startswith("mrcc:"): - link = link.split("mrcc:", 1)[1] - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" - if not await aiopath.exists(config_path): - await send_message(message, f"Rclone Config: {config_path} not Exists!") - return None - await add_rclone_download(link, config_path, f"{path}/", name, listener) - elif is_gdrive_link(link): - await delete_links(message) - await add_gd_download(link, path, listener, name) - elif is_mega_link(link): - await delete_links(message) - await add_mega_download(link, f"{path}/", listener, name) - elif is_qbit: - await add_qb_torrent(link, path, listener, ratio, seed_time) - LOGGER.info("Downloading with qbitEngine") - else: - ussr = args["-u"] - pssw = args["-p"] - if ussr or pssw: - auth = f"{ussr}:{pssw}" - headers += ( - f" authorization: Basic {b64encode(auth.encode()).decode('ascii')}" + if len(self.link) > 0: + LOGGER.info(self.link) + + try: + await self.before_start() + except Exception as e: + x = await send_message(self.message, e) + await self.remove_from_same_dir() + await delete_links(self.message) + return await five_minute_del(x) + + if ( + not self.is_qbit + and not is_magnet(self.link) + and not is_rclone_path(self.link) + and not is_gdrive_link(self.link) + and not self.link.endswith(".torrent") + and file_ is None + and not is_gdrive_id(self.link) + ): + content_type = await get_content_type(self.link) + if content_type is None or re_match( + r"text/html|text/plain", + content_type, + ): + try: + self.link = await sync_to_async(direct_link_generator, self.link) + if 
isinstance(self.link, tuple): + self.link, headers = self.link + elif isinstance(self.link, str): + LOGGER.info(f"Generated link: {self.link}") + except DirectDownloadLinkException as e: + e = str(e) + if "This link requires a password!" not in e: + LOGGER.info(e) + if e.startswith("ERROR:"): + x = await send_message(self.message, e) + await self.remove_from_same_dir() + await delete_links(self.message) + return await five_minute_del(x) + + if file_ is not None: + create_task( + TelegramDownloadHelper(self).add_download( + reply_to, + f"{path}/", + session, + ), ) - await add_aria2c_download( - link, path, listener, name, headers, ratio, seed_time - ) - await delete_links(message) - return None + elif isinstance(self.link, dict): + create_task(add_direct_download(self, path)) + elif self.is_qbit: + create_task(add_qb_torrent(self, path, ratio, seed_time)) + elif is_rclone_path(self.link): + create_task(add_rclone_download(self, f"{path}/")) + elif is_mega_link(self.link): + create_task(add_mega_download(self, f"{path}/")) + elif is_gdrive_link(self.link) or is_gdrive_id(self.link): + create_task(add_gd_download(self, path)) + else: + ussr = args["-au"] + pssw = args["-ap"] + if ussr or pssw: + auth = f"{ussr}:{pssw}" + headers += f" authorization: Basic {b64encode(auth.encode()).decode('ascii')}" + create_task(add_aria2c_download(self, path, headers, ratio, seed_time)) + await delete_links(self.message) + return None async def mirror(client, message): - _mirror_leech(client, message) + bot_loop.create_task(Mirror(client, message).new_event()) async def leech(client, message): - _mirror_leech(client, message, is_leech=True) - - -bot.add_handler( - MessageHandler( - mirror, filters=command(BotCommands.MirrorCommand) & CustomFilters.authorized - ) -) -bot.add_handler( - MessageHandler( - leech, filters=command(BotCommands.LeechCommand) & CustomFilters.authorized - ) -) + bot_loop.create_task(Mirror(client, message, is_leech=True).new_event()) diff --git a/bot/modules/restart.py b/bot/modules/restart.py new file mode 100644 index 000000000..77ba8198b --- /dev/null +++ b/bot/modules/restart.py @@ -0,0 +1,144 @@ +import contextlib +from asyncio import create_subprocess_exec, gather +from os import execl as osexecl +from sys import executable + +from aiofiles import open as aiopen +from aiofiles.os import path as aiopath +from aiofiles.os import remove + +from bot import LOGGER, intervals, scheduler +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.files_utils import clean_all +from bot.helper.telegram_helper import button_build +from bot.helper.telegram_helper.message_utils import delete_message, send_message + + +@new_task +async def restart_bot(_, message): + buttons = button_build.ButtonMaker() + buttons.data_button("Yes!", "botrestart confirm") + buttons.data_button("Cancel", "botrestart cancel") + button = buttons.build_menu(2) + await send_message( + message, + "Are you sure you want to restart the bot ?!", + button, + ) + + +@new_task +async def restart_sessions(_, message): + buttons = button_build.ButtonMaker() + buttons.data_button("Yes!", "sessionrestart confirm") + buttons.data_button("Cancel", "sessionrestart cancel") + button = buttons.build_menu(2) + await send_message( + message, + "Are you sure you want to restart the session(s) ?!", + button, + ) + + +async def send_incomplete_task_message(cid, 
msg_id, msg): + try: + if msg.startswith("Restarted Successfully!"): + await TgClient.bot.edit_message_text( + chat_id=cid, + message_id=msg_id, + text=msg, + disable_web_page_preview=True, + ) + await remove(".restartmsg") + else: + await TgClient.bot.send_message( + chat_id=cid, + text=msg, + disable_web_page_preview=True, + disable_notification=True, + ) + except Exception as e: + LOGGER.error(e) + + +async def restart_notification(): + if await aiopath.isfile(".restartmsg"): + async with aiopen(".restartmsg") as f: + content = await f.read() + chat_id, msg_id = map(int, content.splitlines()) + else: + chat_id, msg_id = 0, 0 + + if ( + Config.INCOMPLETE_TASK_NOTIFIER + and Config.DATABASE_URL + and (notifier_dict := await database.get_incomplete_tasks()) + ): + for cid, data in notifier_dict.items(): + msg = "Restarted Successfully!" if cid == chat_id else "Bot Restarted!" + for tag, links in data.items(): + msg += f"\n\n{tag}: " + for index, link in enumerate(links, start=1): + msg += f" {index} |" + if len(msg.encode()) > 4000: + await send_incomplete_task_message(cid, msg_id, msg) + msg = "" + if msg: + await send_incomplete_task_message(cid, msg_id, msg) + + if await aiopath.isfile(".restartmsg"): + with contextlib.suppress(Exception): + await TgClient.bot.edit_message_text( + chat_id=chat_id, + message_id=msg_id, + text="Restarted Successfully!", + ) + await remove(".restartmsg") + + +@new_task +async def confirm_restart(_, query): + await query.answer() + data = query.data.split() + message = query.message + await delete_message(message) + if data[1] == "confirm": + reply_to = message.reply_to_message + intervals["stopAll"] = True + restart_message = await send_message(reply_to, "Restarting...") + await delete_message(message) + # await TgClient.stop() + if scheduler.running: + scheduler.shutdown(wait=False) + if qb := intervals["qb"]: + qb.cancel() + if st := intervals["status"]: + for intvl in list(st.values()): + intvl.cancel() + await sync_to_async(clean_all) + proc1 = await create_subprocess_exec( + "pkill", + "-9", + "-f", + "gunicorn|xria|xnox|xtra|xone|7z|split", + ) + proc2 = await create_subprocess_exec("python3", "update.py") + proc3 = await create_subprocess_exec( + "uv", + "pip", + "install", + "-r", + "requirements.txt", + "--system", + "--break-system-packages", + "--upgrade", + ) + await gather(proc1.wait(), proc2.wait(), proc3.wait()) + async with aiopen(".restartmsg", "w") as f: + await f.write(f"{restart_message.chat.id}\n{restart_message.id}\n") + osexecl(executable, executable, "-m", "bot") + else: + await delete_message(message) diff --git a/bot/modules/rss.py b/bot/modules/rss.py new file mode 100644 index 000000000..222802a68 --- /dev/null +++ b/bot/modules/rss.py @@ -0,0 +1,847 @@ +from asyncio import Lock, sleep +from datetime import datetime, timedelta +from functools import partial +from io import BytesIO +from re import IGNORECASE, compile +from time import time + +from apscheduler.triggers.interval import IntervalTrigger +from feedparser import parse as feed_parse +from httpx import AsyncClient +from pyrogram.filters import create +from pyrogram.handlers import MessageHandler + +from bot import LOGGER, rss_dict, scheduler +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import arg_parser, get_size_bytes, new_task +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.exceptions import RssShutdownException +from bot.helper.ext_utils.help_messages import RSS_HELP_MESSAGE +from 
bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.message_utils import ( + delete_message, + edit_message, + send_file, + send_message, + send_rss, +) + +rss_dict_lock = Lock() +handler_dict = {} +size_regex = compile(r"(\d+(\.\d+)?\s?(GB|MB|KB|GiB|MiB|KiB))", IGNORECASE) + +headers = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "Accept-Language": "en-US,en;q=0.5", +} + + +async def rss_menu(event): + user_id = event.from_user.id + buttons = ButtonMaker() + buttons.data_button("Subscribe", f"rss sub {user_id}") + buttons.data_button("Subscriptions", f"rss list {user_id} 0") + buttons.data_button("Get Items", f"rss get {user_id}") + buttons.data_button("Edit", f"rss edit {user_id}") + buttons.data_button("Pause", f"rss pause {user_id}") + buttons.data_button("Resume", f"rss resume {user_id}") + buttons.data_button("Unsubscribe", f"rss unsubscribe {user_id}") + if await CustomFilters.sudo("", event): + buttons.data_button("All Subscriptions", f"rss listall {user_id} 0") + buttons.data_button("Pause All", f"rss allpause {user_id}") + buttons.data_button("Resume All", f"rss allresume {user_id}") + buttons.data_button("Unsubscribe All", f"rss allunsub {user_id}") + buttons.data_button("Delete User", f"rss deluser {user_id}") + if scheduler.running: + buttons.data_button("Shutdown Rss", f"rss shutdown {user_id}") + else: + buttons.data_button("Start Rss", f"rss start {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + button = buttons.build_menu(2) + msg = f"Rss Menu | Users: {len(rss_dict)} | Running: {scheduler.running}" + return msg, button + + +async def update_rss_menu(query): + msg, button = await rss_menu(query) + await edit_message(query.message, msg, button) + + +@new_task +async def get_rss_menu(_, message): + msg, button = await rss_menu(message) + await send_message(message, msg, button) + + +@new_task +async def rss_sub(_, message, pre_event): + user_id = message.from_user.id + handler_dict[user_id] = False + if username := message.from_user.username: + tag = f"@{username}" + else: + tag = message.from_user.mention + msg = "" + items = message.text.split("\n") + for index, item in enumerate(items, start=1): + args = item.split() + if len(args) < 2: + await send_message( + message, + f"{item}. Wrong Input format. Read help message before adding new subcription!", + ) + continue + title = args[0].strip() + if (user_feeds := rss_dict.get(user_id, False)) and title in user_feeds: + await send_message( + message, + f"This title {title} already subscribed! Choose another title!", + ) + continue + feed_link = args[1].strip() + if feed_link.startswith(("-inf", "-exf", "-c")): + await send_message( + message, + f"Wrong input in line {index}! Add Title! 
Read the example!", + ) + continue + inf_lists = [] + exf_lists = [] + if len(args) > 2: + arg_base = {"-c": None, "-inf": None, "-exf": None, "-stv": None} + arg_parser(args[2:], arg_base) + cmd = arg_base["-c"] + inf = arg_base["-inf"] + exf = arg_base["-exf"] + stv = arg_base["-stv"] + if stv is not None: + stv = stv.lower() == "true" + if inf is not None: + filters_list = inf.split("|") + for x in filters_list: + y = x.split(" or ") + inf_lists.append(y) + if exf is not None: + filters_list = exf.split("|") + for x in filters_list: + y = x.split(" or ") + exf_lists.append(y) + else: + inf = None + exf = None + cmd = None + stv = False + try: + async with AsyncClient( + headers=headers, + follow_redirects=True, + timeout=60, + verify=False, + ) as client: + res = await client.get(feed_link) + html = res.text + rss_d = feed_parse(html) + last_title = rss_d.entries[0]["title"] + if rss_d.entries[0].get("size"): + size = int(rss_d.entries[0]["size"]) + elif rss_d.entries[0].get("summary"): + summary = rss_d.entries[0]["summary"] + matches = size_regex.findall(summary) + sizes = [match[0] for match in matches] + size = get_size_bytes(sizes[0]) + else: + size = 0 + msg += "Subscribed!" + msg += ( + f"\nTitle: {title}\nFeed Url: {feed_link}" + ) + msg += f"\nlatest record for {rss_d.feed.title}:" + msg += f"\nName: {last_title.replace('>', '').replace('<', '')}" + try: + last_link = rss_d.entries[0]["links"][1]["href"] + except IndexError: + last_link = rss_d.entries[0]["link"] + msg += f"\nLink: {last_link}" + if size: + msg += f"\nSize: {get_readable_file_size(size)}" + msg += f"\nCommand: {cmd}" + msg += f"\nFilters:-\ninf: {inf}\nexf: {exf}\nsensitive: {stv}" + async with rss_dict_lock: + if rss_dict.get(user_id, False): + rss_dict[user_id][title] = { + "link": feed_link, + "last_feed": last_link, + "last_title": last_title, + "inf": inf_lists, + "exf": exf_lists, + "paused": False, + "command": cmd, + "sensitive": stv, + "tag": tag, + } + else: + rss_dict[user_id] = { + title: { + "link": feed_link, + "last_feed": last_link, + "last_title": last_title, + "inf": inf_lists, + "exf": exf_lists, + "paused": False, + "command": cmd, + "sensitive": stv, + "tag": tag, + }, + } + LOGGER.info( + f"Rss Feed Added: id: {user_id} - title: {title} - link: {feed_link} - c: {cmd} - inf: {inf} - exf: {exf} - stv {stv}", + ) + except (IndexError, AttributeError) as e: + emsg = f"The link: {feed_link} doesn't seem to be a RSS feed or it's region-blocked!" 
+ await send_message(message, emsg + "\nError: " + str(e)) + except Exception as e: + await send_message(message, str(e)) + if msg: + await database.rss_update(user_id) + await send_message(message, msg) + is_sudo = await CustomFilters.sudo("", message) + if scheduler.state == 2: + scheduler.resume() + elif is_sudo and not scheduler.running: + add_job() + scheduler.start() + await update_rss_menu(pre_event) + + +async def get_user_id(title): + async with rss_dict_lock: + return next( + ( + (True, user_id) + for user_id, feed in rss_dict.items() + if feed["title"] == title + ), + (False, False), + ) + + +@new_task +async def rss_update(_, message, pre_event, state): + user_id = message.from_user.id + handler_dict[user_id] = False + titles = message.text.split() + is_sudo = await CustomFilters.sudo("", message) + updated = [] + for title in titles: + title = title.strip() + if not (res := rss_dict[user_id].get(title, False)): + if is_sudo: + res, user_id = await get_user_id(title) + if not res: + user_id = message.from_user.id + await send_message(message, f"{title} not found!") + continue + istate = rss_dict[user_id][title].get("paused", False) + if (istate and state == "pause") or (not istate and state == "resume"): + await send_message(message, f"{title} already {state}d!") + continue + async with rss_dict_lock: + updated.append(title) + if state == "unsubscribe": + del rss_dict[user_id][title] + elif state == "pause": + rss_dict[user_id][title]["paused"] = True + elif state == "resume": + rss_dict[user_id][title]["paused"] = False + if state == "resume": + if scheduler.state == 2: + scheduler.resume() + elif is_sudo and not scheduler.running: + add_job() + scheduler.start() + if is_sudo and Config.DATABASE_URL and user_id != message.from_user.id: + await database.rss_update(user_id) + if not rss_dict[user_id]: + async with rss_dict_lock: + del rss_dict[user_id] + await database.rss_delete(user_id) + if not rss_dict: + await database.trunc_table("rss") + if updated: + LOGGER.info(f"Rss link with Title(s): {updated} has been {state}d!") + await send_message( + message, + f"Rss links with Title(s): {updated} has been {state}d!", + ) + if rss_dict.get(user_id): + await database.rss_update(user_id) + await update_rss_menu(pre_event) + + +async def rss_list(query, start, all_users=False): + user_id = query.from_user.id + buttons = ButtonMaker() + if all_users: + list_feed = f"All subscriptions | Page: {int(start / 5)} " + async with rss_dict_lock: + keysCount = sum(len(v.keys()) for v in rss_dict.values()) + index = 0 + for titles in rss_dict.values(): + for index, (title, data) in enumerate( + list(titles.items())[start : 5 + start], + ): + list_feed += f"\n\nTitle: {title}\n" + list_feed += f"Feed Url: {data['link']}\n" + list_feed += f"Command: {data['command']}\n" + list_feed += f"Inf: {data['inf']}\n" + list_feed += f"Exf: {data['exf']}\n" + list_feed += f"Sensitive: {data.get('sensitive', False)}\n" + list_feed += f"Paused: {data['paused']}\n" + list_feed += f"User: {data['tag'].replace('@', '', 1)}" + index += 1 + if index == 5: + break + else: + list_feed = f"Your subscriptions | Page: {int(start / 5)} " + async with rss_dict_lock: + keysCount = len(rss_dict.get(user_id, {}).keys()) + for title, data in list(rss_dict[user_id].items())[start : 5 + start]: + list_feed += f"\n\nTitle: {title}\nFeed Url: {data['link']}\n" + list_feed += f"Command: {data['command']}\n" + list_feed += f"Inf: {data['inf']}\n" + list_feed += f"Exf: {data['exf']}\n" + list_feed += f"Sensitive: 
{data.get('sensitive', False)}\n" + list_feed += f"Paused: {data['paused']}\n" + buttons.data_button("Back", f"rss back {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + if keysCount > 5: + for x in range(0, keysCount, 5): + buttons.data_button( + f"{int(x / 5)}", + f"rss list {user_id} {x}", + position="footer", + ) + button = buttons.build_menu(2) + if query.message.text.html == list_feed: + return + await edit_message(query.message, list_feed, button) + + +@new_task +async def rss_get(_, message, pre_event): + user_id = message.from_user.id + handler_dict[user_id] = False + args = message.text.split() + if len(args) < 2: + await send_message( + message, + f"{args}. Wrong Input format. You should add number of the items you want to get. Read help message before adding new subcription!", + ) + await update_rss_menu(pre_event) + return + try: + title = args[0] + count = int(args[1]) + data = rss_dict[user_id].get(title, False) + if data and count > 0: + try: + msg = await send_message( + message, + f"Getting the last {count} item(s) from {title}", + ) + async with AsyncClient( + headers=headers, + follow_redirects=True, + timeout=60, + verify=False, + ) as client: + res = await client.get(data["link"]) + html = res.text + rss_d = feed_parse(html) + item_info = "" + for item_num in range(count): + try: + link = rss_d.entries[item_num]["links"][1]["href"] + except IndexError: + link = rss_d.entries[item_num]["link"] + item_info += f"Name: {rss_d.entries[item_num]['title'].replace('>', '').replace('<', '')}\n" + item_info += f"Link: {link}\n\n" + item_info_ecd = item_info.encode() + if len(item_info_ecd) > 4000: + with BytesIO(item_info_ecd) as out_file: + out_file.name = f"rssGet {title} items_no. {count}.txt" + await send_file(message, out_file) + await delete_message(msg) + else: + await edit_message(msg, item_info) + except IndexError as e: + LOGGER.error(str(e)) + await edit_message( + msg, + "Parse depth exceeded. Try again with a lower value.", + ) + except Exception as e: + LOGGER.error(str(e)) + await edit_message(msg, str(e)) + else: + await send_message(message, "Enter a valid title. Title not found!") + except Exception as e: + LOGGER.error(str(e)) + await send_message(message, f"Enter a valid value!. {e}") + await update_rss_menu(pre_event) + + +@new_task +async def rss_edit(_, message, pre_event): + user_id = message.from_user.id + handler_dict[user_id] = False + items = message.text.split("\n") + updated = False + for item in items: + args = item.split() + title = args[0].strip() + if len(args) < 2: + await send_message( + message, + f"{item}. Wrong Input format. Read help message before editing!", + ) + continue + if not rss_dict[user_id].get(title, False): + await send_message(message, "Enter a valid title. 
Title not found!") + continue + updated = True + inf_lists = [] + exf_lists = [] + arg_base = {"-c": None, "-inf": None, "-exf": None, "-stv": None} + arg_parser(args[1:], arg_base) + cmd = arg_base["-c"] + inf = arg_base["-inf"] + exf = arg_base["-exf"] + stv = arg_base["-stv"] + async with rss_dict_lock: + if stv is not None: + stv = stv.lower() == "true" + rss_dict[user_id][title]["sensitive"] = stv + if cmd is not None: + if cmd.lower() == "none": + cmd = None + rss_dict[user_id][title]["command"] = cmd + if inf is not None: + if inf.lower() != "none": + filters_list = inf.split("|") + for x in filters_list: + y = x.split(" or ") + inf_lists.append(y) + rss_dict[user_id][title]["inf"] = inf_lists + if exf is not None: + if exf.lower() != "none": + filters_list = exf.split("|") + for x in filters_list: + y = x.split(" or ") + exf_lists.append(y) + rss_dict[user_id][title]["exf"] = exf_lists + if updated: + await database.rss_update(user_id) + await update_rss_menu(pre_event) + + +@new_task +async def rss_delete(_, message, pre_event): + handler_dict[message.from_user.id] = False + users = message.text.split() + for user in users: + user = int(user) + async with rss_dict_lock: + del rss_dict[user] + await database.rss_delete(user) + await update_rss_menu(pre_event) + + +async def event_handler(client, query, pfunc): + user_id = query.from_user.id + handler_dict[user_id] = True + start_time = time() + + async def event_filter(_, __, event): + user = event.from_user or event.sender_chat + return bool( + user.id == user_id + and event.chat.id == query.message.chat.id + and event.text, + ) + + handler = client.add_handler( + MessageHandler(pfunc, create(event_filter)), + group=-1, + ) + while handler_dict[user_id]: + await sleep(0.5) + if time() - start_time > 60: + handler_dict[user_id] = False + await update_rss_menu(query) + client.remove_handler(*handler) + + +@new_task +async def rss_listener(client, query): + user_id = query.from_user.id + message = query.message + data = query.data.split() + if int(data[2]) != user_id and not await CustomFilters.sudo("", query): + await query.answer( + text="You don't have permission to use these buttons!", + show_alert=True, + ) + elif data[1] == "close": + await query.answer() + handler_dict[user_id] = False + await delete_message(message.reply_to_message) + await delete_message(message) + elif data[1] == "back": + await query.answer() + handler_dict[user_id] = False + await update_rss_menu(query) + elif data[1] == "sub": + await query.answer() + handler_dict[user_id] = False + buttons = ButtonMaker() + buttons.data_button("Back", f"rss back {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + button = buttons.build_menu(2) + await edit_message(message, RSS_HELP_MESSAGE, button) + pfunc = partial(rss_sub, pre_event=query) + await event_handler(client, query, pfunc) + elif data[1] == "list": + handler_dict[user_id] = False + if len(rss_dict.get(int(data[2]), {})) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + else: + await query.answer() + start = int(data[3]) + await rss_list(query, start) + elif data[1] == "get": + handler_dict[user_id] = False + if len(rss_dict.get(int(data[2]), {})) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + else: + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"rss back {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + button = buttons.build_menu(2) + await edit_message( + message, + "Send one title with value 
separated by space get last X items.\nTitle Value\nTimeout: 60 sec.", + button, + ) + pfunc = partial(rss_get, pre_event=query) + await event_handler(client, query, pfunc) + elif data[1] in ["unsubscribe", "pause", "resume"]: + handler_dict[user_id] = False + if len(rss_dict.get(int(data[2]), {})) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + else: + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"rss back {user_id}") + if data[1] == "pause": + buttons.data_button("Pause AllMyFeeds", f"rss uallpause {user_id}") + elif data[1] == "resume": + buttons.data_button("Resume AllMyFeeds", f"rss uallresume {user_id}") + elif data[1] == "unsubscribe": + buttons.data_button("Unsub AllMyFeeds", f"rss uallunsub {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + button = buttons.build_menu(2) + await edit_message( + message, + f"Send one or more rss titles separated by space to {data[1]}.\nTimeout: 60 sec.", + button, + ) + pfunc = partial(rss_update, pre_event=query, state=data[1]) + await event_handler(client, query, pfunc) + elif data[1] == "edit": + handler_dict[user_id] = False + if len(rss_dict.get(int(data[2]), {})) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + else: + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"rss back {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + button = buttons.build_menu(2) + msg = """Send one or more rss titles with new filters or command separated by new line. +Examples: +Title1 -c mirror -up remote:path/subdir -exf none -inf 1080 or 720 -stv true +Title2 -c none -inf none -stv false +Title3 -c mirror -rcf xxx -up xxx -z pswd -stv false +Note: Only what you provide will be edited, the rest will be the same like example 2: exf will stay same as it is. +Timeout: 60 sec. 
Argument -c for command and arguments + """ + await edit_message(message, msg, button) + pfunc = partial(rss_edit, pre_event=query) + await event_handler(client, query, pfunc) + elif data[1].startswith("uall"): + handler_dict[user_id] = False + if len(rss_dict.get(int(data[2]), {})) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + return + await query.answer() + if data[1].endswith("unsub"): + async with rss_dict_lock: + del rss_dict[int(data[2])] + await database.rss_delete(int(data[2])) + await update_rss_menu(query) + elif data[1].endswith("pause"): + async with rss_dict_lock: + for title in list(rss_dict[int(data[2])].keys()): + rss_dict[int(data[2])][title]["paused"] = True + await database.rss_update(int(data[2])) + elif data[1].endswith("resume"): + async with rss_dict_lock: + for title in list(rss_dict[int(data[2])].keys()): + rss_dict[int(data[2])][title]["paused"] = False + if scheduler.state == 2: + scheduler.resume() + await database.rss_update(int(data[2])) + await update_rss_menu(query) + elif data[1].startswith("all"): + if len(rss_dict) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + return + await query.answer() + if data[1].endswith("unsub"): + async with rss_dict_lock: + rss_dict.clear() + await database.trunc_table("rss") + await update_rss_menu(query) + elif data[1].endswith("pause"): + async with rss_dict_lock: + for user in list(rss_dict.keys()): + for title in list(rss_dict[user].keys()): + rss_dict[int(data[2])][title]["paused"] = True + if scheduler.running: + scheduler.pause() + await database.rss_update_all() + elif data[1].endswith("resume"): + async with rss_dict_lock: + for user in list(rss_dict.keys()): + for title in list(rss_dict[user].keys()): + rss_dict[int(data[2])][title]["paused"] = False + if scheduler.state == 2: + scheduler.resume() + elif not scheduler.running: + add_job() + scheduler.start() + await database.rss_update_all() + elif data[1] == "deluser": + if len(rss_dict) == 0: + await query.answer(text="No subscriptions!", show_alert=True) + else: + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"rss back {user_id}") + buttons.data_button("Close", f"rss close {user_id}") + button = buttons.build_menu(2) + msg = "Send one or more user_id separated by space to delete their resources.\nTimeout: 60 sec." + await edit_message(message, msg, button) + pfunc = partial(rss_delete, pre_event=query) + await event_handler(client, query, pfunc) + elif data[1] == "listall": + if not rss_dict: + await query.answer(text="No subscriptions!", show_alert=True) + else: + await query.answer() + start = int(data[3]) + await rss_list(query, start, all_users=True) + elif data[1] == "shutdown": + if scheduler.running: + await query.answer() + scheduler.shutdown(wait=False) + await sleep(0.5) + await update_rss_menu(query) + else: + await query.answer(text="Already Stopped!", show_alert=True) + elif data[1] == "start": + if not scheduler.running: + await query.answer() + add_job() + scheduler.start() + await update_rss_menu(query) + else: + await query.answer(text="Already Running!", show_alert=True) + + +async def rss_monitor(): + chat = Config.RSS_CHAT + if not chat: + LOGGER.warning("RSS_CHAT not added! 
Shutting down rss scheduler...") + scheduler.shutdown(wait=False) + return + if len(rss_dict) == 0: + scheduler.pause() + return + all_paused = True + rss_topic_id = rss_chat_id = None + if isinstance(chat, int): + rss_chat_id = chat + elif "|" in chat: + rss_chat_id, rss_topic_id = [ + int(x) if x.lstrip("-").isdigit() else x for x in chat.split("|", 1) + ] + elif chat.lstrip("-").isdigit(): + rss_chat_id = int(chat) + for user, items in list(rss_dict.items()): + for title, data in items.items(): + try: + if data["paused"]: + continue + tries = 0 + while True: + try: + async with AsyncClient( + headers=headers, + follow_redirects=True, + timeout=60, + verify=False, + ) as client: + res = await client.get(data["link"]) + html = res.text + break + except Exception: + tries += 1 + if tries > 3: + raise + continue + rss_d = feed_parse(html) + try: + last_link = rss_d.entries[0]["links"][1]["href"] + except IndexError: + last_link = rss_d.entries[0]["link"] + finally: + all_paused = False + last_title = rss_d.entries[0]["title"] + if ( + data["last_feed"] == last_link + or data["last_title"] == last_title + ): + continue + feed_count = 0 + while True: + try: + await sleep(10) + except Exception: + raise RssShutdownException("Rss Monitor Stopped!") + try: + item_title = rss_d.entries[feed_count]["title"] + try: + url = rss_d.entries[feed_count]["links"][1]["href"] + except IndexError: + url = rss_d.entries[feed_count]["link"] + if ( + data["last_feed"] == url + or data["last_title"] == item_title + ): + break + if rss_d.entries[feed_count].get("size"): + size = int(rss_d.entries[feed_count]["size"]) + elif rss_d.entries[feed_count].get("summary"): + summary = rss_d.entries[feed_count]["summary"] + matches = size_regex.findall(summary) + sizes = [match[0] for match in matches] + size = get_size_bytes(sizes[0]) + else: + size = 0 + except IndexError: + LOGGER.warning( + f"Reached Max index no. {feed_count} for this feed: {title}. 
Maybe you need to use less RSS_DELAY to not miss some torrents", + ) + break + parse = True + for flist in data["inf"]: + if ( + data.get("sensitive", False) + and all( + x.lower() not in item_title.lower() for x in flist + ) + ) or ( + not data.get("sensitive", False) + and all(x not in item_title for x in flist) + ): + parse = False + feed_count += 1 + break + if not parse: + continue + for flist in data["exf"]: + if ( + data.get("sensitive", False) + and any(x.lower() in item_title.lower() for x in flist) + ) or ( + not data.get("sensitive", False) + and any(x in item_title for x in flist) + ): + parse = False + feed_count += 1 + break + if not parse: + continue + if command := data["command"]: + if ( + size + and Config.RSS_SIZE_LIMIT + and size > Config.RSS_SIZE_LIMIT + ): + feed_count += 1 + continue + cmd = command.split(maxsplit=1) + cmd.insert(1, url) + feed_msg = " ".join(cmd) + if not feed_msg.startswith("/"): + feed_msg = f"/{feed_msg}" + else: + feed_msg = f"Name: {item_title.replace('>', '').replace('<', '')}" + feed_msg += f"\n\nLink: {url}" + if size: + feed_msg += ( + f"\nSize: {get_readable_file_size(size)}" + ) + feed_msg += f"\nTag: {data['tag']} {user}" + await send_rss(feed_msg, rss_chat_id, rss_topic_id) + feed_count += 1 + async with rss_dict_lock: + if user not in rss_dict or not rss_dict[user].get(title, False): + continue + rss_dict[user][title].update( + {"last_feed": last_link, "last_title": last_title}, + ) + await database.rss_update(user) + LOGGER.info(f"Feed Name: {title}") + LOGGER.info(f"Last item: {last_link}") + except RssShutdownException as ex: + LOGGER.info(ex) + break + except Exception as e: + LOGGER.error(f"{e} - Feed Name: {title} - Feed Link: {data['link']}") + continue + if all_paused: + scheduler.pause() + + +def add_job(): + scheduler.add_job( + rss_monitor, + trigger=IntervalTrigger(seconds=Config.RSS_DELAY), + id="0", + name="RSS", + misfire_grace_time=15, + max_instances=1, + next_run_time=datetime.now() + timedelta(seconds=20), + replace_existing=True, + ) + + +add_job() +scheduler.start() diff --git a/bot/modules/search.py b/bot/modules/search.py new file mode 100644 index 000000000..fb5cc927a --- /dev/null +++ b/bot/modules/search.py @@ -0,0 +1,324 @@ +import contextlib +from html import escape +from urllib.parse import quote + +from httpx import AsyncClient + +from bot import LOGGER, xnox_client +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.status_utils import get_readable_file_size +from bot.helper.ext_utils.telegraph_helper import telegraph +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.message_utils import edit_message, send_message + +PLUGINS = [] +SITES = None +TELEGRAPH_LIMIT = 300 + + +async def initiate_search_tools(): + qb_plugins = await sync_to_async(xnox_client.search_plugins) + if Config.SEARCH_PLUGINS: + globals()["PLUGINS"] = [] + if qb_plugins: + names = [plugin["name"] for plugin in qb_plugins] + await sync_to_async( + xnox_client.search_uninstall_plugin, + names=names, + ) + await sync_to_async( + xnox_client.search_install_plugin, + Config.SEARCH_PLUGINS, + ) + elif qb_plugins: + for plugin in qb_plugins: + await sync_to_async( + xnox_client.search_uninstall_plugin, + names=plugin["name"], + ) + globals()["PLUGINS"] = [] + + if Config.SEARCH_API_LINK: + global SITES + try: + async with AsyncClient() as client: + response = await 
client.get(f"{Config.SEARCH_API_LINK}/api/v1/sites") + data = response.json() + SITES = { + str(site): str(site).capitalize() for site in data["supported_sites"] + } + SITES["all"] = "All" + except Exception as e: + LOGGER.error( + f"{e} Can't fetching sites from SEARCH_API_LINK make sure use latest version of API", + ) + SITES = None + + +async def search(key, site, message, method): + if method.startswith("api"): + if method == "apisearch": + LOGGER.info(f"API Searching: {key} from {site}") + if site == "all": + api = f"{Config.SEARCH_API_LINK}/api/v1/all/search?query={key}&limit={Config.SEARCH_LIMIT}" + else: + api = f"{Config.SEARCH_API_LINK}/api/v1/search?site={site}&query={key}&limit={Config.SEARCH_LIMIT}" + elif method == "apitrend": + LOGGER.info(f"API Trending from {site}") + if site == "all": + api = f"{Config.SEARCH_API_LINK}/api/v1/all/trending?limit={Config.SEARCH_LIMIT}" + else: + api = f"{Config.SEARCH_API_LINK}/api/v1/trending?site={site}&limit={Config.SEARCH_LIMIT}" + elif method == "apirecent": + LOGGER.info(f"API Recent from {site}") + if site == "all": + api = f"{Config.SEARCH_API_LINK}/api/v1/all/recent?limit={Config.SEARCH_LIMIT}" + else: + api = f"{Config.SEARCH_API_LINK}/api/v1/recent?site={site}&limit={Config.SEARCH_LIMIT}" + try: + async with AsyncClient() as client: + response = await client.get(api) + search_results = response.json() + if "error" in search_results or search_results["total"] == 0: + await edit_message( + message, + f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", + ) + return + msg = f"Found {min(search_results['total'], TELEGRAPH_LIMIT)}" + if method == "apitrend": + msg += f" trending result(s)\nTorrent Site:- {SITES.get(site)}" + elif method == "apirecent": + msg += f" recent result(s)\nTorrent Site:- {SITES.get(site)}" + else: + msg += f" result(s) for {key}\nTorrent Site:- {SITES.get(site)}" + search_results = search_results["data"] + except Exception as e: + await edit_message(message, str(e)) + return + else: + LOGGER.info(f"PLUGINS Searching: {key} from {site}") + search = await sync_to_async( + xnox_client.search_start, + pattern=key, + plugins=site, + category="all", + ) + search_id = search.id + while True: + result_status = await sync_to_async( + xnox_client.search_status, + search_id=search_id, + ) + status = result_status[0].status + if status != "Running": + break + dict_search_results = await sync_to_async( + xnox_client.search_results, + search_id=search_id, + limit=TELEGRAPH_LIMIT, + ) + search_results = dict_search_results.results + total_results = dict_search_results.total + if total_results == 0: + await edit_message( + message, + f"No result found for {key}\nTorrent Site:- {site.capitalize()}", + ) + return + msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}" + msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}" + await sync_to_async(xnox_client.search_delete, search_id=search_id) + link = await get_result(search_results, key, message, method) + buttons = ButtonMaker() + buttons.url_button("🔎 VIEW", link) + button = buttons.build_menu(1) + await edit_message(message, msg, button) + + +async def get_result(search_results, key, message, method): + telegraph_content = [] + if method == "apirecent": + msg = "

API Recent Results

" + elif method == "apisearch": + msg = f"

API Search Result(s) For {key}

" + elif method == "apitrend": + msg = "

API Trending Results

" + else: + msg = f"

PLUGINS Search Result(s) For {key}

" + for index, result in enumerate(search_results, start=1): + if method.startswith("api"): + try: + if "name" in result: + msg += f"{escape(result['name'])}
" + if "torrents" in result: + for subres in result["torrents"]: + msg += f"Quality: {subres['quality']} | Type: {subres['type']} | " + msg += f"Size: {subres['size']}
" + if "torrent" in subres: + msg += ( + f"Direct Link
" + ) + elif "magnet" in subres: + msg += "Share Magnet to " + msg += f"Telegram
" + msg += "
" + else: + msg += f"Size: {result['size']}
" + with contextlib.suppress(Exception): + msg += f"Seeders: {result['seeders']} | Leechers: {result['leechers']}
" + if "torrent" in result: + msg += ( + f"Direct Link

" + ) + elif "magnet" in result: + msg += "Share Magnet to " + msg += f"Telegram

" + else: + msg += "
" + except Exception: + continue + else: + msg += f"{escape(result.fileName)}
" + msg += f"Size: {get_readable_file_size(result.fileSize)}
" + msg += f"Seeders: {result.nbSeeders} | Leechers: {result.nbLeechers}
" + link = result.fileUrl + if link.startswith("magnet:"): + msg += f"Share Magnet to Telegram

" + else: + msg += f"Direct Link

" + + if len(msg.encode("utf-8")) > 39000: + telegraph_content.append(msg) + msg = "" + + if index == TELEGRAPH_LIMIT: + break + + if msg != "": + telegraph_content.append(msg) + + await edit_message( + message, + f"Creating {len(telegraph_content)} Telegraph pages.", + ) + path = [ + ( + await telegraph.create_page( + title="Mirror-leech-bot Torrent Search", + content=content, + ) + )["path"] + for content in telegraph_content + ] + if len(path) > 1: + await edit_message( + message, + f"Editing {len(telegraph_content)} Telegraph pages.", + ) + await telegraph.edit_telegraph(path, telegraph_content) + return f"https://telegra.ph/{path[0]}" + + +def api_buttons(user_id, method): + buttons = ButtonMaker() + for data, name in SITES.items(): + buttons.data_button(name, f"torser {user_id} {data} {method}") + buttons.data_button("Cancel", f"torser {user_id} cancel") + return buttons.build_menu(2) + + +async def plugin_buttons(user_id): + buttons = ButtonMaker() + if not PLUGINS: + pl = await sync_to_async(xnox_client.search_plugins) + for name in pl: + PLUGINS.append(name["name"]) + for siteName in PLUGINS: + buttons.data_button( + siteName.capitalize(), + f"torser {user_id} {siteName} plugin", + ) + buttons.data_button("All", f"torser {user_id} all plugin") + buttons.data_button("Cancel", f"torser {user_id} cancel") + return buttons.build_menu(2) + + +@new_task +async def torrent_search(_, message): + user_id = message.from_user.id + buttons = ButtonMaker() + key = message.text.split() + if SITES is None and not Config.SEARCH_PLUGINS: + await send_message( + message, + "No API link or search PLUGINS added for this function", + ) + elif len(key) == 1 and SITES is None: + await send_message(message, "Send a search key along with command") + elif len(key) == 1: + buttons.data_button("Trending", f"torser {user_id} apitrend") + buttons.data_button("Recent", f"torser {user_id} apirecent") + buttons.data_button("Cancel", f"torser {user_id} cancel") + button = buttons.build_menu(2) + await send_message(message, "Send a search key along with command", button) + elif SITES is not None and Config.SEARCH_PLUGINS: + buttons.data_button("Api", f"torser {user_id} apisearch") + buttons.data_button("Plugins", f"torser {user_id} plugin") + buttons.data_button("Cancel", f"torser {user_id} cancel") + button = buttons.build_menu(2) + await send_message(message, "Choose tool to search:", button) + elif SITES is not None: + button = api_buttons(user_id, "apisearch") + await send_message(message, "Choose site to search | API:", button) + else: + button = await plugin_buttons(user_id) + await send_message(message, "Choose site to search | Plugins:", button) + + +@new_task +async def torrent_search_update(_, query): + user_id = query.from_user.id + message = query.message + key = message.reply_to_message.text.split(maxsplit=1) + key = key[1].strip() if len(key) > 1 else None + data = query.data.split() + if user_id != int(data[1]): + await query.answer("Not Yours!", show_alert=True) + elif data[2].startswith("api"): + await query.answer() + button = api_buttons(user_id, data[2]) + await edit_message(message, "Choose site:", button) + elif data[2] == "plugin": + await query.answer() + button = await plugin_buttons(user_id) + await edit_message(message, "Choose site:", button) + elif data[2] != "cancel": + await query.answer() + site = data[2] + method = data[3] + if method.startswith("api"): + if key is None: + if method == "apirecent": + endpoint = "Recent" + elif method == "apitrend": + endpoint = "Trending" + await 
edit_message( + message, + f"Listing {endpoint} Items...\nTorrent Site:- {SITES.get(site)}", + ) + else: + await edit_message( + message, + f"Searching for {key}\nTorrent Site:- {SITES.get(site)}", + ) + else: + await edit_message( + message, + f"Searching for {key}\nTorrent Site:- {site.capitalize()}", + ) + await search(key, site, message, method) + else: + await query.answer() + await edit_message(message, "Search has been canceled!") diff --git a/bot/modules/services.py b/bot/modules/services.py new file mode 100644 index 000000000..f9611181f --- /dev/null +++ b/bot/modules/services.py @@ -0,0 +1,135 @@ +from html import escape +from time import time +from uuid import uuid4 + +from aiofiles import open as aiopen + +from bot import LOGGER, user_data +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.status_utils import get_readable_time +from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.button_build import ButtonMaker +from bot.helper.telegram_helper.filters import CustomFilters +from bot.helper.telegram_helper.message_utils import ( + delete_message, + edit_message, + five_minute_del, + send_file, + send_message, +) + + +@new_task +async def start(client, message): + if len(message.command) > 1 and message.command[1] == "private": + await delete_message(message) + elif len(message.command) > 1 and len(message.command[1]) == 36: + userid = message.from_user.id + input_token = message.command[1] + stored_token = await database.get_user_token(userid) + if stored_token is None: + return await send_message( + message, + "This token is not for you!\n\nPlease generate your own.", + ) + if input_token != stored_token: + return await send_message( + message, + "Invalid token.\n\nPlease generate a new one.", + ) + if userid not in user_data: + return await send_message( + message, + "This token is not yours!\n\nKindly generate your own.", + ) + data = user_data[userid] + if "token" not in data or data["token"] != input_token: + return await send_message( + message, + "This token has already been used!\n\nPlease get a new one.", + ) + token = str(uuid4()) + token_time = time() + data["token"] = token + data["time"] = token_time + user_data[userid].update(data) + await database.update_user_tdata(userid, token, token_time) + msg = "Your token has been successfully generated!\n\n" + msg += f"It will be valid for {get_readable_time(int(Config.TOKEN_TIMEOUT), True)}" + return await send_message(message, msg) + elif await CustomFilters.authorized(client, message): + help_command = f"/{BotCommands.HelpCommand}" + start_string = f"This bot can mirror all your links|files|torrents to Google Drive or any rclone cloud or to telegram.\nType {help_command} to get a list of available commands" + await send_message(message, start_string) + else: + await send_message(message, "You are not a authorized user!") + await database.update_pm_users(message.from_user.id) + return None + + +@new_task +async def ping(_, message): + start_time = int(round(time() * 1000)) + reply = await send_message(message, "Starting Ping") + end_time = int(round(time() * 1000)) + await edit_message(reply, f"{end_time - start_time} ms") + + +@new_task +async def log(_, message): + buttons = ButtonMaker() + buttons.data_button("View log", f"log {message.from_user.id} view") + reply_message = await send_file( + message, + "log.txt", + buttons=buttons.build_menu(1), + ) 
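+    # Flow note: send_file above attaches the full log.txt as a document,
+    # while the inline "View log" button is answered by log_callback below,
+    # which renders roughly the last 3500 characters of the log as
+    # preformatted text before the reply is auto-deleted.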
+    await delete_message(message)
+    await five_minute_del(reply_message)
+
+
+@new_task
+async def log_callback(_, query):
+    message = query.message
+    user_id = query.from_user.id
+    data = query.data.split()
+    if user_id != int(data[1]):
+        return await query.answer(text="This message is not yours!", show_alert=True)
+    if data[2] == "view":
+        await query.answer()
+        async with aiopen("log.txt") as f:
+            log_file_lines = (await f.read()).splitlines()
+
+        def parseline(line):
+            try:
+                return line.split("] ", 1)[1]
+            except IndexError:
+                return line
+
+        ind, log_lines = 1, ""
+        try:
+            while len(log_lines) <= 3500:
+                log_lines = parseline(log_file_lines[-ind]) + "\n" + log_lines
+                if ind == len(log_file_lines):
+                    break
+                ind += 1
+            start_line = "<pre language='python'>"
+            end_line = "</pre>
" + btn = ButtonMaker() + btn.data_button("Close", f"log {user_id} close") + reply_message = await send_message( + message, + start_line + escape(log_lines) + end_line, + btn.build_menu(1), + ) + await query.edit_message_reply_markup(None) + await delete_message(message) + await five_minute_del(reply_message) + except Exception as err: + LOGGER.error(f"TG Log Display : {err!s}") + else: + await query.answer() + await delete_message(message) + return None diff --git a/bot/modules/shell.py b/bot/modules/shell.py index 99957ad1c..455a97b9e 100644 --- a/bot/modules/shell.py +++ b/bot/modules/shell.py @@ -1,48 +1,30 @@ from io import BytesIO -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler, EditedMessageHandler - -from bot import bot +from bot import LOGGER from bot.helper.ext_utils.bot_utils import cmd_exec, new_task -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.message_utils import sendFile, send_message +from bot.helper.telegram_helper.message_utils import send_file, send_message @new_task -async def shell(_, message): +async def run_shell(_, message): cmd = message.text.split(maxsplit=1) if len(cmd) == 1: - await send_message(message, "No command to execute was provided.") + await send_message(message, "No command to execute was given.") return cmd = cmd[1] stdout, stderr, _ = await cmd_exec(cmd, shell=True) reply = "" - if len(stdout) != 0: - reply += f"Stdout\n
<pre>{stdout}</pre>\n" - + reply += f"*Stdout*\n{stdout}\n" + LOGGER.info(f"Shell - {cmd} - {stdout}") if len(stderr) != 0: - reply += f"Stderr\n<pre>{stderr}</pre>
" - + reply += f"*Stderr*\n{stderr}" + LOGGER.error(f"Shell - {cmd} - {stderr}") if len(reply) > 3000: with BytesIO(str.encode(reply)) as out_file: out_file.name = "shell_output.txt" - await sendFile(message, out_file) + await send_file(message, out_file) elif len(reply) != 0: await send_message(message, reply) else: await send_message(message, "No Reply") - - -bot.add_handler( - MessageHandler( - shell, filters=command(BotCommands.ShellCommand) & CustomFilters.sudo - ) -) -bot.add_handler( - EditedMessageHandler( - shell, filters=command(BotCommands.ShellCommand) & CustomFilters.sudo - ) -) diff --git a/bot/modules/speedtest.py b/bot/modules/speedtest.py index 53d5dce67..7512850bd 100644 --- a/bot/modules/speedtest.py +++ b/bot/modules/speedtest.py @@ -1,15 +1,13 @@ from speedtest import Speedtest -from pyrogram.filters import command -from pyrogram.handlers import MessageHandler -from bot import LOGGER, bot -from bot.helper.ext_utils.bot_utils import new_task, get_readable_file_size -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands +from bot import LOGGER +from bot.core.aeon_client import TgClient +from bot.helper.ext_utils.bot_utils import new_task +from bot.helper.ext_utils.status_utils import get_readable_file_size from bot.helper.telegram_helper.message_utils import ( + delete_message, edit_message, send_message, - delete_message, ) @@ -24,7 +22,7 @@ def get_speedtest_results(): test.upload() return test.results - result = await bot.loop.run_in_executor(None, get_speedtest_results) + result = await TgClient.bot.loop.run_in_executor(None, get_speedtest_results) if not result: await edit_message(speed, "Speedtest failed to complete.") @@ -42,11 +40,3 @@ def get_speedtest_results(): except Exception as e: LOGGER.error(str(e)) await edit_message(speed, string_speed) - - -bot.add_handler( - MessageHandler( - speedtest, - filters=command(BotCommands.SpeedCommand) & CustomFilters.authorized, - ) -) diff --git a/bot/modules/stats.py b/bot/modules/stats.py new file mode 100644 index 000000000..617e5bf99 --- /dev/null +++ b/bot/modules/stats.py @@ -0,0 +1,107 @@ +from asyncio import gather +from re import search as research +from time import time + +from aiofiles.os import path as aiopath +from psutil import ( + boot_time, + cpu_count, + cpu_percent, + disk_usage, + net_io_counters, + swap_memory, + virtual_memory, +) + +from bot import bot_start_time +from bot.helper.ext_utils.bot_utils import cmd_exec, new_task +from bot.helper.ext_utils.status_utils import ( + get_readable_file_size, + get_readable_time, +) +from bot.helper.telegram_helper.message_utils import ( + delete_message, + one_minute_del, + send_message, +) + +commands = { + "aria2": (["xria", "--version"], r"aria2 version ([\d.]+)"), + "qBittorrent": (["xnox", "--version"], r"qBittorrent v([\d.]+)"), + "python": (["python3", "--version"], r"Python ([\d.]+)"), + "rclone": (["xone", "--version"], r"rclone v([\d.]+)"), + "yt-dlp": (["yt-dlp", "--version"], r"([\d.]+)"), + "ffmpeg": (["xtra", "-version"], r"ffmpeg version (n[\d.]+)"), + "7z": (["7z", "i"], r"7-Zip ([\d.]+)"), +} + + +@new_task +async def bot_stats(_, message): + total, used, free, disk = disk_usage("/") + swap = swap_memory() + memory = virtual_memory() + stats = f""" +Commit Date: {commands["commit"]} + +Bot Uptime: {get_readable_time(time() - bot_start_time)} +OS Uptime: {get_readable_time(time() - boot_time())} + +Total Disk Space: {get_readable_file_size(total)} +Used: 
{get_readable_file_size(used)} | Free: {get_readable_file_size(free)} + +Upload: {get_readable_file_size(net_io_counters().bytes_sent)} +Download: {get_readable_file_size(net_io_counters().bytes_recv)} + +CPU: {cpu_percent(interval=0.5)}% +RAM: {memory.percent}% +DISK: {disk}% + +Physical Cores: {cpu_count(logical=False)} +Total Cores: {cpu_count(logical=True)} +SWAP: {get_readable_file_size(swap.total)} | Used: {swap.percent}% + +Memory Total: {get_readable_file_size(memory.total)} +Memory Free: {get_readable_file_size(memory.available)} +Memory Used: {get_readable_file_size(memory.used)} + +python: {commands["python"]} +aria2: {commands["aria2"]} +qBittorrent: {commands["qBittorrent"]} +rclone: {commands["rclone"]} +yt-dlp: {commands["yt-dlp"]} +ffmpeg: {commands["ffmpeg"]} +7z: {commands["7z"]} +""" + reply_message = await send_message(message, stats) + await delete_message(message) + await one_minute_del(reply_message) + + +async def get_version_async(command, regex): + try: + out, err, code = await cmd_exec(command) + if code != 0: + return f"Error: {err}" + match = research(regex, out) + return match.group(1) if match else "Version not found" + except Exception as e: + return f"Exception: {e!s}" + + +@new_task +async def get_packages_version(): + tasks = [ + get_version_async(command, regex) for command, regex in commands.values() + ] + versions = await gather(*tasks) + commands.update(dict(zip(commands.keys(), versions, strict=False))) + if await aiopath.exists(".git"): + last_commit = await cmd_exec( + "git log -1 --date=short --pretty=format:'%cd From %cr'", + True, + ) + last_commit = last_commit[0] + else: + last_commit = "No UPSTREAM_REPO" + commands["commit"] = last_commit diff --git a/bot/modules/status.py b/bot/modules/status.py index ecb5dd256..6c5c5a890 100644 --- a/bot/modules/status.py +++ b/bot/modules/status.py @@ -1,74 +1,147 @@ from time import time -from psutil import disk_usage -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler +from psutil import cpu_percent, disk_usage, virtual_memory -from bot import ( - Interval, - bot, - download_dict, - bot_start_time, - download_dict_lock, - status_reply_dict_lock, -) -from bot.helper.ext_utils.bot_utils import ( - SetInterval, - new_task, - turn_page, - get_readable_time, +from bot import bot_start_time, intervals, status_dict, task_dict, task_dict_lock +from bot.core.config_manager import Config +from bot.helper.ext_utils.bot_utils import new_task, sync_to_async +from bot.helper.ext_utils.status_utils import ( + MirrorStatus, get_readable_file_size, + get_readable_time, + speed_string_to_bytes, ) -from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( - send_message, + auto_delete_message, delete_message, - one_minute_del, - sendStatusMessage, - update_all_messages, + edit_message, + send_message, + send_status_message, + update_status_message, ) @new_task -async def mirror_status(_, message): - async with download_dict_lock: - count = len(download_dict) - +async def task_status(_, message): + async with task_dict_lock: + count = len(task_dict) if count == 0: - current_time = get_readable_time(time() - bot_start_time) - free = get_readable_file_size(disk_usage("/usr/src/app/downloads/").free) - msg = "No downloads are currently in progress.\n" - msg += f"\n• Bot uptime: 
{current_time}" - msg += f"\n• Free disk space: {free}" - + currentTime = get_readable_time(time() - bot_start_time) + free = get_readable_file_size(disk_usage(Config.DOWNLOAD_DIR).free) + msg = f"No Active Tasks!\nEach user can get status for his tasks by adding me or user_id after cmd: /{BotCommands.StatusCommand} me" + msg += ( + f"\nCPU: {cpu_percent()}% | FREE: {free}" + f"\nRAM: {virtual_memory().percent}% | UPTIME: {currentTime}" + ) reply_message = await send_message(message, msg) - await delete_message(message) - await one_minute_del(reply_message) + await auto_delete_message(message, reply_message) else: - await sendStatusMessage(message) + text = message.text.split() + if len(text) > 1: + user_id = message.from_user.id if text[1] == "me" else int(text[1]) + else: + user_id = 0 + sid = message.chat.id + if obj := intervals["status"].get(sid): + obj.cancel() + del intervals["status"][sid] + await send_status_message(message, user_id) await delete_message(message) - async with status_reply_dict_lock: - if Interval: - Interval[0].cancel() - Interval.clear() - Interval.append(SetInterval(1, update_all_messages)) @new_task async def status_pages(_, query): - await query.answer() data = query.data.split() - if data[1] == "ref": - await update_all_messages(True) - else: - await turn_page(data) + key = int(data[1]) + if data[2] == "ref": + await query.answer() + await update_status_message(key, force=True) + elif data[2] in ["nex", "pre"]: + await query.answer() + async with task_dict_lock: + if data[2] == "nex": + status_dict[key]["page_no"] += status_dict[key]["page_step"] + else: + status_dict[key]["page_no"] -= status_dict[key]["page_step"] + elif data[2] == "ps": + await query.answer() + async with task_dict_lock: + status_dict[key]["page_step"] = int(data[3]) + elif data[2] == "st": + await query.answer() + async with task_dict_lock: + status_dict[key]["status"] = data[3] + await update_status_message(key, force=True) + elif data[2] == "ov": + message = query.message + tasks = { + "Download": 0, + "Upload": 0, + "Seed": 0, + "Archive": 0, + "Extract": 0, + "Split": 0, + "QueueDl": 0, + "QueueUp": 0, + "Clone": 0, + "CheckUp": 0, + "Pause": 0, + "SamVid": 0, + "ConvertMedia": 0, + "FFmpeg": 0, + } + dl_speed = 0 + up_speed = 0 + seed_speed = 0 + async with task_dict_lock: + for download in task_dict.values(): + match await sync_to_async(download.status): + case MirrorStatus.STATUS_DOWNLOAD: + tasks["Download"] += 1 + dl_speed += speed_string_to_bytes(download.speed()) + case MirrorStatus.STATUS_UPLOAD: + tasks["Upload"] += 1 + up_speed += speed_string_to_bytes(download.speed()) + case MirrorStatus.STATUS_SEED: + tasks["Seed"] += 1 + seed_speed += speed_string_to_bytes(download.seed_speed()) + case MirrorStatus.STATUS_ARCHIVE: + tasks["Archive"] += 1 + case MirrorStatus.STATUS_EXTRACT: + tasks["Extract"] += 1 + case MirrorStatus.STATUS_SPLIT: + tasks["Split"] += 1 + case MirrorStatus.STATUS_QUEUEDL: + tasks["QueueDl"] += 1 + case MirrorStatus.STATUS_QUEUEUP: + tasks["QueueUp"] += 1 + case MirrorStatus.STATUS_CLONE: + tasks["Clone"] += 1 + case MirrorStatus.STATUS_CHECK: + tasks["CheckUp"] += 1 + case MirrorStatus.STATUS_PAUSED: + tasks["Pause"] += 1 + case MirrorStatus.STATUS_SAMVID: + tasks["SamVid"] += 1 + case MirrorStatus.STATUS_CONVERT: + tasks["ConvertMedia"] += 1 + case MirrorStatus.STATUS_FFMPEG: + tasks["FFMPEG"] += 1 + case _: + tasks["Download"] += 1 + dl_speed += speed_string_to_bytes(download.speed()) + msg = f"""DL: {tasks["Download"]} | UP: {tasks["Upload"]} | SD: 
{tasks["Seed"]} | AR: {tasks["Archive"]} +EX: {tasks["Extract"]} | SP: {tasks["Split"]} | QD: {tasks["QueueDl"]} | QU: {tasks["QueueUp"]} +CL: {tasks["Clone"]} | CK: {tasks["CheckUp"]} | PA: {tasks["Pause"]} | SV: {tasks["SamVid"]} +CM: {tasks["ConvertMedia"]} | FF: {tasks["FFmpeg"]} -bot.add_handler( - MessageHandler( - mirror_status, - filters=command(BotCommands.StatusCommand) & CustomFilters.authorized, - ) -) -bot.add_handler(CallbackQueryHandler(status_pages, filters=regex("^status"))) +ODLS: {get_readable_file_size(dl_speed)}/s +OULS: {get_readable_file_size(up_speed)}/s +OSDS: {get_readable_file_size(seed_speed)}/s +""" + button = ButtonMaker() + button.data_button("Back", f"status {data[1]} ref") + await edit_message(message, msg, button.build_menu()) diff --git a/bot/modules/torrent_search.py b/bot/modules/torrent_search.py deleted file mode 100644 index 7e7d8017d..000000000 --- a/bot/modules/torrent_search.py +++ /dev/null @@ -1,367 +0,0 @@ -import contextlib -from html import escape -from urllib.parse import quote - -from aiohttp import ClientSession -from pyrogram.filters import regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler - -from bot import LOGGER, bot, config_dict, xnox_client -from bot.helper.ext_utils.bot_utils import ( - new_task, - new_thread, - sync_to_async, - checking_access, - get_readable_file_size, -) -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.ext_utils.telegraph_helper import telegraph -from bot.helper.telegram_helper.bot_commands import BotCommands -from bot.helper.telegram_helper.button_build import ButtonMaker -from bot.helper.telegram_helper.message_utils import ( - isAdmin, - delete_links, - edit_message, - send_message, - one_minute_del, - five_minute_del, -) - -PLUGINS = [ - "piratebay", - "limetorrents", - "torrentscsv", - "torlock", - "eztv", - "solidtorrents", - "yts_am", - "nyaasi", - "ettv", - "thepiratebay", - "magnetdl", - "uniondht", - "yts", -] -SITES = None -TELEGRAPH_LIMIT = 300 -src_plugins = { - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py", - "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/solidtorrents.py", - "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py", - "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py", - "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py", - "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py", - "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py", - "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py", - "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py", -} - - -async def initiate_search_tools(): - qb_plugins = await sync_to_async(xnox_client.search_plugins) - if qb_plugins: - names = [plugin["name"] for plugin in qb_plugins] - await sync_to_async(xnox_client.search_uninstall_plugin, names=names) - await 
sync_to_async(xnox_client.search_install_plugin, src_plugins) - - if SEARCH_API_LINK := config_dict["SEARCH_API_LINK"]: - global SITES # noqa: PLW0603 - try: - async with ( - ClientSession(trust_env=True) as c, - c.get(f"{SEARCH_API_LINK}/api/v1/sites") as res, - ): - data = await res.json() - SITES = { - str(site): str(site).capitalize() for site in data["supported_sites"] - } - SITES["all"] = "All" - except Exception as e: - LOGGER.error( - f"{e} Can't fetching sites from SEARCH_API_LINK make sure use latest version of API" - ) - SITES = None - - -async def __search(key, site, message, method): - if method.startswith("api"): - SEARCH_API_LINK = config_dict["SEARCH_API_LINK"] - SEARCH_LIMIT = config_dict["SEARCH_LIMIT"] - if method == "apisearch": - LOGGER.info(f"API Searching: {key} from {site}") - if site == "all": - api = f"{SEARCH_API_LINK}/api/v1/all/search?query={key}&limit={SEARCH_LIMIT}" - else: - api = f"{SEARCH_API_LINK}/api/v1/search?site={site}&query={key}&limit={SEARCH_LIMIT}" - elif method == "apitrend": - LOGGER.info(f"API Trending from {site}") - if site == "all": - api = f"{SEARCH_API_LINK}/api/v1/all/trending?limit={SEARCH_LIMIT}" - else: - api = f"{SEARCH_API_LINK}/api/v1/trending?site={site}&limit={SEARCH_LIMIT}" - elif method == "apirecent": - LOGGER.info(f"API Recent from {site}") - if site == "all": - api = f"{SEARCH_API_LINK}/api/v1/all/recent?limit={SEARCH_LIMIT}" - else: - api = f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}" - try: - async with ClientSession(trust_env=True) as c, c.get(api) as res: - search_results = await res.json() - if "error" in search_results or search_results["total"] == 0: - await edit_message( - message, - f"No result found for {key}\nTorrent Site:- {SITES.get(site)}", - ) - return - msg = f"Found {min(search_results['total'], TELEGRAPH_LIMIT)}" - if method == "apitrend": - msg += ( - f" trending result(s)\nTorrent Site:- {SITES.get(site)}" - ) - elif method == "apirecent": - msg += f" recent result(s)\nTorrent Site:- {SITES.get(site)}" - else: - msg += ( - f" result(s) for {key}\nTorrent Site:- {SITES.get(site)}" - ) - search_results = search_results["data"] - except Exception as e: - await edit_message(message, str(e)) - return - else: - LOGGER.info(f"PLUGINS Searching: {key} from {site}") - search = await sync_to_async( - xnox_client.search_start, pattern=key, plugins=site, category="all" - ) - search_id = search.id - while True: - result_status = await sync_to_async( - xnox_client.search_status, search_id=search_id - ) - status = result_status[0].status - if status != "Running": - break - dict_search_results = await sync_to_async( - xnox_client.search_results, search_id=search_id, limit=TELEGRAPH_LIMIT - ) - search_results = dict_search_results.results - total_results = dict_search_results.total - if total_results == 0: - await edit_message( - message, - f"No result found for {key}\nTorrent Site:- {site.capitalize()}", - ) - return - msg = f"Found {min(total_results, TELEGRAPH_LIMIT)}" - msg += f" result(s) for {key}\nTorrent Site:- {site.capitalize()}" - await sync_to_async(xnox_client.search_delete, search_id=search_id) - link = await __getResult(search_results, key, message, method) - buttons = ButtonMaker() - buttons.url("View", link) - button = buttons.column(1) - await edit_message(message, msg, button) - - -async def __getResult(search_results, key, message, method): - telegraph_content = [] - if method == "apirecent": - msg = "
<h4>API Recent Results</h4><br>"
-    elif method == "apisearch":
-        msg = f"<h4>API Search Result(s) For {key}</h4><br>"
-    elif method == "apitrend":
-        msg = "<h4>API Trending Results</h4><br>"
-    else:
-        msg = f"<h4>PLUGINS Search Result(s) For {key}</h4><br>"
-    for index, result in enumerate(search_results, start=1):
-        if method.startswith("api"):
-            try:
-                if "name" in result:
-                    msg += f"<a href='{result['url']}'>{escape(result['name'])}</a><br>"
-                if "torrents" in result:
-                    for subres in result["torrents"]:
-                        msg += f"<b>Quality: </b>{subres['quality']} | <b>Type: </b>{subres['type']} | "
-                        msg += f"<b>Size: </b>{subres['size']}<br>"
-                        if "torrent" in subres:
-                            msg += (
-                                f"<a href='{subres['torrent']}'>Direct Link</a><br>"
-                            )
-                        elif "magnet" in subres:
-                            msg += "<b>Share Magnet to</b> "
-                            msg += f"<a href='http://t.me/share/url?url={quote(subres['magnet'])}'>Telegram</a><br>"
-                    msg += "<br>"
-                else:
-                    msg += f"<b>Size: </b>{result['size']}<br>"
-                    with contextlib.suppress(Exception):
-                        msg += f"<b>Seeders: </b>{result['seeders']} | <b>Leechers: </b>{result['leechers']}<br>"
-                    if "torrent" in result:
-                        msg += (
-                            f"<a href='{result['torrent']}'>Direct Link</a><br><br>"
-                        )
-                    elif "magnet" in result:
-                        msg += "<b>Share Magnet to</b> "
-                        msg += f"<a href='http://t.me/share/url?url={quote(result['magnet'])}'>Telegram</a><br><br>"
-                    else:
-                        msg += "<br>"
-            except Exception:
-                continue
-        else:
-            msg += f"<a href='{result.descrLink}'>{escape(result.fileName)}</a><br>"
-            msg += f"<b>Size: </b>{get_readable_file_size(result.fileSize)}<br>"
-            msg += f"<b>Seeders: </b>{result.nbSeeders} | <b>Leechers: </b>{result.nbLeechers}<br>"
-            link = result.fileUrl
-            if link.startswith("magnet:"):
-                msg += f"<b>Share Magnet to</b> <a href='http://t.me/share/url?url={quote(link)}'>Telegram</a><br><br>"
-            else:
-                msg += f"<a href='{link}'>Direct Link</a><br><br>
" - - if len(msg.encode("utf-8")) > 39000: - telegraph_content.append(msg) - msg = "" - - if index == TELEGRAPH_LIMIT: - break - - if msg != "": - telegraph_content.append(msg) - - await edit_message( - message, f"Creating {len(telegraph_content)} Telegraph pages." - ) - path = [ - (await telegraph.create_page(title="Torrent Search", content=content))[ - "path" - ] - for content in telegraph_content - ] - if len(path) > 1: - await edit_message( - message, - f"Editing {len(telegraph_content)} Telegraph pages.", - ) - await telegraph.edit_telegraph(path, telegraph_content) - return f"https://telegra.ph/{path[0]}" - - -def __api_buttons(user_id, method): - buttons = ButtonMaker() - for data, name in SITES.items(): - buttons.callback(name, f"torser {user_id} {data} {method}") - buttons.callback("Cancel", f"torser {user_id} cancel") - return buttons.column(2) - - -async def __plugin_buttons(user_id): - buttons = ButtonMaker() - for siteName in PLUGINS: - buttons.callback( - siteName.capitalize(), f"torser {user_id} {siteName} plugin" - ) - buttons.callback("All", f"torser {user_id} all plugin") - buttons.callback("Cancel", f"torser {user_id} cancel") - return buttons.column(2) - - -@new_thread -async def torrentSearch(_, message): - user_id = message.from_user.id - buttons = ButtonMaker() - key = message.text.split() - if ( - not await isAdmin(message, user_id) - and message.chat.type != message.chat.type.PRIVATE - ): - msg, buttons = await checking_access(user_id, buttons) - if msg is not None: - reply_message = await send_message(message, msg, buttons.column(1)) - await delete_links(message) - await five_minute_del(reply_message) - return - if len(key) == 1 and SITES is None: - reply_message = await send_message( - message, "Send a search key along with command" - ) - await one_minute_del(reply_message) - await delete_links(message) - return - if len(key) == 1: - buttons.callback("Trending", f"torser {user_id} apitrend") - buttons.callback("Recent", f"torser {user_id} apirecent") - buttons.callback("Cancel", f"torser {user_id} cancel") - button = buttons.column(2) - reply_message = await send_message( - message, "Send a search key along with command", button - ) - elif SITES is not None: - buttons.callback("Api", f"torser {user_id} apisearch") - buttons.callback("Plugins", f"torser {user_id} plugin") - buttons.callback("Cancel", f"torser {user_id} cancel") - button = buttons.column(2) - reply_message = await send_message(message, "Choose tool to search:", button) - else: - button = await __plugin_buttons(user_id) - reply_message = await send_message( - message, "Choose site to search | Plugins:", button - ) - await five_minute_del(reply_message) - await delete_links(message) - - -@new_task -async def torrentSearchUpdate(_, query): - user_id = query.from_user.id - message = query.message - key = message.reply_to_message.text.split(maxsplit=1) - key = key[1].strip() if len(key) > 1 else None - data = query.data.split() - if user_id != int(data[1]): - await query.answer("Not Yours!", show_alert=True) - elif data[2].startswith("api"): - await query.answer() - button = __api_buttons(user_id, data[2]) - await edit_message(message, "Choose site:", button) - elif data[2] == "plugin": - await query.answer() - button = await __plugin_buttons(user_id) - await edit_message(message, "Choose site:", button) - elif data[2] != "cancel": - await query.answer() - site = data[2] - method = data[3] - if method.startswith("api"): - if key is None: - if method == "apirecent": - endpoint = "Recent" - elif method == 
"apitrend": - endpoint = "Trending" - await edit_message( - message, - f"Listing {endpoint} Items...\nTorrent Site:- {SITES.get(site)}", - ) - else: - await edit_message( - message, - f"Searching for {key}\nTorrent Site:- {SITES.get(site)}", - ) - else: - await edit_message( - message, - f"Searching for {key}\nTorrent Site:- {site.capitalize()}", - ) - await __search(key, site, message, method) - else: - await query.answer() - await edit_message(message, "Search has been canceled!") - - -bot.add_handler( - MessageHandler( - torrentSearch, - filters=command(BotCommands.SearchCommand) & CustomFilters.authorized, - ) -) -bot.add_handler(CallbackQueryHandler(torrentSearchUpdate, filters=regex("^torser"))) diff --git a/bot/modules/torrent_select.py b/bot/modules/torrent_select.py deleted file mode 100644 index 18818ff86..000000000 --- a/bot/modules/torrent_select.py +++ /dev/null @@ -1,98 +0,0 @@ -import contextlib - -from aiofiles.os import path as aiopath -from aiofiles.os import remove as aioremove -from pyrogram.filters import regex -from pyrogram.handlers import CallbackQueryHandler - -from bot import LOGGER, bot, aria2, xnox_client -from bot.helper.ext_utils.bot_utils import sync_to_async, get_task_by_gid -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.message_utils import sendStatusMessage - - -async def handle_query(client, query): - user_id = query.from_user.id - data = query.data.split() - message = query.message - download = await get_task_by_gid(data[2]) - - if not download: - await query.answer("This task has been cancelled!", show_alert=True) - await message.delete() - return - - listener = getattr(download, "listener", None) - if not listener: - await query.answer( - "Not in download state anymore! 
Keep this message to resume the seed if seed enabled!", - show_alert=True, - ) - return - - if ( - user_id != listener().message.from_user.id - and not await CustomFilters.sudo_user(client, query) - ): - await query.answer("This task is not for you!", show_alert=True) - return - - action = data[1] - if action == "pin": - await query.answer(data[3], show_alert=True) - elif action == "done": - await handle_done_action(data[3], download, message, query) - elif action == "rm": - await download.download().cancel_download() - await message.delete() - - -async def handle_done_action(id_, download, message, query): - await query.answer() - - if len(id_) > 20: - await handle_torrent_done(id_, download) - else: - await handle_aria2_done(id_, download) - - await sendStatusMessage(message) - await message.delete() - - -async def handle_torrent_done(torrent_hash, download): - client = xnox_client - torrent_info = ( - await sync_to_async(client.torrents_info, torrent_hash=torrent_hash) - )[0] - path = torrent_info.content_path.rsplit("/", 1)[0] - files = await sync_to_async(client.torrents_files, torrent_hash=torrent_hash) - - for file in files: - if file.priority == 0: - for file_path in [f"{path}/{file.name}", f"{path}/{file.name}.!qB"]: - if await aiopath.exists(file_path): - with contextlib.suppress(Exception): - await aioremove(file_path) - - if not download.queued: - await sync_to_async(client.torrents_resume, torrent_hashes=torrent_hash) - - -async def handle_aria2_done(gid, download): - files = await sync_to_async(aria2.client.get_files, gid) - - for file in files: - if file["selected"] == "false" and await aiopath.exists(file["path"]): - with contextlib.suppress(Exception): - await aioremove(file["path"]) - - if not download.queued: - try: - await sync_to_async(aria2.client.unpause, gid) - except Exception as e: - LOGGER.error( - f"{e} Error in resume, this mostly happens after abuse aria2. Try to use select cmd again!" 
- ) - - -bot.add_handler(CallbackQueryHandler(handle_query, filters=regex("^btsel"))) diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py index 30cbf1e01..323fcffc2 100644 --- a/bot/modules/users_settings.py +++ b/bot/modules/users_settings.py @@ -1,373 +1,217 @@ +from asyncio import sleep +from functools import partial from io import BytesIO -from os import path as ospath from os import getcwd -from html import escape from time import time -from asyncio import sleep -from functools import partial -from PIL import Image +from aiofiles.os import makedirs, remove from aiofiles.os import path as aiopath -from aiofiles.os import mkdir -from aiofiles.os import remove as aioremove -from pyrogram.filters import regex, create, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler +from pyrogram.filters import create +from pyrogram.handlers import MessageHandler -from bot import DATABASE_URL, IS_PREMIUM_USER, bot, user_data, config_dict +from bot import extension_filter, user_data +from bot.core.aeon_client import TgClient +from bot.core.config_manager import Config from bot.helper.ext_utils.bot_utils import ( - new_thread, - sync_to_async, - is_gdrive_link, + new_task, update_user_ldata, ) -from bot.helper.ext_utils.db_handler import DbManager -from bot.helper.ext_utils.help_strings import uset_display_dict -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.telegram_helper.bot_commands import BotCommands +from bot.helper.ext_utils.db_handler import database +from bot.helper.ext_utils.media_utils import create_thumb from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( - sendFile, - edit_message, - send_message, - sendCustomMsg, delete_message, + edit_message, five_minute_del, + send_file, + send_message, ) -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper handler_dict = {} -fname_dict = { - "rcc": "RClone", - "prefix": "Prefix", - "suffix": "Suffix", - "remname": "Remname", - "ldump": "Dump", - "user_tds": "User Custom TDs", - "lcaption": "Caption", - "thumb": "Thumbnail", - "metadata": "Metadata", - "attachment": "Attachment", - "yt_opt": "YT-DLP Options", -} - - -async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None): - user_id = from_user.id - name = from_user.mention(style="html") - buttons = ButtonMaker() - thumbpath = f"Thumbnails/{user_id}.jpg" - rclone_path = f"tanha/{user_id}.conf" - user_dict = user_data.get(user_id, {}) - if key is None: - buttons.callback("Universal", f"userset {user_id} universal") - buttons.callback("Mirror", f"userset {user_id} mirror") - buttons.callback("Leech", f"userset {user_id} leech") - if user_dict and any( - key in user_dict - for key in [ - "prefix", - "suffix", - "remname", - "ldump", - "yt_opt", - "media_group", - "rclone", - "thumb", - "as_doc", - "metadata", - "attachment", - ] - ): - buttons.callback("Reset", f"userset {user_id} reset_all") - buttons.callback("Close", f"userset {user_id} close") - text = f"User Settings for {name}" - button = buttons.column(2) - elif key == "universal": - buttons.callback("YT-DLP Options", f"userset {user_id} yt_opt") - ytopt = ( - "Not Exists" - if ( - val := user_dict.get("yt_opt", config_dict.get("YT_DLP_OPTIONS", "")) - ) - == "" - else val - ) - buttons.callback("Prefix", f"userset {user_id} prefix") - prefix = user_dict.get("prefix", "Not Exists") - - buttons.callback("Suffix", f"userset {user_id} suffix") - suffix 
= user_dict.get("suffix", "Not Exists") - - buttons.callback("Remname", f"userset {user_id} remname") - remname = user_dict.get("remname", "Not Exists") - - buttons.callback("Metadata", f"userset {user_id} metadata") - metadata = user_dict.get("metadata", "Not Exists") - - buttons.callback("Attachment", f"userset {user_id} attachment") - attachment = user_dict.get("attachment", "Not Exists") - - text = f"Universal Settings for {name}\n\n" - text += f"• YT-DLP Options: {ytopt}\n" - text += f"• Prefix: {prefix}\n" - text += f"• Suffix: {suffix}\n" - text += f"• Metadata: {metadata}\n" - text += f"• Attachment: {attachment}\n" - text += f"• Remname: {remname}" - buttons.callback("Back", f"userset {user_id} back", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - elif key == "mirror": - buttons.callback("RClone", f"userset {user_id} rcc") - rccmsg = "Exists" if await aiopath.exists(rclone_path) else "Not Exists" - tds_mode = "Enabled" if user_dict.get("td_mode") else "Disabled" - buttons.callback("User TDs", f"userset {user_id} user_tds") - - text = f"Mirror Settings for {name}\n\n" - text += f"• Rclone Config: {rccmsg}\n" - text += f"• User TD Mode: {tds_mode}" - - buttons.callback("Back", f"userset {user_id} back", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - elif key == "leech": - if ( - user_dict.get("as_doc", False) - or "as_doc" not in user_dict - and config_dict["AS_DOCUMENT"] - ): - ltype = "DOCUMENT" - buttons.callback("Send As Media", f"userset {user_id} doc") - else: - ltype = "MEDIA" - buttons.callback("Send As Document", f"userset {user_id} doc") +no_thumb = "https://graph.org/file/73ae908d18c6b38038071.jpg" - mediainfo = ( - "Enabled" - if user_dict.get("mediainfo", config_dict["SHOW_MEDIAINFO"]) - else "Disabled" - ) - buttons.callback( - "Disable MediaInfo" if mediainfo == "Enabled" else "Enable MediaInfo", - f"userset {user_id} mediainfo", - ) - if config_dict["SHOW_MEDIAINFO"]: - mediainfo = "Force Enabled" - buttons.callback("Thumbnail", f"userset {user_id} thumb") - thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists" - if user_dict.get("media_group", False) or ( - "media_group" not in user_dict and config_dict["MEDIA_GROUP"] - ): - buttons.callback("Disable Media Group", f"userset {user_id} mgroup") - else: - buttons.callback("Enable Media Group", f"userset {user_id} mgroup") - media_group = ( - "Enabled" - if user_dict.get("media_group", config_dict.get("MEDIA_GROUP")) - else "Disabled" - ) +async def get_user_settings(from_user): + user_id = from_user.id + buttons = ButtonMaker() - buttons.callback("Leech Caption", f"userset {user_id} lcaption") - lcaption = user_dict.get("lcaption", "Not Exists") - - buttons.callback("Leech Dump", f"userset {user_id} ldump") - ldump = "Not Exists" if (val := user_dict.get("ldump", "")) == "" else val - - SPLIT_SIZE = "4GB" if IS_PREMIUM_USER else "2GB" - text = f"Leech Settings for {name}\n\n" - text += f"• Leech split size: {SPLIT_SIZE}\n" - text += f"• Leech Type: {ltype}\n" - text += f"• Custom Thumbnail: {thumbmsg}\n" - text += f"• Media Group: {media_group}\n" - text += f"• Leech Caption: {escape(lcaption)}\n" - text += f"• Leech Dump: {ldump}\n" - text += f"• MediaInfo Mode: {mediainfo}" - - buttons.callback("Back", f"userset {user_id} back", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - elif edit_type: - text = f"{fname_dict[key]} 
Settings :\n\n" - if key == "rcc": - set_exist = await aiopath.exists(rclone_path) - text += f"rcl.conf File : {'' if set_exist else 'Not'} Exists\n\n" - elif key == "thumb": - set_exist = await aiopath.exists(thumbpath) - text += ( - f"Custom Thumbnail : {'' if set_exist else 'Not'} Exists\n\n" - ) - elif key == "yt_opt": - set_exist = ( - "Not Exists" - if ( - val := user_dict.get( - "yt_opt", config_dict.get("YT_DLP_OPTIONS", "") - ) - ) - == "" - else val - ) - text += f"YT-DLP Options : {escape(set_exist)}\n\n" - elif key in [ - "prefix", - "remname", - "suffix", - "lcaption", - "ldump", - "metadata", - "attachment", - ]: - set_exist = ( - "Not Exists" if (val := user_dict.get(key, "")) == "" else val - ) - text += f"{fname_dict[key]}: {set_exist}\n\n" - elif key == "user_tds": - set_exist = ( - len(val) if (val := user_dict.get(key, False)) else "Not Exists" - ) - tds_mode = "Enabled" if user_dict.get("td_mode") else "Disabled" - buttons.callback( - "Disable UserTDs" if tds_mode == "Enabled" else "Enable UserTDs", - f"userset {user_id} td_mode", - "header", - ) - text += f"User TD Mode: {tds_mode}\n" - else: - return None - text += f"Description : {uset_display_dict[key][0]}" - if edit_mode: - text += "\n\n" + uset_display_dict[key][1] - buttons.callback("Stop", f"userset {user_id} {key}") - elif key != "user_tds" or set_exist == "Not Exists": - buttons.callback( - "Change" if set_exist and set_exist != "Not Exists" else "Set", - f"userset {user_id} {key} edit", - ) - if set_exist and set_exist != "Not Exists": - if key == "user_tds": - buttons.callback("Show", f"userset {user_id} show_tds", "header") - buttons.callback("Delete", f"userset {user_id} d{key}") - buttons.callback("Back", f"userset {user_id} back {edit_type}", "footer") - buttons.callback("Close", f"userset {user_id} close", "footer") - button = buttons.column(2) - return text, button - - -async def update_user_settings( - query, key=None, edit_type=None, edit_mode=None, msg=None -): - msg, button = await get_user_settings(query.from_user, key, edit_type, edit_mode) - user_id = query.from_user.id - thumbnail = f"Thumbnails/{user_id}.jpg" - if not ospath.exists(thumbnail): - thumbnail = "https://graph.org/file/73ae908d18c6b38038071.jpg" - await edit_message(query.message, msg, button, thumbnail) + # Paths + thumbpath = f"Thumbnails/{user_id}.jpg" + rclone_conf = f"rclone/{user_id}.conf" + token_pickle = f"tokens/{user_id}.pickle" + # User Data + user_dict = user_data.get(user_id, {}) + thumbnail = thumbpath if await aiopath.exists(thumbpath) else no_thumb + ex_ex = user_dict.get("excluded_extensions", extension_filter or "None") + meta_msg = user_dict.get("metadata", Config.METADATA_KEY or "None") + wm_msg = user_dict.get("watermark", Config.WATERMARK_KEY or "None") + ns_msg = "Added" if user_dict.get("name_sub", False) else "None" + ytopt = user_dict.get("yt_opt", Config.YT_DLP_OPTIONS or "None") + ffc = ( + "Added" + if user_dict.get("ffmpeg_cmds") or Config.FFMPEG_CMDS + else "Not added" + ) -@new_thread -async def user_settings(_, message): - msg, button = await get_user_settings(message.from_user) - user_id = message.from_user.id - thumbnail = f"Thumbnails/{user_id}.jpg" - if not ospath.exists(thumbnail): - thumbnail = "https://graph.org/file/73ae908d18c6b38038071.jpg" - x = await send_message(message, msg, button, thumbnail) + # Conditions + rccmsg = "Exists" if await aiopath.exists(rclone_conf) else "Not Exists" + tokenmsg = "Exists" if await aiopath.exists(token_pickle) else "Not Exists" + default_upload = 
user_dict.get("default_upload", Config.DEFAULT_UPLOAD) + dur = "Gdrive API" if default_upload != "gd" else "Rclone" + user_tokens = user_dict.get("user_tokens", False) + trr = "OWNER" if user_tokens else "MY" + + # Buttons + buttons.data_button("Leech", f"userset {user_id} leech") + buttons.data_button("Rclone", f"userset {user_id} rclone") + buttons.data_button("Gdrive Tools", f"userset {user_id} gdrive") + buttons.data_button("Upload Paths", f"userset {user_id} upload_paths") + buttons.data_button(f"Use {dur}", f"userset {user_id} {default_upload}") + buttons.data_button( + f"Use {trr} token/config", + f"userset {user_id} user_tokens {user_tokens}", + ) + buttons.data_button("Excluded Extensions", f"userset {user_id} ex_ex") + buttons.data_button("Metadata key", f"userset {user_id} metadata_key") + buttons.data_button("Watermark text", f"userset {user_id} watermark_key") + buttons.data_button("Name Subtitute", f"userset {user_id} name_substitute") + buttons.data_button("YT-DLP Options", f"userset {user_id} yto") + buttons.data_button("Ffmpeg Cmds", f"userset {user_id} ffc") + + if user_dict: + buttons.data_button("Reset All", f"userset {user_id} reset") + buttons.data_button("Close", f"userset {user_id} close") + + # Text + text = f""">Settings + +**Rclone Config:** {rccmsg} +**Gdrive Token:** {tokenmsg} +**Name Substitution:** `{ns_msg}` +**FFmpeg Commands:** `{ffc}` +**Metadata Title:** `{meta_msg}` +**Watermark Text:** `{wm_msg}` +**Excluded extension:** `{ex_ex}` +**YT-DLP Options:** `{ytopt}` +""" + + return text, buttons.build_menu(2), thumbnail + + +async def update_user_settings(query): + msg, button, thumb = await get_user_settings(query.from_user) + await edit_message(query.message, msg, button, thumb, markdown=True) + + +@new_task +async def send_user_settings(_, message): + from_user = message.from_user + handler_dict[from_user.id] = False + msg, button, thumb = await get_user_settings(from_user) + x = await send_message(message, msg, button, thumb, markdown=True) await five_minute_del(message) await delete_message(x) -async def set_yt_options(_, message, pre_event): +@new_task +async def set_thumb(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = False - value = message.text - update_user_ldata(user_id, "yt_opt", value) - await message.delete() - await update_user_settings(pre_event, "yt_opt", "universal") - if DATABASE_URL: - await DbManager().update_user_data(user_id) + des_dir = await create_thumb(message, user_id) + update_user_ldata(user_id, "thumb", des_dir) + await delete_message(message) + await update_user_settings(pre_event) + await database.update_user_doc(user_id, "thumb", des_dir) -async def set_custom(_, message, pre_event, key): +@new_task +async def add_rclone(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = False - value = message.text - return_key = "leech" - n_key = key - user_dict = user_data.get(user_id, {}) - if key == "user_tds": - user_tds = user_dict.get(key, {}) - for td_item in value.split("\n"): - if td_item == "": - continue - split_ck = td_item.split() - td_details = td_item.rsplit( - maxsplit=( - 2 - if split_ck[-1].startswith("http") - and not is_gdrive_link(split_ck[-1]) - else 1 - if len(split_ck[-1]) > 15 - else 0 - ) - ) - for title in list(user_tds.keys()): - if td_details[0].casefold() == title.casefold(): - del user_tds[title] - if len(td_details) > 1: - if is_gdrive_link(td_details[1].strip()): - td_details[1] = GoogleDriveHelper.getIdFromUrl(td_details[1]) - if await 
sync_to_async( - GoogleDriveHelper().getFolderData, td_details[1] - ): - user_tds[td_details[0]] = { - "drive_id": td_details[1], - "index_link": td_details[2].rstrip("/") - if len(td_details) > 2 - else "", - } - value = user_tds - return_key = "mirror" - update_user_ldata(user_id, n_key, value) - await message.delete() - await update_user_settings(pre_event, key, return_key, msg=message) - if DATABASE_URL: - await DbManager().update_user_data(user_id) - - -async def set_thumb(_, message, pre_event, key): + rpath = f"{getcwd()}/rclone/" + await makedirs(rpath, exist_ok=True) + des_dir = f"{rpath}{user_id}.conf" + await message.download(file_name=des_dir) + update_user_ldata(user_id, "rclone_config", f"rclone/{user_id}.conf") + await delete_message(message) + await update_user_settings(pre_event) + await database.update_user_doc(user_id, "rclone_config", des_dir) + + +@new_task +async def add_token_pickle(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = False - path = "Thumbnails/" - if not await aiopath.isdir(path): - await mkdir(path) - photo_dir = await message.download() - des_dir = ospath.join(path, f"{user_id}.jpg") - await sync_to_async(Image.open(photo_dir).convert("RGB").save, des_dir, "JPEG") - await aioremove(photo_dir) - update_user_ldata(user_id, "thumb", des_dir) - await message.delete() - await update_user_settings(pre_event, key, "leech", msg=message) - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "thumb", des_dir) + tpath = f"{getcwd()}/tokens/" + await makedirs(tpath, exist_ok=True) + des_dir = f"{tpath}{user_id}.pickle" + await message.download(file_name=des_dir) + update_user_ldata(user_id, "token_pickle", f"tokens/{user_id}.pickle") + await delete_message(message) + await update_user_settings(pre_event) + await database.update_user_doc(user_id, "token_pickle", des_dir) -async def add_rclone(_, message, pre_event): +@new_task +async def delete_path(_, message, pre_event): user_id = message.from_user.id handler_dict[user_id] = False - path = f"{getcwd()}/tanha/" - if not await aiopath.isdir(path): - await mkdir(path) - des_dir = ospath.join(path, f"{user_id}.conf") - await message.download(file_name=des_dir) - update_user_ldata(user_id, "rclone", f"tanha/{user_id}.conf") - await message.delete() - await update_user_settings(pre_event, "rcc", "mirror") - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "rclone", des_dir) + user_dict = user_data.get(user_id, {}) + names = message.text.split() + for name in names: + if name in user_dict["upload_paths"]: + del user_dict["upload_paths"][name] + new_value = user_dict["upload_paths"] + update_user_ldata(user_id, "upload_paths", new_value) + await delete_message(message) + await update_user_settings(pre_event) + await database.update_user_doc(user_id, "upload_paths", new_value) + + +@new_task +async def set_option(_, message, pre_event, option): + user_id = message.from_user.id + handler_dict[user_id] = False + value = message.text + if option == "excluded_extensions": + fx = value.split() + value = ["aria2", "!qB"] + for x in fx: + x = x.lstrip(".") + value.append(x.strip().lower()) + elif option == "upload_paths": + user_dict = user_data.get(user_id, {}) + user_dict.setdefault("upload_paths", {}) + lines = value.split("/n") + for line in lines: + data = line.split(maxsplit=1) + if len(data) != 2: + await send_message(message, "Wrong format! 
Add ") + await update_user_settings(pre_event) + return + name, path = data + user_dict["upload_paths"][name] = path + value = user_dict["upload_paths"] + elif option == "ffmpeg_cmds": + if value.startswith("{") and value.endswith("}"): + try: + value = eval(value) + except Exception as e: + await send_message(message, str(e)) + await update_user_settings(pre_event) + return + else: + await send_message(message, "It must be list of str!") + await update_user_settings(pre_event) + return + update_user_ldata(user_id, option, value) + await delete_message(message) + await update_user_settings(pre_event) + await database.update_user_data(user_id) -async def event_handler(client, query, pfunc, rfunc, photo=False, document=False): +async def event_handler(client, query, pfunc, photo=False, document=False): user_id = query.from_user.id handler_dict[user_id] = True start_time = time() @@ -381,346 +225,673 @@ async def event_filter(_, __, event): mtype = event.text user = event.from_user or event.sender_chat return bool( - user.id == user_id and event.chat.id == query.message.chat.id and mtype + user.id == user_id and event.chat.id == query.message.chat.id and mtype, ) handler = client.add_handler( - MessageHandler(pfunc, filters=create(event_filter)), group=-1 + MessageHandler(pfunc, filters=create(event_filter)), + group=-1, ) + while handler_dict[user_id]: await sleep(0.5) if time() - start_time > 60: handler_dict[user_id] = False - await rfunc() + await update_user_settings(query) client.remove_handler(*handler) -@new_thread +@new_task async def edit_user_settings(client, query): from_user = query.from_user user_id = from_user.id message = query.message data = query.data.split() + handler_dict[user_id] = False thumb_path = f"Thumbnails/{user_id}.jpg" - rclone_path = f"tanha/{user_id}.conf" + rclone_conf = f"rclone/{user_id}.conf" + token_pickle = f"tokens/{user_id}.pickle" user_dict = user_data.get(user_id, {}) if user_id != int(data[1]): await query.answer("Not Yours!", show_alert=True) - return None - if data[2] in ["universal", "mirror", "leech"]: - await query.answer() - await update_user_settings(query, data[2]) - return None - if data[2] == "doc": - update_user_ldata(user_id, "as_doc", not user_dict.get("as_doc", False)) - await query.answer() - await update_user_settings(query, "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "show_tds": - handler_dict[user_id] = False - user_tds = user_dict.get("user_tds", {}) - msg = "User TD Details\n\n" - for index_no, (drive_name, drive_dict) in enumerate( - user_tds.items(), start=1 + elif data[2] in [ + "as_doc", + "media_group", + "user_transmission", + "stop_duplicate", + "mixed_leech", + ]: + update_user_ldata(user_id, data[2], data[3] == "true") + await query.answer() + await update_user_settings(query) + await database.update_user_data(user_id) + elif data[2] in ["thumb", "rclone_config", "token_pickle"]: + if data[2] == "thumb": + fpath = thumb_path + elif data[2] == "rclone_config": + fpath = rclone_conf + else: + fpath = token_pickle + if await aiopath.exists(fpath): + await query.answer() + await remove(fpath) + update_user_ldata(user_id, data[2], "") + await update_user_settings(query) + await database.update_user_doc(user_id, data[2]) + else: + await query.answer("Old Settings", show_alert=True) + await update_user_settings(query) + elif data[2] in [ + "yt_opt", + "lprefix", + "lcaption", + "index_url", + "excluded_extensions", + "name_sub", + "metadata", + "watermark", + 
"thumb_layout", + "ffmpeg_cmds", + "session_string", + "user_dump", + ]: + await query.answer() + update_user_ldata(user_id, data[2], "") + await update_user_settings(query) + await database.update_user_data(user_id) + elif data[2] in ["leech_dest", "rclone_path", "gdrive_id"]: + await query.answer() + if data[2] in user_data.get(user_id, {}): + del user_data[user_id][data[2]] + await update_user_settings(query) + await database.update_user_data(user_id) + elif data[2] == "leech": + await query.answer() + thumbpath = f"Thumbnails/{user_id}.jpg" + buttons = ButtonMaker() + buttons.data_button("Thumbnail", f"userset {user_id} sthumb") + if user_dict.get("user_dump", False): + dump = user_dict["user_dump"] + else: + dump = "None" + buttons.data_button("Dump", f"userset {user_id} u_dump") + buttons.data_button("Session", f"userset {user_id} s_string") + if user_dict.get("session_string", False): + session_string = "Exists" + else: + session_string = "Not exists" + thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists" + split_size = Config.LEECH_SPLIT_SIZE + buttons.data_button("Leech Destination", f"userset {user_id} ldest") + buttons.data_button("Leech Prefix", f"userset {user_id} leech_prefix") + if user_dict.get("lprefix", False): + lprefix = user_dict["lprefix"] + elif "lprefix" not in user_dict and Config.LEECH_FILENAME_PREFIX: + lprefix = Config.LEECH_FILENAME_PREFIX + else: + lprefix = "None" + buttons.data_button("Leech Caption", f"userset {user_id} leech_caption") + if user_dict.get("lcaption", False): + lcaption = user_dict["lcaption"] + elif "lcaption" not in user_dict and Config.LEECH_FILENAME_CAPTION: + lcaption = Config.LEECH_FILENAME_CAPTION + else: + lcaption = "None" + if user_dict.get("as_doc", False) or ( + "as_doc" not in user_dict and Config.AS_DOCUMENT + ): + ltype = "DOCUMENT" + buttons.data_button("Send As Media", f"userset {user_id} as_doc false") + else: + ltype = "MEDIA" + buttons.data_button("Send As Document", f"userset {user_id} as_doc true") + if user_dict.get("media_group", False) or ( + "media_group" not in user_dict and Config.MEDIA_GROUP ): - msg += f"{index_no}: Name: {drive_name}\n" - msg += f" Drive ID: {drive_dict['drive_id']}\n" - msg += f" Index Link: {ind_url if (ind_url := drive_dict['index_link']) else 'Not Provided'}\n\n" - try: - await sendCustomMsg(user_id, msg) - await query.answer( - "User TDs Successfully Send in your PM", show_alert=True + buttons.data_button( + "Disable Media Group", + f"userset {user_id} media_group false", ) - except Exception: - await query.answer( - "Start the Bot in PM (Private) and Try Again", show_alert=True + media_group = "Enabled" + else: + buttons.data_button( + "Enable Media Group", + f"userset {user_id} media_group true", ) - await update_user_settings(query, "user_tds", "mirror") - return None - if data[2] == "dthumb": - handler_dict[user_id] = False - if await aiopath.exists(thumb_path): - await query.answer() - await aioremove(thumb_path) - update_user_ldata(user_id, "thumb", "") - await update_user_settings(query, "thumb", "leech") - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "thumb") - return None - await query.answer("Old Settings", show_alert=True) - await update_user_settings(query, "leech") - return None - if data[2] == "thumb": - await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "leech", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_thumb, pre_event=query, key=data[2]) - rfunc = 
partial(update_user_settings, query, data[2], "leech") - await event_handler(client, query, pfunc, rfunc, True) - return None - if data[2] == "yt_opt": - await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "universal", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_yt_options, pre_event=query) - rfunc = partial(update_user_settings, query, data[2], "universal") - await event_handler(client, query, pfunc, rfunc) - return None - if data[2] == "dyt_opt": - handler_dict[user_id] = False - await query.answer() - update_user_ldata(user_id, "yt_opt", "") - await update_user_settings(query, "yt_opt", "universal") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "td_mode": - handler_dict[user_id] = False - if data[2] == "td_mode" and not user_dict.get("user_tds", False): - return await query.answer( - "Set UserTD first to Enable User TD Mode !", show_alert=True + media_group = "Disabled" + if ( + TgClient.IS_PREMIUM_USER and user_dict.get("user_transmission", False) + ) or ("user_transmission" not in user_dict and Config.USER_TRANSMISSION): + buttons.data_button( + "Leech by Bot", + f"userset {user_id} user_transmission false", + ) + elif TgClient.IS_PREMIUM_USER: + buttons.data_button( + "Leech by User", + f"userset {user_id} user_transmission true", ) + + if (TgClient.IS_PREMIUM_USER and user_dict.get("mixed_leech", False)) or ( + "mixed_leech" not in user_dict and Config.MIXED_LEECH + ): + buttons.data_button( + "Disable Mixed Leech", + f"userset {user_id} mixed_leech false", + ) + elif TgClient.IS_PREMIUM_USER: + buttons.data_button( + "Enable Mixed Leech", + f"userset {user_id} mixed_leech true", + ) + + buttons.data_button("Thumbnail Layout", f"userset {user_id} tlayout") + if user_dict.get("thumb_layout", False): + thumb_layout = user_dict["thumb_layout"] + elif "thumb_layout" not in user_dict and Config.THUMBNAIL_LAYOUT: + thumb_layout = Config.THUMBNAIL_LAYOUT + else: + thumb_layout = "None" + + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + text = f""">Leech Settings + +**Leech Type:** {ltype} +**Custom Thumbnail:** {thumbmsg} +**Media Group:** {media_group} +**Leech Split Size:** {split_size} +**Session string:** {session_string} +**Thumbnail Layout:** {thumb_layout} +**Leech Prefix:** `{lprefix}` +**Leech dump chat:** `{dump}` +**Leech Caption:** `{lcaption}` +""" + # **User Custom Dump:** `{user_dump}` + + await edit_message(message, text, buttons.build_menu(2), markdown=True) + elif data[2] == "rclone": await query.answer() - update_user_ldata(user_id, data[2], not user_dict.get(data[2], False)) - await update_user_settings(query, "user_tds", "mirror") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "mediainfo": - handler_dict[user_id] = False - if config_dict["SHOW_MEDIAINFO"]: - return await query.answer( - "Force Enabled! 
Can't Alter Settings", show_alert=True + buttons = ButtonMaker() + buttons.data_button("Rclone Config", f"userset {user_id} rcc") + buttons.data_button("Default Rclone Path", f"userset {user_id} rcp") + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + rccmsg = "Exists" if await aiopath.exists(rclone_conf) else "Not Exists" + if user_dict.get("rclone_path", False): + rccpath = user_dict["rclone_path"] + elif RP := Config.RCLONE_PATH: + rccpath = RP + else: + rccpath = "None" + text = f""">Rclone Settings + +**Rclone Config:** {rccmsg} +**Rclone Path:** `{rccpath}`""" + await edit_message(message, text, buttons.build_menu(1), markdown=True) + elif data[2] == "gdrive": + await query.answer() + buttons = ButtonMaker() + buttons.data_button("token.pickle", f"userset {user_id} token") + buttons.data_button("Default Gdrive ID", f"userset {user_id} gdid") + buttons.data_button("Index URL", f"userset {user_id} index") + if user_dict.get("stop_duplicate", False) or ( + "stop_duplicate" not in user_dict and Config.STOP_DUPLICATE + ): + buttons.data_button( + "Disable Stop Duplicate", + f"userset {user_id} stop_duplicate false", + ) + sd_msg = "Enabled" + else: + buttons.data_button( + "Enable Stop Duplicate", + f"userset {user_id} stop_duplicate true", ) + sd_msg = "Disabled" + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + tokenmsg = "Exists" if await aiopath.exists(token_pickle) else "Not Exists" + if user_dict.get("gdrive_id", False): + gdrive_id = user_dict["gdrive_id"] + elif GDID := Config.GDRIVE_ID: + gdrive_id = GDID + else: + gdrive_id = "None" + index = ( + user_dict["index_url"] if user_dict.get("index_url", False) else "None" + ) + text = f""">Gdrive Tools Settings + +**Gdrive Token:** {tokenmsg} +**Gdrive ID:** `{gdrive_id}` +**Index URL:** `{index}` +**Stop Duplicate:** {sd_msg}""" + await edit_message(message, text, buttons.build_menu(1), markdown=True) + elif data[2] == "sthumb": await query.answer() - update_user_ldata(user_id, data[2], not user_dict.get(data[2], False)) - await update_user_settings(query, "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "mgroup": - handler_dict[user_id] = False + buttons = ButtonMaker() + if await aiopath.exists(thumb_path): + buttons.data_button("Delete Thumbnail", f"userset {user_id} thumb") + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send a photo to save it as custom thumbnail. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_thumb, pre_event=query) + await event_handler(client, query, pfunc, True) + elif data[2] == "yto": await query.answer() - update_user_ldata( - user_id, "media_group", not user_dict.get("media_group", False) + buttons = ButtonMaker() + if user_dict.get("yt_opt", False) or Config.YT_DLP_OPTIONS: + buttons.data_button( + "Remove YT-DLP Options", + f"userset {user_id} yt_opt", + "header", + ) + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + rmsg = """ +Send YT-DLP Options. Timeout: 60 sec +Format: key:value|key:value|key:value. +Example: format:bv*+mergeall[vcodec=none]|nocheckcertificate:True +Check all yt-dlp api options from this FILE or use this script to convert cli arguments to api options. 
+ """ + await edit_message(message, rmsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="yt_opt") + await event_handler(client, query, pfunc) + elif data[2] == "ffc": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("ffmpeg_cmds", False) or Config.FFMPEG_CMDS: + buttons.data_button( + "Remove FFMPEG Commands", + f"userset {user_id} ffmpeg_cmds", + "header", + ) + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + rmsg = "follow this url https://telegra.ph/Ffmpeg-guide-01-10" + await edit_message(message, rmsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="ffmpeg_cmds") + await event_handler(client, query, pfunc) + elif data[2] == "rcc": + await query.answer() + buttons = ButtonMaker() + if await aiopath.exists(rclone_conf): + buttons.data_button( + "Delete rclone.conf", + f"userset {user_id} rclone_config", + ) + buttons.data_button("Back", f"userset {user_id} rclone") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send rclone.conf. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, ) - await update_user_settings(query, "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "rcc": - await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "mirror", edit_mode) - if not edit_mode: - return None pfunc = partial(add_rclone, pre_event=query) - rfunc = partial(update_user_settings, query, data[2], "mirror") - await event_handler(client, query, pfunc, rfunc, document=True) - return None - if data[2] == "drcc": - handler_dict[user_id] = False - if await aiopath.exists(rclone_path): - await query.answer() - await aioremove(rclone_path) - update_user_ldata(user_id, "rclone", "") - await update_user_settings(query, "rcc", "mirror") - if DATABASE_URL: - await DbManager().update_user_doc(user_id, "rclone") - return None - await query.answer("Old Settings", show_alert=True) - await update_user_settings(query) - return None - if data[2] == "user_tds": - handler_dict[user_id] = False - await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "mirror", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_custom, pre_event=query, key=data[2]) - rfunc = partial(update_user_settings, query, data[2], "mirror") - await event_handler(client, query, pfunc, rfunc) - return None - if data[2] in ["prefix", "suffix", "remname", "attachment", "metadata"]: - handler_dict[user_id] = False - await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "universal", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_custom, pre_event=query, key=data[2]) - rfunc = partial(update_user_settings, query, data[2], "universal") - await event_handler(client, query, pfunc, rfunc) - return None - if data[2] in ["lcaption", "ldump"]: - handler_dict[user_id] = False - await query.answer() - edit_mode = len(data) == 4 - await update_user_settings(query, data[2], "leech", edit_mode) - if not edit_mode: - return None - pfunc = partial(set_custom, pre_event=query, key=data[2]) - rfunc = partial(update_user_settings, query, data[2], "leech") - await event_handler(client, query, pfunc, rfunc) - return None - if data[2] in ["dlcaption", "dldump"]: - handler_dict[user_id] = False - await query.answer() - 
update_user_ldata(user_id, data[2][1:], "") - await update_user_settings(query, data[2][1:], "leech") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] in ["dprefix", "dsuffix", "dremname", "dmetadata", "dattachment"]: - handler_dict[user_id] = False - await query.answer() - update_user_ldata(user_id, data[2][1:], "") - await update_user_settings(query, data[2][1:], "universal") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "duser_tds": - handler_dict[user_id] = False - await query.answer() - update_user_ldata(user_id, data[2][1:], {}) - if data[2] == "duser_tds": - update_user_ldata(user_id, "td_mode", False) - await update_user_settings(query, data[2][1:], "mirror") - if DATABASE_URL: - await DbManager().update_user_data(user_id) - return None - if data[2] == "back": - handler_dict[user_id] = False - await query.answer() - setting = data[3] if len(data) == 4 else None - await update_user_settings(query, setting) - return None - if data[2] == "reset_all": - handler_dict[user_id] = False + await event_handler(client, query, pfunc, document=True) + elif data[2] == "rcp": await query.answer() buttons = ButtonMaker() - buttons.callback("Yes", f"userset {user_id} reset_now y") - buttons.callback("No", f"userset {user_id} reset_now n") - buttons.callback("Close", f"userset {user_id} close", "footer") + if user_dict.get("rclone_path", False): + buttons.data_button( + "Reset Rclone Path", + f"userset {user_id} rclone_path", + ) + buttons.data_button("Back", f"userset {user_id} rclone") + buttons.data_button("Close", f"userset {user_id} close") + rmsg = "Send Rclone Path. Timeout: 60 sec" + await edit_message(message, rmsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="rclone_path") + await event_handler(client, query, pfunc) + elif data[2] == "token": + await query.answer() + buttons = ButtonMaker() + if await aiopath.exists(token_pickle): + buttons.data_button( + "Delete token.pickle", + f"userset {user_id} token_pickle", + ) + buttons.data_button("Back", f"userset {user_id} gdrive") + buttons.data_button("Close", f"userset {user_id} close") await edit_message( - message, "Do you want to Reset Settings ?", buttons.column(2) + message, + "Send token.pickle. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, ) - return None - if data[2] == "reset_now": - handler_dict[user_id] = False - if data[3] == "n": - return await update_user_settings(query) - if await aiopath.exists(thumb_path): - await aioremove(thumb_path) - if await aiopath.exists(rclone_path): - await aioremove(rclone_path) + pfunc = partial(add_token_pickle, pre_event=query) + await event_handler(client, query, pfunc, document=True) + elif data[2] == "gdid": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("gdrive_id", False): + buttons.data_button("Reset Gdrive ID", f"userset {user_id} gdrive_id") + buttons.data_button("Back", f"userset {user_id} gdrive") + buttons.data_button("Close", f"userset {user_id} close") + rmsg = "Send Gdrive ID. 
Timeout: 60 sec" + await edit_message(message, rmsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="gdrive_id") + await event_handler(client, query, pfunc) + elif data[2] == "index": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("index_url", False): + buttons.data_button("Remove Index URL", f"userset {user_id} index_url") + buttons.data_button("Back", f"userset {user_id} gdrive") + buttons.data_button("Close", f"userset {user_id} close") + rmsg = "Send Index URL. Timeout: 60 sec" + await edit_message(message, rmsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="index_url") + await event_handler(client, query, pfunc) + elif data[2] == "leech_prefix": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("lprefix", False) or ( + "lprefix" not in user_dict and Config.LEECH_FILENAME_PREFIX + ): + buttons.data_button("Remove Leech Prefix", f"userset {user_id} lprefix") + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send Leech Filename Prefix. You can add HTML tags. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="lprefix") + await event_handler(client, query, pfunc) + elif data[2] == "leech_caption": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("lcaption", False) or ( + "lcaption" not in user_dict and Config.LEECH_FILENAME_CAPTION + ): + buttons.data_button("Remove", f"userset {user_id} lcaption") + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send Leech Filename Caption. You can add HTML tags. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="lcaption") + await event_handler(client, query, pfunc) + elif data[2] == "ldest": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("leech_dest", False) or ( + "leech_dest" not in user_dict and Config.LEECH_DUMP_CHAT + ): + buttons.data_button( + "Reset Leech Destination", + f"userset {user_id} leech_dest", + ) + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send leech destination ID/USERNAME/PM. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="leech_dest") + await event_handler(client, query, pfunc) + elif data[2] == "tlayout": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("thumb_layout", False) or ( + "thumb_layout" not in user_dict and Config.THUMBNAIL_LAYOUT + ): + buttons.data_button( + "Reset Thumbnail Layout", + f"userset {user_id} thumb_layout", + ) + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send thumbnail layout (widthxheight, 2x2, 3x3, 2x4, 4x4, ...). 
Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="thumb_layout") + await event_handler(client, query, pfunc) + elif data[2] == "s_string": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("session_string", False): + buttons.data_button( + "Remove session", + f"userset {user_id} session_string", + ) + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send your pyrogram V2 session string for download content from private channel or restricted channel. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="session_string") + await event_handler(client, query, pfunc) + elif data[2] == "u_dump": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("user_dump", False): + buttons.data_button( + "Remove dump", + f"userset {user_id} user_dump", + ) + buttons.data_button("Back", f"userset {user_id} leech") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send your dump chat id, example: -10025638428. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="user_dump") + await event_handler(client, query, pfunc) + elif data[2] == "ex_ex": await query.answer() - update_user_ldata(user_id, None, None) + buttons = ButtonMaker() + if user_dict.get("excluded_extensions", False) or ( + "excluded_extensions" not in user_dict and extension_filter + ): + buttons.data_button( + "Remove Excluded Extensions", + f"userset {user_id} excluded_extensions", + ) + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send exluded extenions seperated by space without dot at beginning. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="excluded_extensions") + await event_handler(client, query, pfunc) + elif data[2] == "name_substitute": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("name_sub", False): + buttons.data_button( + "Remove Name Subtitute", + f"userset {user_id} name_sub", + ) + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + emsg = r"""Word Subtitions. You can add pattern instead of normal text. Timeout: 60 sec +NOTE: You must add \ before any character, those are the characters: \^$.|?*+()[]{}- +Example: script/code/s | mirror/leech | tea/ /s | clone | cpu/ | \[mltb\]/mltb | \\text\\/text/s +1. script will get replaced by code with sensitive case +2. mirror will get replaced by leech +4. tea will get replaced by space with sensitive case +5. clone will get removed +6. cpu will get replaced by space +7. [mltb] will get replaced by mltb +8. 
\text\ will get replaced by text (case-sensitive) +""" + emsg += ( + f"Your Current Value is {user_dict.get('name_sub') or 'not added yet!'}" + ) + await edit_message( + message, + emsg, + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="name_sub") + await event_handler(client, query, pfunc) + elif data[2] == "metadata_key": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("metadata", False): + buttons.data_button("Remove Metadata key", f"userset {user_id} metadata") + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + emsg = "Metadata will change MKV video files, including all audio and subtitle stream titles." + emsg += ( + f"\nYour Current Value is {user_dict.get('metadata') or 'not added yet!'}" + ) + await edit_message(message, emsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="metadata") + await event_handler(client, query, pfunc) + elif data[2] == "watermark_key": + await query.answer() + buttons = ButtonMaker() + if user_dict.get("watermark", False): + buttons.data_button( + "Remove Watermark text", + f"userset {user_id} watermark", + ) + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + emsg = "Sorry, no documentation for now." + emsg += ( + f"\nYour Current Value is {user_dict.get('watermark') or 'not added yet!'}" + ) + await edit_message(message, emsg, buttons.build_menu(1), markdown=True) + pfunc = partial(set_option, pre_event=query, option="watermark") + await event_handler(client, query, pfunc) + elif data[2] in ["gd", "rc"]: + await query.answer() + du = "rc" if data[2] == "gd" else "gd" + update_user_ldata(user_id, "default_upload", du) await update_user_settings(query) + await database.update_user_data(user_id) + elif data[2] == "user_tokens": + await query.answer() + tr = data[3].lower() == "false" + update_user_ldata(user_id, "user_tokens", tr) + await update_user_settings(query) + await database.update_user_data(user_id)
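The upload-path branches just below accept one "name path" pair per line, which set_option (earlier in this hunk) stores as a name-to-path dict; excluded extensions are normalized to a lower-case list in the same handler. A minimal standalone sketch of those two rules, with illustrative helper names that are not part of the patch:

def parse_excluded_extensions(text: str) -> list[str]:
    # "aria2" and "!qB" are always kept, mirroring the handler's defaults
    value = ["aria2", "!qB"]
    for ext in text.split():
        value.append(ext.lstrip(".").strip().lower())
    return value

def parse_upload_paths(text: str) -> dict[str, str]:
    # one "name path" pair per line, split on the first space only
    paths: dict[str, str] = {}
    for line in text.split("\n"):
        data = line.split(maxsplit=1)
        if len(data) != 2:
            raise ValueError("Wrong format! Add name and path separated by a space.")
        name, path = data
        paths[name] = path
    return paths

assert parse_excluded_extensions(".MKV srt") == ["aria2", "!qB", "mkv", "srt"]
assert parse_upload_paths("movies remote:Movies") == {"movies": "remote:Movies"}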
+ elif data[2] == "upload_paths": + await query.answer() buttons = ButtonMaker() - buttons.callback("Close", f"userset {message.from_user.id} close") - button = buttons.column(1) - for user, data in user_data.items(): - msg += f"\n\n{user}:" - if data: - for key, value in data.items(): - if key in ["token", "time"]: - continue - msg += f"\n{key}: {escape(str(value))}" + buttons.data_button("New Path", f"userset {user_id} new_path") + if user_dict.get(data[2], False): + buttons.data_button("Show All Paths", f"userset {user_id} show_path") + buttons.data_button("Remove Path", f"userset {user_id} rm_path") + buttons.data_button("Back", f"userset {user_id} back") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Add or remove upload paths.\n", + buttons.build_menu(1), + markdown=True, + ) + elif data[2] == "new_path": + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"userset {user_id} upload_paths") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send a path name (no spaces) to use as a shortcut, then the path/ID, separated by a space. You can add multiple name and path pairs, each on a new line. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(set_option, pre_event=query, option="upload_paths") + await event_handler(client, query, pfunc) + elif data[2] == "rm_path": + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"userset {user_id} upload_paths") + buttons.data_button("Close", f"userset {user_id} close") + await edit_message( + message, + "Send the path names you want to delete, separated by spaces. Timeout: 60 sec", + buttons.build_menu(1), + markdown=True, + ) + pfunc = partial(delete_path, pre_event=query) + await event_handler(client, query, pfunc) + elif data[2] == "show_path": + await query.answer() + buttons = ButtonMaker() + buttons.data_button("Back", f"userset {user_id} upload_paths") + buttons.data_button("Close", f"userset {user_id} close") + user_dict = user_data.get(user_id, {}) + msg = "".join( + f"{key}: {value}\n" + for key, value in user_dict["upload_paths"].items() + ) + await edit_message( + message, + msg, + buttons.build_menu(1), + markdown=True, + ) + elif data[2] == "reset": + await query.answer() + if ud := user_data.get(user_id, {}): + if ud and ("is_sudo" in ud or "is_auth" in ud): + for k in list(ud.keys()): + if k not in ["is_sudo", "is_auth"]: + del user_data[user_id][k] + else: - msg += "\nUser's Data is Empty!" 
- if len(msg.encode()) > 4000: - with BytesIO(str.encode(msg)) as ofile: + user_data[user_id].clear() + await update_user_settings(query) + await database.update_user_data(user_id) + for fpath in [thumb_path, rclone_conf, token_pickle]: + if await aiopath.exists(fpath): + await remove(fpath) + elif data[2] == "back": + await query.answer() + await update_user_settings(query) + else: + await query.answer() + await delete_message(message.reply_to_message) + await delete_message(message) + + +@new_task +async def get_users_settings(_, message): + if user_data: + msg = "" + for u, d in user_data.items(): + kmsg = f"\n{u}:\n" + if vmsg := "".join( + f"{k}: {v}\n" for k, v in d.items() if f"{v}" + ): + msg += kmsg + vmsg + + msg_ecd = msg.encode() + if len(msg_ecd) > 4000: + with BytesIO(msg_ecd) as ofile: ofile.name = "users_settings.txt" - await sendFile(message, ofile) - else: - await send_message(message, msg, button) - elif int(userid) in user_data: - msg = f"{await get_user_info(client, userid)} ( {userid} ):" - if data := user_data[int(userid)]: - buttons = ButtonMaker() - buttons.callback( - "Delete", f"userset {message.from_user.id} user_del {userid}" - ) - buttons.callback("Close", f"userset {message.from_user.id} close") - button = buttons.column(1) - for key, value in data.items(): - if key in ["token", "time"]: - continue - msg += f"\n{key}: {escape(str(value))}" + await send_file(message, ofile) else: - msg += "\nThis User has not Saved anything." - button = None - await send_message(message, msg, button) + await send_message(message, msg) else: - await send_message(message, f"{userid} have not saved anything..") - - -bot.add_handler( - MessageHandler( - send_users_settings, - filters=command(BotCommands.UsersCommand) & CustomFilters.sudo, - ) -) -bot.add_handler( - MessageHandler( - user_settings, - filters=command(BotCommands.UserSetCommand) & CustomFilters.authorized_uset, - ) -) -bot.add_handler(CallbackQueryHandler(edit_user_settings, filters=regex("^userset"))) + await send_message(message, "No users data!") diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py index 8de723032..fa0b90e16 100644 --- a/bot/modules/ytdlp.py +++ b/bot/modules/ytdlp.py @@ -1,48 +1,37 @@ -import contextlib -from time import time -from asyncio import Event, sleep, wait_for, wrap_future +from asyncio import Event, create_task, wait_for from functools import partial +from time import time +from httpx import AsyncClient +from pyrogram.filters import regex, user +from pyrogram.handlers import CallbackQueryHandler from yt_dlp import YoutubeDL -from aiohttp import ClientSession -from aiofiles.os import path as aiopath -from pyrogram.filters import user, regex, command -from pyrogram.handlers import MessageHandler, CallbackQueryHandler -from bot import LOGGER, bot, user_data, config_dict +from bot import LOGGER, bot_loop, task_dict_lock +from bot.core.config_manager import Config +from bot.helper.aeon_utils.access_check import error_check from bot.helper.ext_utils.bot_utils import ( - is_url, - new_task, + COMMAND_USAGE, arg_parser, - new_thread, + new_task, sync_to_async, - fetch_user_tds, - is_gdrive_link, - is_rclone_path, - get_readable_time, +) +from bot.helper.ext_utils.links_utils import is_url +from bot.helper.ext_utils.status_utils import ( get_readable_file_size, + get_readable_time, +) +from bot.helper.listeners.task_listener import TaskListener +from bot.helper.mirror_leech_utils.download_utils.yt_dlp_download import ( + YoutubeDLHelper, ) -from bot.helper.ext_utils.bulk_links import 
extract_bulk_links -from bot.helper.aeon_utils.nsfw_check import nsfw_precheck -from bot.helper.aeon_utils.send_react import send_react -from bot.helper.ext_utils.help_strings import YT_HELP_MESSAGE -from bot.helper.ext_utils.task_manager import task_utils -from bot.helper.telegram_helper.filters import CustomFilters -from bot.helper.listeners.tasks_listener import MirrorLeechListener -from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.button_build import ButtonMaker from bot.helper.telegram_helper.message_utils import ( delete_links, - edit_message, - send_message, delete_message, - one_minute_del, + edit_message, five_minute_del, -) -from bot.helper.mirror_leech_utils.rclone_utils.list import RcloneList -from bot.helper.mirror_leech_utils.upload_utils.gdriveTools import GoogleDriveHelper -from bot.helper.mirror_leech_utils.download_utils.yt_dlp_download import ( - YoutubeDLHelper, + send_message, ) @@ -69,7 +58,7 @@ async def select_format(_, query, obj): elif data[1] == "cancel": await edit_message(message, "Task has been cancelled.") obj.qual = None - obj.is_cancelled = True + obj.listener.is_cancelled = True obj.event.set() else: if data[1] == "sub": @@ -82,65 +71,59 @@ async def select_format(_, query, obj): class YtSelection: - def __init__(self, client, message): - self.__message = message - self.__user_id = message.from_user.id - self.__client = client - self.__is_m4a = False - self.__reply_to = None - self.__time = time() - self.__timeout = 120 - self.__is_playlist = False - self.is_cancelled = False - self.__main_buttons = None + def __init__(self, listener): + self.listener = listener + self._is_m4a = False + self._reply_to = None + self._time = time() + self._timeout = 120 + self._is_playlist = False + self._main_buttons = None self.event = Event() self.formats = {} self.qual = None - @new_thread - async def __event_handler(self): + async def _event_handler(self): pfunc = partial(select_format, obj=self) - handler = self.__client.add_handler( + handler = self.listener.client.add_handler( CallbackQueryHandler( - pfunc, filters=regex("^ytq") & user(self.__user_id) + pfunc, + filters=regex("^ytq") & user(self.listener.user_id), ), group=-1, ) try: - await wait_for(self.event.wait(), timeout=self.__timeout) + await wait_for(self.event.wait(), timeout=self._timeout) except Exception: - await edit_message( - self.__reply_to, "Timed Out. Task has been cancelled!" - ) + await edit_message(self._reply_to, "Timed Out. 
Task has been cancelled!") self.qual = None - self.is_cancelled = True + self.listener.is_cancelled = True self.event.set() finally: - self.__client.remove_handler(*handler) + self.listener.client.remove_handler(*handler) async def get_quality(self, result): - future = self.__event_handler() buttons = ButtonMaker() if "entries" in result: - self.__is_playlist = True + self._is_playlist = True for i in ["144", "240", "360", "480", "720", "1080", "1440", "2160"]: video_format = ( f"bv*[height<=?{i}][ext=mp4]+ba[ext=m4a]/b[height<=?{i}]" ) b_data = f"{i}|mp4" self.formats[b_data] = video_format - buttons.callback(f"{i}-mp4", f"ytq {b_data}") + buttons.data_button(f"{i}-mp4", f"ytq {b_data}") video_format = f"bv*[height<=?{i}][ext=webm]+ba/b[height<=?{i}]" b_data = f"{i}|webm" self.formats[b_data] = video_format - buttons.callback(f"{i}-webm", f"ytq {b_data}") - buttons.callback("MP3", "ytq mp3") - buttons.callback("Audio Formats", "ytq audio") - buttons.callback("Best Videos", "ytq bv*+ba/b") - buttons.callback("Best Audios", "ytq ba/b") - buttons.callback("Cancel", "ytq cancel", "footer") - self.__main_buttons = buttons.column(3) - msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" + buttons.data_button(f"{i}-webm", f"ytq {b_data}") + buttons.data_button("MP3", "ytq mp3") + buttons.data_button("Audio Formats", "ytq audio") + buttons.data_button("Best Videos", "ytq bv*+ba/b") + buttons.data_button("Best Audios", "ytq ba/b") + buttons.data_button("Cancel", "ytq cancel", "footer") + self._main_buttons = buttons.build_menu(3) + msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" else: format_dict = result.get("formats") if format_dict is not None: @@ -155,13 +138,15 @@ async def get_quality(self, result): else: size = 0 - if ( - item.get("video_ext") == "none" - and item.get("acodec") != "none" + if item.get("video_ext") == "none" and ( + item.get("resolution") == "audio only" + or item.get("acodec") != "none" ): if item.get("audio_ext") == "m4a": - self.__is_m4a = True - b_name = f"{item['acodec']}-{item['ext']}" + self._is_m4a = True + b_name = ( + f"{item.get('acodec') or format_id}-{item['ext']}" + ) v_format = format_id elif item.get("height"): height = item["height"] @@ -169,7 +154,7 @@ async def get_quality(self, result): fps = item["fps"] if item.get("fps") else "" b_name = f"{height}p{fps}-{ext}" ba_ext = ( - "[ext=m4a]" if self.__is_m4a and ext == "mp4" else "" + "[ext=m4a]" if self._is_m4a and ext == "mp4" else "" ) v_format = f"{format_id}+ba{ba_ext}/b[height=?{height}]" else: @@ -186,79 +171,81 @@ async def get_quality(self, result): buttonName = ( f"{b_name} ({get_readable_file_size(v_list[0])})" ) - buttons.callback(buttonName, f"ytq sub {b_name} {tbr}") + buttons.data_button(buttonName, f"ytq sub {b_name} {tbr}") else: - buttons.callback(b_name, f"ytq dict {b_name}") - buttons.callback("MP3", "ytq mp3") - buttons.callback("Audio Formats", "ytq audio") - buttons.callback("Best Video", "ytq bv*+ba/b") - buttons.callback("Best Audio", "ytq ba/b") - buttons.callback("Cancel", "ytq cancel", "footer") - self.__main_buttons = buttons.column(2) - msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - self.__reply_to = await send_message( - self.__message, msg, self.__main_buttons + buttons.data_button(b_name, f"ytq dict {b_name}") + buttons.data_button("MP3", "ytq mp3") + buttons.data_button("Audio Formats", "ytq 
audio") + buttons.data_button("Best Video", "ytq bv*+ba/b") + buttons.data_button("Best Audio", "ytq ba/b") + buttons.data_button("Cancel", "ytq cancel", "footer") + self._main_buttons = buttons.build_menu(2) + msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + self._reply_to = await send_message( + self.listener.message, + msg, + self._main_buttons, ) - await wrap_future(future) - if not self.is_cancelled: - await delete_message(self.__reply_to) + await self._event_handler() + if not self.listener.is_cancelled: + await delete_message(self._reply_to) return self.qual async def back_to_main(self): - if self.__is_playlist: - msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" + if self._is_playlist: + msg = f"Choose Playlist Videos Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" else: - msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - await edit_message(self.__reply_to, msg, self.__main_buttons) + msg = f"Choose Video Quality:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + await edit_message(self._reply_to, msg, self._main_buttons) async def qual_subbuttons(self, b_name): buttons = ButtonMaker() tbr_dict = self.formats[b_name] for tbr, d_data in tbr_dict.items(): button_name = f"{tbr}K ({get_readable_file_size(d_data[0])})" - buttons.callback(button_name, f"ytq sub {b_name} {tbr}") - buttons.callback("Back", "ytq back", "footer") - buttons.callback("Cancel", "ytq cancel", "footer") - subbuttons = buttons.column(2) - msg = f"Choose Bit rate for {b_name}:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - await edit_message(self.__reply_to, msg, subbuttons) + buttons.data_button(button_name, f"ytq sub {b_name} {tbr}") + buttons.data_button("Back", "ytq back", "footer") + buttons.data_button("Cancel", "ytq cancel", "footer") + subbuttons = buttons.build_menu(2) + msg = f"Choose Bit rate for {b_name}:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + await edit_message(self._reply_to, msg, subbuttons) async def mp3_subbuttons(self): - i = "s" if self.__is_playlist else "" + i = "s" if self._is_playlist else "" buttons = ButtonMaker() audio_qualities = [64, 128, 320] for q in audio_qualities: audio_format = f"ba/b-mp3-{q}" - buttons.callback(f"{q}K-mp3", f"ytq {audio_format}") - buttons.callback("Back", "ytq back") - buttons.callback("Cancel", "ytq cancel") - subbuttons = buttons.column(3) - msg = f"Choose mp3 Audio{i} Bitrate:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - await edit_message(self.__reply_to, msg, subbuttons) + buttons.data_button(f"{q}K-mp3", f"ytq {audio_format}") + buttons.data_button("Back", "ytq back") + buttons.data_button("Cancel", "ytq cancel") + subbuttons = buttons.build_menu(3) + msg = f"Choose mp3 Audio{i} Bitrate:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + await edit_message(self._reply_to, msg, subbuttons) async def audio_format(self): - i = "s" if self.__is_playlist else "" + i = "s" if self._is_playlist else "" buttons = ButtonMaker() for frmt in ["aac", "alac", "flac", "m4a", "opus", "vorbis", "wav"]: audio_format = f"ba/b-{frmt}-" - buttons.callback(frmt, f"ytq aq {audio_format}") - buttons.callback("Back", "ytq back", "footer") - buttons.callback("Cancel", "ytq cancel", "footer") - subbuttons = buttons.column(3) - msg = f"Choose 
Audio{i} Format:\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - await edit_message(self.__reply_to, msg, subbuttons) + buttons.data_button(frmt, f"ytq aq {audio_format}") + buttons.data_button("Back", "ytq back", "footer") + buttons.data_button("Cancel", "ytq cancel", "footer") + subbuttons = buttons.build_menu(3) + msg = f"Choose Audio{i} Format:\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + await edit_message(self._reply_to, msg, subbuttons) async def audio_quality(self, format): - i = "s" if self.__is_playlist else "" + i = "s" if self._is_playlist else "" buttons = ButtonMaker() for qual in range(11): audio_format = f"{format}{qual}" - buttons.callback(qual, f"ytq {audio_format}") - buttons.callback("Back", "ytq aq back") - buttons.callback("Cancel", "ytq aq cancel") - subbuttons = buttons.column(5) - msg = f"Choose Audio{i} Qaulity:\n0 is best and 10 is worst\nTimeout: {get_readable_time(self.__timeout-(time()-self.__time), True)}" - await edit_message(self.__reply_to, msg, subbuttons) + buttons.data_button(qual, f"ytq {audio_format}") + buttons.data_button("Back", "ytq aq back") + buttons.data_button("Cancel", "ytq aq cancel") + subbuttons = buttons.build_menu(5) + msg = f"Choose Audio{i} Quality:\n0 is best and 10 is worst\nTimeout: {get_readable_time(self._timeout - (time() - self._time))}" + await edit_message(self._reply_to, msg, subbuttons) def extract_info(link, options): @@ -271,305 +258,256 @@ async def extract_info(link, options): async def _mdisk(link, name): key = link.split("/")[-1] - async with ( - ClientSession() as session, - session.get( - f"https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}" - ) as resp, + async with AsyncClient(verify=False) as client: + resp = await client.get( + f"https://diskuploader.entertainvideo.com/v1/file/cdnurl?param={key}", + ) + if resp.status_code == 200: + resp_json = resp.json() + link = resp_json["source"] + if not name: + name = resp_json["filename"] + return name, link + + +class YtDlp(TaskListener): + def __init__( + self, + client, + message, + _=None, + is_leech=False, + __=None, + ___=None, + same_dir=None, + bulk=None, + multi_tag=None, + options="", ): - if resp.status == 200: - resp_json = await resp.json() - link = resp_json["source"] - if not name: - name = resp_json["filename"] - return name, link - - -@new_task -async def _ytdl(client, message, is_leech=False, same_dir=None, bulk=[]): - await send_react(message) - text = message.text.split("\n") - input_list = text[0].split(" ") - qual = "" - arg_base = { - "link": "", - "-m": "", - "-n": "", - "-opt": "", - "-up": "", - "-rcf": "", - "-id": "", - "-index": "", - "-t": "", - "-s": False, - "-b": False, - "-z": False, - "-i": "0", - "-ss": "0", - } - args = arg_parser(input_list[1:], arg_base) - i = args["-i"] - select = args["-s"] - isBulk = args["-b"] - opt = args["-opt"] - folder_name = args["-m"] - name = args["-n"] - up = args["-up"] - rcf = args["-rcf"] - link = args["link"] - compress = args["-z"] - thumb = args["-t"] - drive_id = args["-id"] - index_link = args["-index"] - ss = args["-ss"] - multi = int(i) if i.isdigit() else 0 - sshots = min(int(ss) if ss.isdigit() else 0, 10) - bulk_start = 0 - bulk_end = 0 - - if not isinstance(isBulk, bool): - dargs = isBulk.split(":") - bulk_start = dargs[0] or None - if len(dargs) == 2: - bulk_end = dargs[1] or None - isBulk = True - - if drive_id and is_gdrive_link(drive_id): - drive_id = GoogleDriveHelper.getIdFromUrl(drive_id) - - if folder_name and not 
isBulk: - folder_name = f"/{folder_name}" if same_dir is None: - same_dir = {"total": multi, "tasks": set(), "name": folder_name} - same_dir["tasks"].add(message.id) + same_dir = {} + if bulk is None: + bulk = [] + self.message = message + self.client = client + self.multi_tag = multi_tag + self.options = options + self.same_dir = same_dir + self.bulk = bulk + super().__init__() + self.is_ytdlp = True + self.is_leech = is_leech + + async def new_event(self): + text = self.message.text.split("\n") + input_list = text[0].split(" ") + qual = "" + error_msg, error_button = await error_check(self.message) + if error_msg: + await delete_links(self.message) + error = await send_message(self.message, error_msg, error_button) + return await five_minute_del(error) + args = { + "-doc": False, + "-med": False, + "-s": False, + "-b": False, + "-z": False, + "-sv": False, + "-ss": False, + "-f": False, + "-fd": False, + "-fu": False, + "-ml": False, + "-i": 0, + "-sp": 0, + "link": "", + "-m": "", + "-opt": "", + "-n": "", + "-up": "", + "-rcf": "", + "-t": "", + "-ca": "", + "-cv": "", + "-ns": "", + "-md": "", + "-tl": "", + "-ff": set(), + } + + arg_parser(input_list[1:], args) - if isBulk: try: - bulk = await extract_bulk_links(message, bulk_start, bulk_end) - if len(bulk) == 0: - raise ValueError("Bulk Empty!") + self.multi = int(args["-i"]) except Exception: - await send_message( - message, - "Reply to text file or tg message that have links seperated by new line!", - ) - return None - b_msg = input_list[:1] - b_msg.append(f"{bulk[0]} -i {len(bulk)}") - nextmsg = await send_message(message, " ".join(b_msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - nextmsg.from_user = message.from_user - _ytdl(client, nextmsg, is_leech, same_dir, bulk) - return None + self.multi = 0 - if len(bulk) != 0: - del bulk[0] - - @new_task - async def __run_multi(): - if multi <= 1: - return - await sleep(5) - if len(bulk) != 0: - msg = input_list[:1] - msg.append(f"{bulk[0]} -i {multi - 1}") - nextmsg = await send_message(message, " ".join(msg)) + try: + if args["-ff"]: + if isinstance(args["-ff"], set): + self.ffmpeg_cmds = args["-ff"] + else: + self.ffmpeg_cmds = eval(args["-ff"]) + except Exception as e: + self.ffmpeg_cmds = None + LOGGER.error(e) + + self.select = args["-s"] + self.name = args["-n"] + self.up_dest = args["-up"] + self.rc_flags = args["-rcf"] + self.link = args["link"] + self.compress = args["-z"] + self.thumb = args["-t"] + self.split_size = args["-sp"] + self.sample_video = args["-sv"] + self.screen_shots = args["-ss"] + self.force_run = args["-f"] + self.force_download = args["-fd"] + self.force_upload = args["-fu"] + self.convert_audio = args["-ca"] + self.convert_video = args["-cv"] + self.name_sub = args["-ns"] + self.mixed_leech = args["-ml"] + self.thumbnail_layout = args["-tl"] + self.as_doc = args["-doc"] + self.as_med = args["-med"] + self.metadata = args["-md"] + self.folder_name = f"/{args['-m']}" if len(args["-m"]) > 0 else "" + + is_bulk = args["-b"] + + bulk_start = 0 + bulk_end = 0 + reply_to = None + opt = args["-opt"] + + if not isinstance(is_bulk, bool): + dargs = is_bulk.split(":") + bulk_start = dargs[0] or None + if len(dargs) == 2: + bulk_end = dargs[1] or None + is_bulk = True + + if not is_bulk: + if self.multi > 0: + if self.folder_name: + async with task_dict_lock: + if self.folder_name in self.same_dir: + self.same_dir[self.folder_name]["tasks"].add(self.mid) + for fd_name in self.same_dir: + if fd_name != 
self.folder_name: + self.same_dir[fd_name]["total"] -= 1 + elif self.same_dir: + self.same_dir[self.folder_name] = { + "total": self.multi, + "tasks": {self.mid}, + } + for fd_name in self.same_dir: + if fd_name != self.folder_name: + self.same_dir[fd_name]["total"] -= 1 + else: + self.same_dir = { + self.folder_name: { + "total": self.multi, + "tasks": {self.mid}, + }, + } + elif self.same_dir: + async with task_dict_lock: + for fd_name in self.same_dir: + self.same_dir[fd_name]["total"] -= 1 else: - msg = [s.strip() for s in input_list] - index = msg.index("-i") - msg[index + 1] = f"{multi - 1}" - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=message.reply_to_message_id + 1 - ) - nextmsg = await send_message(nextmsg, " ".join(msg)) - nextmsg = await client.get_messages( - chat_id=message.chat.id, message_ids=nextmsg.id - ) - if folder_name: - same_dir["tasks"].add(nextmsg.id) - nextmsg.from_user = message.from_user - await sleep(5) - _ytdl(client, nextmsg, is_leech, same_dir, bulk) - - path = f"/usr/src/app/downloads/{message.id}{folder_name}" - - if len(text) > 1 and text[1].startswith("Tag: "): - tag, id_ = text[1].split("Tag: ")[1].split() - message.from_user = await client.get_users(id_) - with contextlib.suppress(Exception): - await message.unpin() - - user_id = message.from_user.id - user_dict = user_data.get(user_id, {}) - opt = opt or user_dict.get("yt_opt") or config_dict["YT_DLP_OPTIONS"] - - if username := message.from_user.username: - tag = f"@{username}" - else: - tag = message.from_user.mention + await self.init_bulk(input_list, bulk_start, bulk_end, YtDlp) + return None - if not link and (reply_to := message.reply_to_message): - link = reply_to.text.split("\n", 1)[0].strip() + if len(self.bulk) != 0: + del self.bulk[0] - if not is_url(link): - reply_message = await send_message(message, YT_HELP_MESSAGE) - await delete_message(message) - await one_minute_del(reply_message) - return None + path = f"{Config.DOWNLOAD_DIR}{self.mid}{self.folder_name}" - error_msg = [] - error_button = None - if await nsfw_precheck(message): - error_msg.extend(["NSFW detected"]) - task_utilis_msg, error_button = await task_utils(message) - if task_utilis_msg: - error_msg.extend(task_utilis_msg) - if error_msg: - final_msg = f"Hey, {tag}!\n" - for __i, __msg in enumerate(error_msg, 1): - final_msg += f"\n
{__i}: {__msg}
" - if error_button is not None: - error_button = error_button.column(2) - await delete_links(message) - force_m = await send_message(message, final_msg, error_button) - await five_minute_del(force_m) - return None + await self.get_tag(text) - if not is_leech: - if config_dict["DEFAULT_UPLOAD"] == "rc" and not up or up == "rc": - up = config_dict["RCLONE_PATH"] - if not up and config_dict["DEFAULT_UPLOAD"] == "gd": - up = "gd" - user_tds = await fetch_user_tds(message.from_user.id) - if not drive_id and len(user_tds) == 1: - drive_id, index_link = next(iter(user_tds.values())).values() - if drive_id and not await sync_to_async( - GoogleDriveHelper().getFolderData, drive_id - ): - return await send_message( - message, "Google Drive ID validation failed!!" - ) - if up == "gd" and not config_dict["GDRIVE_ID"] and not drive_id: - await send_message(message, "GDRIVE_ID not Provided!") - await delete_links(message) - return None - if not up: - await send_message(message, "No Rclone Destination!") - await delete_links(message) - return None - if up not in ["rcl", "gd"]: - if up.startswith("mrcc:"): - config_path = f"tanha/{message.from_user.id}.conf" - else: - config_path = "rcl.conf" - if not await aiopath.exists(config_path): - await send_message( - message, f"Rclone Config: {config_path} not Exists!" - ) - await delete_links(message) - return None - if up != "gd" and not is_rclone_path(up): - await send_message(message, "Wrong Rclone Upload Destination!") - await delete_links(message) + opt = opt or self.user_dict.get("yt_opt") or Config.YT_DLP_OPTIONS + + if not self.link and (reply_to := self.message.reply_to_message): + self.link = reply_to.text.split("\n", 1)[0].strip() + + if not is_url(self.link): + await send_message( + self.message, + COMMAND_USAGE["yt"][0], + COMMAND_USAGE["yt"][1], + ) + await self.remove_from_same_dir() return None - if up == "rcl" and not is_leech: - up = await RcloneList(client, message).get_rclone_path("rcu") - if not is_rclone_path(up): - await send_message(message, up) - await delete_links(message) + if "mdisk.me" in self.link: + self.name, self.link = await _mdisk(self.link, self.name) + + try: + await self.before_start() + except Exception as e: + await send_message(self.message, e) + await self.remove_from_same_dir() return None - listener = MirrorLeechListener( - message, - compress, - is_leech=is_leech, - tag=tag, - same_dir=same_dir, - rc_flags=rcf, - upPath=up, - drive_id=drive_id, - index_link=index_link, - is_ytdlp=True, - files_utils={"screenshots": sshots, "thumb": thumb}, - ) - - if "mdisk.me" in link: - name, link = await _mdisk(link, name) - - options = {"usenetrc": True, "cookiefile": "cookies.txt"} - if opt: - yt_opt = opt.split("|") - for ytopt in yt_opt: - key, value = map(str.strip, ytopt.split(":", 1)) - if key == "format" and value.startswith("ba/b-"): - if select: - qual = "" - elif value.startswith("ba/b-"): - qual = value + options = {"usenetrc": True, "cookiefile": "cookies.txt"} + if opt: + yt_opts = opt.split("|") + for ytopt in yt_opts: + key, value = map(str.strip, ytopt.split(":", 1)) + if key in ["postprocessors", "download_ranges"]: continue - if value.startswith("^"): - if "." 
in value or value == "^inf": - value = float(value.split("^")[1]) - else: - value = int(value.split("^")[1]) - elif value.lower() == "true": - value = True - elif value.lower() == "false": - value = False - elif value.startswith(("{", "[", "(")) and value.endswith( - ("}", "]", ")") - ): - value = eval(value) - options[key] = value - + if key == "format" and not self.select: + if value.startswith("ba/b-"): + qual = value + continue + qual = value + if value.startswith("^"): + if "." in value or value == "^inf": + value = float(value.split("^")[1]) + else: + value = int(value.split("^")[1]) + elif value.lower() == "true": + value = True + elif value.lower() == "false": + value = False + elif value.startswith(("{", "[", "(")) and value.endswith( + ("}", "]", ")"), + ): + value = eval(value) + options[key] = value options["playlist_items"] = "0" - try: - result = await sync_to_async(extract_info, link, options) - except Exception as e: - msg = str(e).replace("<", " ").replace(">", " ") - x = await send_message(message, f"{tag} {msg}") - __run_multi() - await delete_links(message) - await five_minute_del(x) - return None - - __run_multi() - - if not select and (not qual and "format" in options): - qual = options["format"] - - if not qual: - qual = await YtSelection(client, message).get_quality(result) - if qual is None: + try: + result = await sync_to_async(extract_info, self.link, options) + except Exception as e: + msg = str(e).replace("<", " ").replace(">", " ") + await send_message(self.message, f"{self.tag} {msg}") + await self.remove_from_same_dir() return None - await delete_links(message) - LOGGER.info(f"Downloading with YT-DLP: {link}") - playlist = "entries" in result - ydl = YoutubeDLHelper(listener) - await ydl.add_download(link, path, name, qual, playlist, opt) - return None + finally: + await self.run_multi(input_list, YtDlp) + if not qual: + qual = await YtSelection(self).get_quality(result) + if qual is None: + await self.remove_from_same_dir() + return None -async def ytdl(client, message): - _ytdl(client, message) + LOGGER.info(f"Downloading with YT-DLP: {self.link}") + playlist = "entries" in result + ydl = YoutubeDLHelper(self) + create_task(ydl.add_download(path, qual, playlist, opt)) # noqa: RUF006 + await delete_links(self.message) + return None -async def ytdlleech(client, message): - _ytdl(client, message, is_leech=True) +async def ytdl(client, message): + bot_loop.create_task(YtDlp(client, message).new_event()) -bot.add_handler( - MessageHandler( - ytdl, filters=command(BotCommands.YtdlCommand) & CustomFilters.authorized - ) -) -bot.add_handler( - MessageHandler( - ytdlleech, - filters=command(BotCommands.YtdlLeechCommand) & CustomFilters.authorized, - ) -) +async def ytdl_leech(client, message): + bot_loop.create_task(YtDlp(client, message, is_leech=True).new_event()) diff --git a/config_sample.py b/config_sample.py new file mode 100644 index 000000000..343880dcb --- /dev/null +++ b/config_sample.py @@ -0,0 +1,107 @@ +# REQUIRED CONFIG +BOT_TOKEN = "" +OWNER_ID = 0 +TELEGRAM_API = 0 +TELEGRAM_HASH = "" + +# SEMI-REQUIRED, WE SUGGEST TO FILL IT FROM MONGODB +DATABASE_URL = "" + +# OPTIONAL CONFIG +USER_SESSION_STRING = "" +DOWNLOAD_DIR = "/usr/src/app/downloads/" +CMD_SUFFIX = "" +AUTHORIZED_CHATS = "" +SUDO_USERS = "" +DEFAULT_UPLOAD = "rc" +FILELION_API = "" +STREAMWISH_API = "" +EXTENSION_FILTER = "" +INCOMPLETE_TASK_NOTIFIER = False +YT_DLP_OPTIONS = "" +USE_SERVICE_ACCOUNTS = False +NAME_SUBSTITUTE = "" +FFMPEG_CMDS = {} + +# INKYPINKY +DELETE_LINKS = False 
+FSUB_IDS = ""
+TOKEN_TIMEOUT = 0
+PAID_CHANNEL_ID = 0
+PAID_CHANNEL_LINK = ""
+SET_COMMANDS = True
+METADATA_KEY = ""
+WATERMARK_KEY = ""
+LOG_CHAT_ID = 0
+LEECH_FILENAME_CAPTION = ""
+
+# GDrive Tools
+GDRIVE_ID = ""
+IS_TEAM_DRIVE = False
+STOP_DUPLICATE = False
+INDEX_URL = ""
+
+# Rclone
+RCLONE_PATH = ""
+RCLONE_FLAGS = ""
+RCLONE_SERVE_URL = ""
+RCLONE_SERVE_PORT = 0
+RCLONE_SERVE_USER = ""
+RCLONE_SERVE_PASS = ""
+
+# Mega credentials
+MEGA_EMAIL = ""
+MEGA_PASSWORD = ""
+
+# Update
+UPSTREAM_REPO = "https://github.com/AeonOrg/Aeon-MLTB"
+UPSTREAM_BRANCH = "main"
+
+# Leech
+LEECH_SPLIT_SIZE = 0
+AS_DOCUMENT = False
+MEDIA_GROUP = False
+USER_TRANSMISSION = False
+MIXED_LEECH = False
+LEECH_FILENAME_PREFIX = ""
+LEECH_DUMP_CHAT = ""
+THUMBNAIL_LAYOUT = ""
+
+# qBittorrent/Aria2c
+TORRENT_TIMEOUT = 0
+BASE_URL = ""
+BASE_URL_PORT = 80
+WEB_PINCODE = False
+
+# Queueing system
+QUEUE_ALL = 0
+QUEUE_DOWNLOAD = 0
+QUEUE_UPLOAD = 0
+
+# RSS
+RSS_DELAY = 600
+RSS_CHAT = ""
+RSS_SIZE_LIMIT = 0
+
+# Torrent Search
+SEARCH_API_LINK = ""
+SEARCH_LIMIT = 0
+SEARCH_PLUGINS = [
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/piratebay.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/limetorrents.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torlock.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentscsv.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/eztv.py",
+    "https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/torrentproject.py",
+    "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/kickass_torrent.py",
+    "https://raw.githubusercontent.com/MaurizioRicci/qBittorrent_search_engines/master/yts_am.py",
+    "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/linuxtracker.py",
+    "https://raw.githubusercontent.com/MadeOfMagicAndWires/qBit-plugins/master/engines/nyaasi.py",
+    "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/ettv.py",
+    "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/glotorrents.py",
+    "https://raw.githubusercontent.com/LightDestory/qBittorrent-Search-Plugins/master/src/engines/thepiratebay.py",
+    "https://raw.githubusercontent.com/v1k45/1337x-qBittorrent-search-plugin/master/leetx.py",
+    "https://raw.githubusercontent.com/nindogo/qbtSearchScripts/master/magnetdl.py",
+    "https://raw.githubusercontent.com/msagca/qbittorrent_plugins/main/uniondht.py",
+    "https://raw.githubusercontent.com/khensolomon/leyts/master/yts.py",
+]
diff --git a/default.otf b/default.otf
new file mode 100644
index 0000000000000000000000000000000000000000..a599581038f554d4201b6870c4e85248e485ace6
GIT binary patch
literal 26792
[... 26792 bytes of base85-encoded binary font data omitted ...]
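[Editor's note] The YT_DLP_OPTIONS value above feeds the option parser in the new
YtDlp.new_event (see the bot/modules/ytdlp.py hunk earlier in this patch). A minimal
standalone sketch of those coercion rules, omitting the format/select special-casing
and the postprocessors/download_ranges skip list:

    # Sketch only -- mirrors the value-coercion rules in YtDlp.new_event.
    def parse_yt_opts(opt: str) -> dict:
        options = {"usenetrc": True, "cookiefile": "cookies.txt"}
        for ytopt in opt.split("|"):
            key, value = map(str.strip, ytopt.split(":", 1))
            if value.startswith("^"):
                # ^4 -> 4 (int); ^0.5 and ^inf -> float
                raw = value.split("^")[1]
                value = float(raw) if "." in raw or raw == "inf" else int(raw)
            elif value.lower() == "true":
                value = True
            elif value.lower() == "false":
                value = False
            elif value.startswith(("{", "[", "(")) and value.endswith(("}", "]", ")")):
                value = eval(value)  # owner-supplied config only, as in the bot
            options[key] = value
        return options

    # parse_yt_opts("format:bv*+ba|concurrent_fragment_downloads:^4|nocheckcertificate:true")
    # -> {..., "format": "bv*+ba", "concurrent_fragment_downloads": 4,
    #     "nocheckcertificate": True}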
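[Editor's note] config_sample.py is the template the deployer copies to config.py. A
short sketch of how such a module is consumed; this is the same import_module pattern
the updated update.py uses just below (bot/core/config_manager.py is the runtime
counterpart):

    # Sketch: turn config.py (copied from config_sample.py) into a dict,
    # falling back to environment variables when the module is absent.
    import os
    from importlib import import_module

    try:
        settings = import_module("config")
        config_file = {
            key: value.strip() if isinstance(value, str) else value
            for key, value in vars(settings).items()
            if not key.startswith("__")
        }
    except ModuleNotFoundError:
        config_file = {}

    BOT_TOKEN = config_file.get("BOT_TOKEN") or os.getenv("BOT_TOKEN", "")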
zg#jFSvf{zXUYi=Yc)YZJ(dMPQgBdufbM^d5W3om}sNNuY&c5*wg~ejWf(ept90WNi zn+ri`X_yDG>U=G>&9&z~p}g)l`l8L90l}!gj;lz@fO&7K^5sKn1xVw^g3<46OnHsZ zsWwHo;e`FyG0%HEx*dgi8Vft34jQ87X;;d!Zn`DcJGjj`a)8#~gDlP^bXhwL7uIGW z+1)Bal~=05{1jEIh0{yS`dvd#1)tfqZRdKI)(GbtK2_r9JstT6T1RLS)Aqx<75CRN zF_d-X^;s|<2kS3U=E3OEG*$~1pQlq)!|-^T2BWJ0R!j8ExTl4kQk^&X^q<*1?kCBK z+3g30jFn(MRhEJtga+7LVbkh-1aTYp7sDZZ<5YZT<>Kce(x2QuqVCt_k84#0hU3er ze0b~0Q$4qVqKnD{DHOT`jsz=r_g&h1Pu{P7!%BX23cFvv$yRlC-N_|GqSrJ}qSw@r z=*dg=?Y$Ai{7RkT)k9!twSFb+q4sc#wnvf|!MbtJzJ*7QxLBmMW4{5JaoCUgXrpix zpRX=SQE|hoUaB4UtG6%uz$jP8QGL6ExtmYaV(}sJoU1SpOJc*gg%5*+g%aK@uvJQu z6jsi#zB(qeAS_z#rt@Uatyt-U+qXL$f9mw%ZKpyMkY2IOTUQ#faDGI)eml#SPqyEV{=+hB-!iD^MvU&~ zlFz58`4@TEpuvLh3YBw9)z5T4$`Zb1CNt}gv7~d+u_q9{%euLlu=NB4HgC%xL!ENy zM>9KwMJnLzx{vly str: + dt: datetime = datetime.fromtimestamp( + record.created, + tz=timezone("Asia/Dhaka"), + ) + return dt.strftime(datefmt) + + def format(self, record: LogRecord) -> str: return super().format(record).replace(record.levelname, record.levelname[:1]) formatter = CustomFormatter( - "[%(asctime)s] [%(levelname)s] - %(message)s", datefmt="%d-%b-%y %I:%M:%S %p" + "[%(asctime)s] %(levelname)s - %(message)s [%(module)s:%(lineno)d]", + datefmt="%d-%b %I:%M:%S %p", ) file_handler = FileHandler("log.txt") @@ -42,72 +64,79 @@ def format(self, record): basicConfig(handlers=[file_handler, stream_handler], level=INFO) -CONFIG_FILE_URL = environ.get("CONFIG_FILE_URL") +# Attempt to load from config.py try: - if len(CONFIG_FILE_URL) == 0: - raise TypeError - try: - res = get(CONFIG_FILE_URL) - if res.status_code == 200: - with open("config.env", "wb+") as f: - f.write(res.content) - else: - error(f"Failed to download config.env {res.status_code}") - except Exception as e: - error(f"CONFIG_FILE_URL: {e}") -except Exception: - pass - -load_dotenv("config.env", override=True) - -BOT_TOKEN = environ.get("BOT_TOKEN", "") -if len(BOT_TOKEN) == 0: - error("BOT_TOKEN variable is missing! Exiting now") - sys.exit(1) + settings = import_module("config") + config_file = { + key: value.strip() if isinstance(value, str) else value + for key, value in vars(settings).items() + if not key.startswith("__") + } +except ModuleNotFoundError: + log_error( + "The 'config.py' file is missing! Falling back to environment variables.", + ) + config_file = {} -bot_id = BOT_TOKEN.split(":", 1)[0] +# Fallback to environment variables if BOT_TOKEN is not set +BOT_TOKEN = config_file.get("BOT_TOKEN") or os.getenv("BOT_TOKEN") +if not BOT_TOKEN: + log_error("BOT_TOKEN variable is missing! Exiting now.") + exit(1) -DATABASE_URL = environ.get("DATABASE_URL", "") -if len(DATABASE_URL) == 0: - DATABASE_URL = None +BOT_ID = BOT_TOKEN.split(":", 1)[0] -if DATABASE_URL: - conn = MongoClient(DATABASE_URL) - db = conn.luna - if config_dict := db.settings.config.find_one({"_id": bot_id}): - environ["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"] - environ["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"] - conn.close() - -UPSTREAM_REPO = environ.get("UPSTREAM_REPO", "") -if len(UPSTREAM_REPO) == 0: - UPSTREAM_REPO = "https://github.com/5hojib/Aeon" - -UPSTREAM_BRANCH = environ.get("UPSTREAM_BRANCH", "") -if len(UPSTREAM_BRANCH) == 0: - UPSTREAM_BRANCH = "main" - -if path.exists(".git"): - run(["rm", "-rf", ".git"], check=False) - -update = run( - [ - f"git init -q \ - && git config --global user.email yesiamshojib@gmail.com \ - && git config --global user.name 5hojib \ - && git add . 
\ - && git commit -sm update -q \ - && git remote add origin {UPSTREAM_REPO} \ - && git fetch origin -q \ - && git reset --hard origin/{UPSTREAM_BRANCH} -q" - ], - shell=True, - check=False, +# Fallback to environment variables for DATABASE_URL +DATABASE_URL = ( + config_file.get("DATABASE_URL", "").strip() + or os.getenv("DATABASE_URL", "").strip() ) -if update.returncode == 0: - info("Successfully updated with latest commit from UPSTREAM_REPO") -else: - error( - "Something went wrong while updating, check UPSTREAM_REPO if valid or not!" +if DATABASE_URL: + try: + conn = MongoClient(DATABASE_URL, server_api=ServerApi("1")) + db = conn.luna + old_config = db.settings.deployConfig.find_one({"_id": BOT_ID}, {"_id": 0}) + config_dict = db.settings.config.find_one({"_id": BOT_ID}) + if ( + (old_config is not None and old_config == config_file) + or old_config is None + ) and config_dict is not None: + config_file["UPSTREAM_REPO"] = config_dict["UPSTREAM_REPO"] + config_file["UPSTREAM_BRANCH"] = config_dict["UPSTREAM_BRANCH"] + conn.close() + except Exception as e: + log_error(f"Database ERROR: {e}") + +UPSTREAM_REPO = config_file.get( + "UPSTREAM_REPO", + "https://github.com/AeonOrg/Aeon-MLTB", +).strip() + +UPSTREAM_BRANCH = config_file.get("UPSTREAM_BRANCH", "").strip() or "main" + +if UPSTREAM_REPO: + if path.exists(".git"): + srun(["rm", "-rf", ".git"], check=False) + + update = srun( + [ + f"git init -q \ + && git config --global user.email e.anastayyar@gmail.com \ + && git config --global user.name mltb \ + && git add . \ + && git commit -sm update -q \ + && git remote add origin {UPSTREAM_REPO} \ + && git fetch origin -q \ + && git reset --hard origin/{UPSTREAM_BRANCH} -q", + ], + shell=True, + check=False, ) + + if update.returncode == 0: + log_info("Successfully updated with latest commit from UPSTREAM_REPO") + else: + log_error( + "Something went wrong while updating, check UPSTREAM_REPO if valid or not!", + ) diff --git a/web/nodes.py b/web/nodes.py index 8a493d2d4..7d2dddd42 100644 --- a/web/nodes.py +++ b/web/nodes.py @@ -2,6 +2,8 @@ from anytree import NodeMixin +DOWNLOAD_DIR = "/usr/src/app/downloads/" + class TorNode(NodeMixin): def __init__( @@ -23,7 +25,7 @@ def __init__( if parent is not None: self.parent = parent if size is not None: - self.size = size + self.fsize = size if priority is not None: self.priority = priority if file_id is not None: @@ -37,13 +39,14 @@ def qb_get_folders(path): def get_folders(path): - fs = re_findall("/usr/src/app/downloads/[0-9]+/(.+)", path)[0] + fs = re_findall(f"{DOWNLOAD_DIR}[0-9]+/(.+)", path)[0] return fs.split("/") -def make_tree(res, aria2=False): - parent = TorNode("Torrent") - if not aria2: +def make_tree(res, tool=False): + if tool == "qbittorrent": + parent = TorNode("QBITTORRENT") + folder_id = 0 for i in res: folders = qb_get_folders(i.name) if len(folders) > 1: @@ -55,8 +58,12 @@ def make_tree(res, aria2=False): ) if current_node is None: previous_node = TorNode( - folders[j], parent=previous_node, is_folder=True + folders[j], + is_folder=True, + parent=previous_node, + file_id=folder_id, ) + folder_id += 1 else: previous_node = current_node TorNode( @@ -78,7 +85,9 @@ def make_tree(res, aria2=False): file_id=i.id, progress=round(i.progress * 100, 5), ) - else: + elif tool == "aria2": + parent = TorNode("ARIA2") + folder_id = 0 for i in res: folders = get_folders(i["path"]) priority = 1 @@ -93,64 +102,100 @@ def make_tree(res, aria2=False): ) if current_node is None: previous_node = TorNode( - folders[j], parent=previous_node, 
is_folder=True
+                            folders[j],
+                            is_folder=True,
+                            parent=previous_node,
+                            file_id=folder_id,
                         )
+                        folder_id += 1
                     else:
                         previous_node = current_node
+            try:
+                progress = round(
+                    (int(i["completedLength"]) / int(i["length"])) * 100,
+                    5,
+                )
+            except Exception:
+                progress = 0
             TorNode(
                 folders[-1],
                 is_file=True,
                 parent=previous_node,
-                size=i["length"],
+                size=int(i["length"]),
                 priority=priority,
                 file_id=i["index"],
-                progress=round(
-                    (int(i["completedLength"]) / int(i["length"])) * 100, 5
-                ),
+                progress=progress,
             )
         else:
+            try:
+                progress = round(
+                    (int(i["completedLength"]) / int(i["length"])) * 100,
+                    5,
+                )
+            except Exception:
+                progress = 0
             TorNode(
                 folders[-1],
                 is_file=True,
                 parent=parent,
-                size=i["length"],
+                size=int(i["length"]),
                 priority=priority,
                 file_id=i["index"],
-                progress=round(
-                    (int(i["completedLength"]) / int(i["length"])) * 100, 5
-                ),
+                progress=progress,
             )
-    return create_list(parent, ["", 0])
+
+    result = create_list(parent)
+    return {"files": result, "engine": tool}


-def create_list(par, msg):
-    [... old body built a nested HTML <ul>/<li> checkbox tree directly into
-    msg[0], with foldernode_/filenode_ inputs and per-file "/ {i.progress}%"
-    labels; the markup was lost in extraction ...]
-    return msg
+"""
+def print_tree(parent):
+    for pre, _, node in RenderTree(parent):
+        treestr = u"%s%s" % (pre, node.name)
+        print(treestr.ljust(8), node.is_folder, node.is_file)
+"""
+
+
+def create_list(parent, contents=None):
+    if contents is None:
+        contents = []
+    for i in parent.children:
+        if i.is_folder:
+            children = []
+            create_list(i, children)
+            contents.append(
+                {
+                    "id": f"folderNode_{i.file_id}",
+                    "name": i.name,
+                    "type": "folder",
+                    "children": children,
+                },
+            )
+        else:
+            contents.append(
+                {
+                    "id": i.file_id,
+                    "name": i.name,
+                    "size": i.fsize,
+                    "type": "file",
+                    "selected": bool(i.priority),
+                    "progress": i.progress,
+                },
+            )
+    return contents
+
+
+def extract_file_ids(data):
+    selected_files = []
+    unselected_files = []
+    for item in data:
+        if item.get("type") == "file":
+            if item.get("selected"):
+                selected_files.append(str(item["id"]))
+            else:
+                unselected_files.append(str(item["id"]))
+        if item.get("children"):
+            child_selected, child_unselected = extract_file_ids(item["children"])
+            selected_files.extend(child_selected)
+            unselected_files.extend(child_unselected)
+    return selected_files, unselected_files
diff --git a/web/templates/page.html b/web/templates/page.html
new file mode 100644
index 000000000..57ea9b023
--- /dev/null
+++ b/web/templates/page.html
@@ -0,0 +1,958 @@
+[... 958 lines of "Torrent Selector" markup omitted: a single-page UI with a
+light/dark theme toggle, a PIN entry form, the selectable file tree, rename
+controls, and the supporting CSS/JS; the tags were lost in extraction ...]
\ No newline at end of file
diff --git a/web/wserver.py b/web/wserver.py
index b4b8c0eba..a46ffccde 100644
--- a/web/wserver.py
+++ b/web/wserver.py
@@ -1,18 +1,16 @@
+from logging import INFO, FileHandler, StreamHandler, basicConfig, getLogger
 from time import sleep
-from logging import INFO, FileHandler, StreamHandler, getLogger, basicConfig
-from flask import Flask, request
 from aria2p import API
 from aria2p import Client as ariaClient
+from flask import Flask, jsonify, render_template, request
 from qbittorrentapi import Client as qbClient
 from qbittorrentapi import NotFound404Error

-from web.nodes import make_tree
+from web.nodes import extract_file_ids, make_tree

 app = Flask(__name__)

-aria2 = API(ariaClient(host="http://localhost", port=6800, secret=""))
-
 xnox_client = qbClient(
     host="localhost",
     port=8090,
@@ -21,6 +19,8 @@
     HTTPADAPTER_ARGS={"pool_maxsize": 200, "pool_block": True},
 )

+aria2 = API(ariaClient(host="http://localhost", port=6800, secret=""))
+
 basicConfig(
     format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
     handlers=[FileHandler("log.txt"), StreamHandler()],
@@ -29,640 +29,8 @@

 LOGGER = getLogger(__name__)

-page = """
-[... remainder of the embedded "Torrent File Selector" page template omitted
-(selection counters "Selected files: 0 of 0" / "Selected files size: 0 of 0",
-the {My_content} file-tree slot, and the submit form); markup lost in
-extraction ...]
-"""
-
-
-code_page = """
-[... embedded "Torrent Code Checker" PIN-entry template omitted (code input
-form and the "* Dont mess around. Your download will get messed up."
-warning); markup lost in extraction ...]
-"""
-
-
-def re_verfiy(paused, resumed, hash_id):
+def re_verify(paused, resumed, hash_id):
     paused = paused.strip()
     resumed = resumed.strip()
     if paused:
@@ -687,7 +55,9 @@ def re_verfiy(paused, resumed, hash_id):
         sleep(1)
         try:
             xnox_client.torrents_file_priority(
-                torrent_hash=hash_id, file_ids=paused, priority=0
+                torrent_hash=hash_id,
+                file_ids=paused,
+                priority=0,
             )
         except NotFound404Error as e:
             raise NotFound404Error from e
@@ -695,7 +65,9 @@
             LOGGER.error(f"{e} Errored in reverification paused!")
         try:
             xnox_client.torrents_file_priority(
-                torrent_hash=hash_id, file_ids=resumed, priority=1
+                torrent_hash=hash_id,
+                file_ids=resumed,
+                priority=1,
             )
         except NotFound404Error as e:
             raise NotFound404Error from e
@@ -708,94 +80,165 @@
     return True


-@app.route("/app/files/<string:id_>", methods=["GET"])
-def list_torrent_contents(id_):
-    if "pin_code" not in request.args:
-        return code_page.replace("{form_url}", f"/app/files/{id_}")
-
-    pincode = ""
-    for nbr in id_:
+@app.route("/app/files")
+def files():
+    return render_template("page.html")
+
+
+@app.route("/app/files/torrent", methods=["GET", "POST"])
+def handle_torrent():
+    if not (gid := request.args.get("gid")):
+        return jsonify(
+            {
+                "files": [],
+                "engine": "",
+                "error": "GID is missing",
+                "message": "GID not specified",
+            },
+        )
+
+    if not (pin := request.args.get("pin")):
+        return jsonify(
+            {
+                "files": [],
+                "engine": "",
+                "error": "Pin is missing",
+                "message": "PIN not specified",
+            },
+        )
+    code = ""
+    for nbr in gid:
         if nbr.isdigit():
-            pincode += str(nbr)
-        if len(pincode) == 4:
+            code += str(nbr)
+        if len(code) == 4:
             break
-    if request.args["pin_code"] != pincode:
-        return "
<h1>Incorrect pin code</h1>
" - - if len(id_) > 20: - res = xnox_client.torrents_files(torrent_hash=id_) - cont = make_tree(res) - else: - res = aria2.client.get_files(id_) - cont = make_tree(res, True) - return page.replace("{My_content}", cont[0]).replace( - "{form_url}", f"/app/files/{id_}?pin_code={pincode}" - ) - - -@app.route("/app/files/", methods=["POST"]) -def set_priority(id_): - data = dict(request.form) - resume = "" - if len(id_) > 20: - pause = "" - - for i, value in data.items(): - if "filenode" in i: - node_no = i.split("_")[-1] - - if value == "on": - resume += f"{node_no}|" - else: - pause += f"{node_no}|" - - pause = pause.strip("|") - resume = resume.strip("|") - - try: - xnox_client.torrents_file_priority( - torrent_hash=id_, file_ids=pause, priority=0 + if code != pin: + return jsonify( + { + "files": [], + "engine": "", + "error": "Invalid pin", + "message": "The PIN you entered is incorrect", + }, + ) + if request.method == "POST": + if not (mode := request.args.get("mode")): + return jsonify( + { + "files": [], + "engine": "", + "error": "Mode is not specified", + "message": "Mode is not specified", + }, ) - except NotFound404Error as e: - raise NotFound404Error from e - except Exception as e: - LOGGER.error(f"{e} Errored in paused") + data = request.get_json(cache=False, force=True) + if mode == "rename": + if len(gid) > 20: + handle_rename(gid, data) + content = { + "files": [], + "engine": "", + "error": "", + "message": "Rename successfully.", + } + else: + content = { + "files": [], + "engine": "", + "error": "Rename failed.", + "message": "Cannot rename aria2c torrent file", + } + else: + selected_files, unselected_files = extract_file_ids(data) + if len(gid) > 20: + selected_files = "|".join(selected_files) + unselected_files = "|".join(unselected_files) + set_qbittorrent(gid, selected_files, unselected_files) + else: + selected_files = ",".join(selected_files) + set_aria2(gid, selected_files) + content = { + "files": [], + "engine": "", + "error": "", + "message": "Your selection has been submitted successfully.", + } + else: try: - xnox_client.torrents_file_priority( - torrent_hash=id_, file_ids=resume, priority=1 - ) - except NotFound404Error as e: - raise NotFound404Error from e + if len(gid) > 20: + res = xnox_client.torrents_files(torrent_hash=gid) + content = make_tree(res, "qbittorrent") + else: + res = aria2.client.get_files(gid) + content = make_tree(res, "aria2") except Exception as e: - LOGGER.error(f"{e} Errored in resumed") - sleep(1) - if not re_verfiy(pause, resume, id_): - LOGGER.error(f"Verification Failed! Hash: {id_}") - else: - for i, value in data.items(): - if "filenode" in i and value == "on": - node_no = i.split("_")[-1] - resume += f"{node_no}," + LOGGER.error(str(e)) + content = { + "files": [], + "engine": "", + "error": "Error getting files", + "message": str(e), + } + return jsonify(content) - resume = resume.strip(",") - res = aria2.client.change_option(id_, {"select-file": resume}) - if res == "OK": - LOGGER.info(f"Verified! Gid: {id_}") +def handle_rename(gid, data): + try: + _type = data["type"] + del data["type"] + if _type == "file": + xnox_client.torrents_rename_file(torrent_hash=gid, **data) else: - LOGGER.info(f"Verification Failed! Report! 
Gid: {id_}") - return list_torrent_contents(id_) + xnox_client.torrents_rename_folder(torrent_hash=gid, **data) + except NotFound404Error as e: + raise NotFound404Error from e + except Exception as e: + LOGGER.error(f"{e} Errored in renaming") + + +def set_qbittorrent(gid, selected_files, unselected_files): + try: + xnox_client.torrents_file_priority( + torrent_hash=gid, + file_ids=unselected_files, + priority=0, + ) + except NotFound404Error as e: + raise NotFound404Error from e + except Exception as e: + LOGGER.error(f"{e} Errored in paused") + try: + xnox_client.torrents_file_priority( + torrent_hash=gid, + file_ids=selected_files, + priority=1, + ) + except NotFound404Error as e: + raise NotFound404Error from e + except Exception as e: + LOGGER.error(f"{e} Errored in resumed") + sleep(1) + if not re_verify(unselected_files, selected_files, gid): + LOGGER.error(f"Verification Failed! Hash: {gid}") + + +def set_aria2(gid, selected_files): + res = aria2.client.change_option(gid, {"select-file": selected_files}) + if res == "OK": + LOGGER.info(f"Verified! Gid: {gid}") + else: + LOGGER.info(f"Verification Failed! Report! Gid: {gid}") @app.route("/") def homepage(): - return "
<h1>See WZML-X @GitHub By Code With Weeb</h1>"
+    return "<h1>See mirror-leech-telegram-bot @GitHub By Anas</h1>"


 @app.errorhandler(Exception)
 def page_not_found(e):
     return (
-        f"<h1>404: Torrent not found! Mostly wrong input.<br><br>Error: {e}</h1>",
+        f"<h1>404: Task not found! Mostly wrong input.<br><br>Error: {e}</h1>",
         404,
     )