PEP-8 Compliance
Signed-off-by: nanometer5088 <[email protected]>
nanometer5088 committed Jun 29, 2023
1 parent ba844d7 commit a9fa31a
Showing 8 changed files with 95 additions and 51 deletions.
15 changes: 8 additions & 7 deletions main.py
@@ -24,7 +24,7 @@
silent = True
if len(sys.argv) <= 1:
silent = False

a = init(silent)

if a == -1:
@@ -87,12 +87,11 @@ def main():
username = str(input("Enter the tiktok username here: "))
log(f"The creator chosen was: @{username}\n")
links = proxitok_scraper(username)
downloadtiktoks(links) # add handling for when zero links are passed
downloadtiktoks(links) # add handling for when zero links are passed
sys.exit()

## Stream
if question == 2:

watchquestion = int(
input(
"""Do you want to watch your liked videos, a creator or trending videos?
@@ -107,7 +106,6 @@ def main():

## Stream liked videos
if watchquestion == 1:

randomquestion = int(
input(
"""Do you want to watch the tiktoks in randomized order?
@@ -188,6 +186,7 @@ def main():
# Warning, this section is experimental and will only run if you use any launch arguments
# GUI Code:


def arguments(args):
log("Running using launch arguments")

@@ -199,7 +198,7 @@ def arguments(args):
username = args.downloadcreator
log(f"The creator chosen was: @{username}\n")
links = proxitok_scraper(username)
downloadtiktoks(links) # add handling for when zero links are passed
downloadtiktoks(links) # add handling for when zero links are passed

elif args.streamlikedrandom:
log("The user chose to stream liked videos in shuffled mode\n")
@@ -223,8 +222,10 @@ def arguments(args):
log("The user chose to stream trending videos\n")
videos = int(args.streamtrending)
if videos >= 1:
streamtrending(videos)
streamtrending(videos)


if silent:
arguments(args)
else:
main()
main()
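
Editorial note on the TODO repeated in the hunks above ("add handling for when zero links are passed"): the commit leaves it open. A minimal guard inside downloadtiktoks() could look roughly like the sketch below; the log and print wording is assumed, not taken from the repository.

def downloadtiktoks(urls):
    # Hypothetical early exit for the TODO: stop before the download loop
    # when the scraper returned no links at all.
    if not urls:
        log("downloadtiktoks received zero links; nothing to download")
        print("No videos were found, so nothing was downloaded.")
        return
    ...  # the existing per-URL download loop would continue here
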
8 changes: 5 additions & 3 deletions src/byuser.py
@@ -6,6 +6,7 @@
from log import logtofile as log
from src.streaming import getVideoInfo, mpv


def streamuser(username):
links = proxitok_scraper(username)

@@ -20,6 +21,7 @@ def streamuser(username):

def proxitok_scraper(username: str) -> list[str]:
from src.constants import OPTIONS

log("Scraper started")
print("\nObtaining URLs - this can take a while with users with many posts.")
session = requests.Session()
@@ -29,7 +31,7 @@ def proxitok_scraper(username: str) -> list[str]:
url = f"{OPTIONS['proxitok_instance']}/@{username}{next_href}"
response = session.get(url)
log(f"Scraping {url}")

if OPTIONS["ratelimit"] != 0:
log(f'Sleeping for {OPTIONS["ratelimit"]}s')
time.sleep(OPTIONS["ratelimit"])
@@ -39,11 +41,11 @@ def proxitok_scraper(username: str) -> list[str]:
log(error_msg)
print(error_msg)
return direct_links

soup = BeautifulSoup(response.text, "html.parser")

posts = soup.find_all("article", class_="media")

if not posts:
error_msg = "No posts found. The specified account is likely private or has no published videos"
log(error_msg)
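
For orientation (an editorial sketch, not part of this commit), both helpers in src/byuser.py take a TikTok username; a hedged usage example with a placeholder account name:

from src.byuser import proxitok_scraper, streamuser

# "some_creator" is a placeholder, not a real account.
links = proxitok_scraper("some_creator")  # list of direct video URLs, possibly empty
streamuser("some_creator")                # expected to scrape the same links and stream them via the configured player
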
7 changes: 4 additions & 3 deletions src/downloader.py
@@ -5,14 +5,15 @@
from yt_dlp.utils import DownloadError

from log import logtofile as log
from src.functions import url_redirection
from src.constants import OPTIONS
from src.functions import url_redirection


def downloader(url):
ydl_opts = {
"format": "bestvideo*+bestaudio/best",
"outtmpl": os.getcwd() + "/video/%(creator)s/%(id)s.%(ext)s",
"download_archive": os.getcwd() + "/video/.video_archive"
"download_archive": os.getcwd() + "/video/.video_archive",
}
YoutubeDL(ydl_opts).download(url)

@@ -42,7 +43,7 @@ def downloadtiktoks(urls):
if OPTIONS["ratelimit"] != 0:
log(f'Sleeping for {OPTIONS["ratelimit"]}s')
time.sleep(OPTIONS["ratelimit"])

try:
downloader(url)
log(f"Video {url} was downloaded")
11 changes: 7 additions & 4 deletions src/functions.py
@@ -2,19 +2,22 @@
import random

import requests

from log import logtofile as log


def listas():
# Retrieves tiktok likes and dates from user_data.json
import json
f = open('user_data.json')

f = open("user_data.json")
linklist = []
datelist = []
data = json.load(f)

for i in data['Activity']["Like List"]['ItemFavoriteList']:
linklist.append(i['Link'])
datelist.append(i['Date'])
for i in data["Activity"]["Like List"]["ItemFavoriteList"]:
linklist.append(i["Link"])
datelist.append(i["Date"])
f.close()
log("user_data.json file was processed sucessfully")
return linklist, datelist
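
listas() above walks Activity → "Like List" → "ItemFavoriteList" in user_data.json and collects each entry's "Link" and "Date". As an illustration (only the key names come from the code; the values below are invented), a minimal file with that shape could be produced like this:

import json

# Hypothetical minimal user_data.json for listas(); the date and link values
# are placeholders, not real export data.
sample = {
    "Activity": {
        "Like List": {
            "ItemFavoriteList": [
                {
                    "Date": "2023-06-29 12:00:00",
                    "Link": "https://www.tiktokv.com/share/video/1234567890/",
                }
            ]
        }
    }
}

with open("user_data.json", "w") as f:
    json.dump(sample, f, indent=2)
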
67 changes: 48 additions & 19 deletions src/init.py
@@ -12,6 +12,7 @@
import os
import platform
import sys

import requests

from log import logtofile as log
@@ -23,19 +24,25 @@ def clear_screen():

def init(silent):
clear_screen()

if not silent:

print("Welcome to CLI TikTok, an open-source TikTok archiver and viewer!")
input("Press ENTER to proceed")

log("Started dependency test")
required_libraries = ["requests", "yt_dlp", "distro", "bs4"]
missing_libraries = [library for library in required_libraries if not _library_exists(library)]
missing_libraries = [
library for library in required_libraries if not _library_exists(library)
]
if missing_libraries:
log("Dependency test failed - Missing libraries: " + ", ".join(missing_libraries))
log(
"Dependency test failed - Missing libraries: "
+ ", ".join(missing_libraries)
)
clear_screen()
input("The program detected dependencies are not installed.\nPress ENTER to install the necessary libraries.\n(You will need to open the program again afterwards)")
input(
"The program detected dependencies are not installed.\nPress ENTER to install the necessary libraries.\n(You will need to open the program again afterwards)"
)
log("User accepted automatic installation, running it.\n")
os.system("pip install -r requirements.txt --user")
clear_screen()
@@ -50,36 +57,56 @@ def init(silent):

log("Started update / networking test")
try:
link = requests.get("https://raw.githubusercontent.com/nanometer5088/CLI-TikTok/main/src/constants.py").text.strip()
link = requests.get(
"https://raw.githubusercontent.com/nanometer5088/CLI-TikTok/main/src/constants.py"
).text.strip()

userversion = _read_user_version()

version_line = next(line for line in link.split('\n')
if line.startswith(' "version": '))
data = version_line.split(': ')[1]


version_line = next(
line for line in link.split("\n") if line.startswith(' "version": ')
)
data = version_line.split(": ")[1]

if userversion < data:
log(f"New version detected! User version is {userversion}, but {data} was found on Github.")
log(
f"New version detected! User version is {userversion}, but {data} was found on Github."
)
clear_screen()
input("\tThere's a new version available!\n\tUpdates bring performance and feature improvements!\n\tDownload the new version here:\n\thttps://github.com/nanometer5088/CLI-TikTok/archive/refs/heads/main.zip\n\n\tPress ENTER to proceed")
input(
"\tThere's a new version available!\n\tUpdates bring performance and feature improvements!\n\tDownload the new version here:\n\thttps://github.com/nanometer5088/CLI-TikTok/archive/refs/heads/main.zip\n\n\tPress ENTER to proceed"
)
clear_screen()
else:
log("The user has internet access and the software is up-to-date.\n")
clear_screen()

except requests.exceptions.ConnectionError:
log("A connection error was detected when trying to connect to https://raw.githubusercontent.com/ to check for updates.")
log(
"A connection error was detected when trying to connect to https://raw.githubusercontent.com/ to check for updates."
)
try:
for site in ["https://www.eff.org", "https://freedom.press", "https://www.torproject.org", "https://www.privacyguides.org"]:
for site in [
"https://www.eff.org",
"https://freedom.press",
"https://www.torproject.org",
"https://www.privacyguides.org",
]:
requests.get(site)
log("The user seems to be connected to the internet, but Github is not accessible. Dazed and confused, but trying to continue.")
log(
"The user seems to be connected to the internet, but Github is not accessible. Dazed and confused, but trying to continue."
)
clear_screen()
except requests.exceptions.ConnectionError:
clear_screen()
print("CLI-TikTok detected your device isn't connected to the internet.")
print("This software requires a reliable and uncensored internet connection to properly work.")
print(
"This software requires a reliable and uncensored internet connection to properly work."
)
print("Please try again with an internet connection.")
log("The software exited, and the user was notified of the connection problem.")
log(
"The software exited, and the user was notified of the connection problem."
)
sys.exit()


@@ -99,6 +126,7 @@ def _get_os_info():
return f"Mac OS {platform.mac_ver()}"
elif system == "Linux":
import distro

return f"{distro.name()} {distro.version()} - {os.uname().release}"
else:
return f"{system} - {platform.machine()}"
@@ -110,4 +138,5 @@ def _get_python_version():

def _read_user_version():
from src.constants import APP
return str(APP["version"])

return str(APP["version"])
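
Several hunks above read src/constants.py: _read_user_version() returns APP["version"], the update check scans the raw GitHub copy for a line beginning with four spaces and "version":, and the scraper and player code read OPTIONS["proxitok_instance"], OPTIONS["ratelimit"] and OPTIONS["player_command"]. A hypothetical constants.py consistent with those lookups (the values are placeholders, not the project's real defaults) might be:

# Hypothetical sketch of src/constants.py; the key names come from the lookups
# in this commit, but every value below is a placeholder.
APP = {
    "version": "1.0",  # the real project may store a number or another format
}

OPTIONS = {
    "proxitok_instance": "https://proxitok.example.org",  # assumed ProxiTok instance URL
    "ratelimit": 0,  # seconds to sleep between requests; 0 disables the delay
    "player_command": "mpv",  # shell command used to play streamed videos
}
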
20 changes: 12 additions & 8 deletions src/logs.py
@@ -2,34 +2,38 @@
import os
import time


class Logging:
# logFileOpened is a variable that keeps track of the log file status.
# logFileOpened is a variable that keeps track of the log file status.
# It is initialized as False to represent the log file wasn't open.
def __init__(self):
self.logFileOpened = False

# `log_string` is a string that is the log message that will be written to the log file.
# `log_string` is a string that is the log message that will be written to the log file.
def log(self, log_string: str):
logs_directory = os.getcwd() + "/logs"

if not os.path.exists(logs_directory):
os.mkdir(logs_directory)

log_files = glob.glob(r"logs/log-*.txt")

# The log file numbers are extracted from the filenames
log_file_numbers = [int(file[9:-4]) for file in log_files]

# If log_file_numbers list is empty, append the value 0 to it.
# This ensures that the list always contains at least one value.
if not log_file_numbers:
log_file_numbers.append(0)

log_file_name = f"logs/log-{max(log_file_numbers) + 1 if not self.logFileOpened else max(log_file_numbers)}.txt"


with open(log_file_name, "a" if self.logFileOpened else "w") as log_file:
self.logFileOpened = True

current_time = time.strftime("%Y.%m.%d-%H.%M.%S", time.localtime(time.time()))
log_file.write(f"[{current_time}] {log_string.encode('ascii', 'replace').decode()}\n")
current_time = time.strftime(
"%Y.%m.%d-%H.%M.%S", time.localtime(time.time())
)
log_file.write(
f"[{current_time}] {log_string.encode('ascii', 'replace').decode()}\n"
)
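
An illustration of the numbering scheme implemented by Logging.log() above (an editorial sketch, not part of the commit): log files are named logs/log-<N>.txt, the slice file[9:-4] recovers <N>, and a fresh run writes to max(N) + 1 while later calls in the same run keep appending to that file.

# Standalone sketch of the file-numbering logic; the file names are invented.
existing = ["logs/log-1.txt", "logs/log-2.txt", "logs/log-12.txt"]

numbers = [int(name[9:-4]) for name in existing]  # "logs/log-12.txt"[9:-4] -> "12"
next_log = f"logs/log-{max(numbers) + 1}.txt"
print(numbers, next_log)  # [1, 2, 12] logs/log-13.txt
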
6 changes: 4 additions & 2 deletions src/streaming.py
@@ -5,8 +5,8 @@
from yt_dlp.utils import DownloadError

from log import logtofile as log
from src.functions import url_redirection
from src.constants import OPTIONS
from src.functions import url_redirection


def getVideoInfo(url):
@@ -16,7 +16,9 @@ def getVideoInfo(url):


def mpv(url):
subprocess.check_output(f'{OPTIONS["player_command"]} "{getVideoInfo(url)}"', shell=True)
subprocess.check_output(
f'{OPTIONS["player_command"]} "{getVideoInfo(url)}"', shell=True
)


def playbackrandom(urls, datas):
12 changes: 7 additions & 5 deletions src/trending.py
@@ -6,7 +6,8 @@
from log import logtofile as log
from src.constants import OPTIONS

def streamtrending(amount:int = 24):

def streamtrending(amount: int = 24):
links = proxitok_trending(amount)

if len(links) == 0:
@@ -23,6 +24,7 @@ def streamtrending(amount: int = 24):

def proxitok_trending(amount: int = 24) -> list[str]:
from src.constants import OPTIONS

log("Scraper started")
print("\nObtaining URLs - this can take a while when requesting many posts.")
session = requests.Session()
@@ -34,7 +36,7 @@ def proxitok_trending(amount: int = 24) -> list[str]:

response = session.get(url)
log(f"Scraping {url}")

if OPTIONS["ratelimit"] != 0:
log(f'Sleeping for {OPTIONS["ratelimit"]}s')
time.sleep(OPTIONS["ratelimit"])
@@ -44,11 +46,11 @@ def proxitok_trending(amount: int = 24) -> list[str]:
log(error_msg)
print(error_msg)
return direct_links

soup = BeautifulSoup(response.text, "html.parser")

posts = soup.find_all("article", class_="media")

if not posts:
error_msg = "No posts found for trending."
log(error_msg)
@@ -69,4 +71,4 @@ def proxitok_trending(amount: int = 24) -> list[str]:
return direct_links

next_button = soup.find("a", class_="button", text="Next")
next_href = next_button["href"]
next_href = next_button["href"]
