initial
This commit is contained in:
15
.gitignore
vendored
Normal file
15
.gitignore
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# Python-generated files
|
||||||
|
__pycache__/
|
||||||
|
*.py[oc]
|
||||||
|
build/
|
||||||
|
dist/
|
||||||
|
wheels/
|
||||||
|
*.egg-info
|
||||||
|
|
||||||
|
# Virtual environments
|
||||||
|
.venv
|
||||||
|
log/
|
||||||
|
.ruff*
|
||||||
|
test*.py
|
||||||
|
.env
|
||||||
|
app.sqlite
|
||||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
3.12
|
||||||
403
discord_torrent.py
Normal file
403
discord_torrent.py
Normal file
@@ -0,0 +1,403 @@
|
|||||||
|
import discord
|
||||||
|
from discord.ext import commands
|
||||||
|
from discord import app_commands
|
||||||
|
from lib.torrent_creator import TorrentUpload
|
||||||
|
from lib.logging_data import logger
|
||||||
|
from lib.sonarr import Sonarr_API
|
||||||
|
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from typing import Optional
|
||||||
|
import os
|
||||||
|
import asyncio
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Load secrets/configuration (Discord token, Sonarr credentials, webhook
# URLs) from the local .env file into the process environment.
load_dotenv(".env")

# Bot configuration
intents = discord.Intents.default()
intents.message_content = True  # required to read message text for prefix ('!') commands
intents.members = True          # required for member lookups/events

# Shared application logger; writes to ./log (and can mirror to Discord,
# see console.log(..., is_discord=...) usages below).
console = logger(app_name="torrent_uploader", log_dir="./log")
||||||
|
class DownloadBot(commands.Bot):
    """Discord bot that queues torrent create/upload jobs.

    Slash commands append job dicts to ``self.upload_queue``; the
    module-level ``torrent_worker()`` coroutine consumes them.
    """

    def __init__(self):
        super().__init__(
            command_prefix='!',
            intents=intents,
            help_command=None
        )

        # Initialize data storage (in-memory for this example)
        # In production, you'd want to use a database
        self.upload_queue = []

    async def setup_hook(self):
        """Called when the bot is starting up"""
        console.log(f"Logged in as {self.user} (ID: {self.user.id})")
        console.log("------")

        # Sync slash commands with Discord so /sonarr_upload is visible.
        try:
            synced = await self.tree.sync()
            console.log(f"Synced {len(synced)} command(s)")
            # threading.Thread(target=vt_worker).start() # Start the download worker in the background
        except Exception as e:
            console.error(f"Failed to sync commands: {e}")
|
||||||
|
# Singleton bot instance; the logger keeps a reference so it can forward
# messages into Discord channels/webhooks.
bot = DownloadBot()
console.client = bot
# Sonarr API client, configured from the environment (.env).
sonarr = Sonarr_API(os.getenv("sonarr_ip"), os.getenv("sonarr_key"))
|
||||||
|
@bot.event
async def on_ready():
    """Fired when the gateway connection is established."""
    console.log(f'{bot.user} has connected to Discord!')
    # Presence text shown in the member list.
    activity = discord.Game(name="Managing upload | /help")
    await bot.change_presence(activity=activity)
    # Kick off the background queue processor.
    # NOTE(review): on_ready can fire again after reconnects, which would
    # start a duplicate worker task -- confirm whether that is intended.
    asyncio.create_task(torrent_worker())
|
||||||
|
|
||||||
|
@bot.tree.error
async def on_app_command_error(interaction: discord.Interaction, error: app_commands.AppCommandError):
    """Global error handler for all application (slash) commands.

    Permission failures get an ephemeral "denied" embed; every other error
    is reported back through the interaction and logged.
    """
    if isinstance(error, app_commands.CheckFailure):
        embed = discord.Embed(
            title="❌ Permission Denied",
            description="You don't have permission to use this command.",
            color=0xff0000
        )
        if not interaction.response.is_done():
            await interaction.response.send_message(embed=embed, ephemeral=True)
        return

    embed = discord.Embed(
        title="❌ Error",
        description=f"An error occurred: {str(error)}",
        color=0xff0000
    )

    if interaction.response.is_done():
        await interaction.followup.send(embed=embed, ephemeral=True)
    else:
        # Respond through the interaction instead of bot.get_channel(...).send():
        # get_channel() can return None for uncached channels (AttributeError),
        # and replying to the interaction keeps the error ephemeral to the
        # invoking user instead of posting it publicly.
        await interaction.response.send_message(embed=embed, ephemeral=True)
    console.error(error)
|
||||||
|
|
||||||
|
|
||||||
|
# /upload command
def _bool_choices():
    """Fresh True/False Choice list for the boolean-style string flags."""
    return [
        app_commands.Choice(name="True", value='True'),
        app_commands.Choice(name="False", value='False'),
    ]


@bot.tree.command(name="sonarr_upload", description="Create and Upload a torrent file to Torrent server from Sonarr")
@app_commands.describe(
    sonarr_id="Sonarr title id",
    season="Sonarr season",
    episode="Sonarr episode",
    category="Category of the torrent (e.g., Anime, Western Series, Hi-Def Movie)",
    source_type="Source type of the torrent (e.g., Blu-ray, WEB-DL, Encode)",
    source="Source of the torrent (e.g., Master, Zoom, TV)",
    country="Country of the torrent (e.g., Thai, Western, Korean)",
    original_platform="Original platform of the torrent (e.g., DVD, Hi-def, TV)",
    is_subdir="Is the torrent in a subdirectory?",
    bearbit="Bearbit flag (True/False)",
    torrentdd="TorrentDD flag (True/False)",
    is_movie="Is this a movie upload? (True/False)",
    pack="Pack flag (True/False)",
)
@app_commands.choices(category=[
    ### Anime
    app_commands.Choice(name="Anime", value="Anime"),
    ### Series
    app_commands.Choice(name="Western Series", value="Western Series"),
    app_commands.Choice(name="Korean Series", value="Korean Series"),
    app_commands.Choice(name="Japanese Series", value="Japanese Series"),
    app_commands.Choice(name="Chinese Series", value="Chinese Series"),
    app_commands.Choice(name="Thai Series", value="Thai Series"),
    app_commands.Choice(name="Other Series", value="Other Series"),
    ### Movies
    app_commands.Choice(name="Hi-Def Movie", value="Hi-Def Movie"),
    app_commands.Choice(name="4K", value="4K"),
    ### Documentaries
    app_commands.Choice(name="Documentary", value="สารคดี"),
])
@app_commands.choices(source_type=[
    app_commands.Choice(name="Blu-ray", value="Blu-ray"),
    app_commands.Choice(name="CD", value="CD"),
    app_commands.Choice(name="DVD5", value="DVD5"),
    app_commands.Choice(name="DVD9", value="DVD9"),
    app_commands.Choice(name="Encode", value="Encode"),
    app_commands.Choice(name="HD DVD", value="HD DVD"),
    app_commands.Choice(name="HDTV", value="HDTV"),
    app_commands.Choice(name="MiniBD", value="MiniBD"),
    app_commands.Choice(name="Remux", value="Remux"),
    app_commands.Choice(name="Track", value="Track"),
    app_commands.Choice(name="WEB-DL", value="WEB-DL"),
    app_commands.Choice(name="Image", value="Image")
])
@app_commands.choices(source=[
    app_commands.Choice(name="Master", value="Master"),
    app_commands.Choice(name="หนังซูม", value="Zoom"),
    app_commands.Choice(name="V2D From Master", value="V2D"),
    app_commands.Choice(name="From TV", value="TV"),
    app_commands.Choice(name="From HD-TV", value="HD-TV"),
    app_commands.Choice(name="Hi-def rip from Master", value="Hi-def"),
    ### Anime
    app_commands.Choice(name="V2D From Master/DVD Modified", value="From Master/DVD"),
    app_commands.Choice(name="อัดจาก TV", value="Rip TV"),
    app_commands.Choice(name="rip from Master", value="Rip Master"),
    app_commands.Choice(name="Scan", value="scan")
])
@app_commands.choices(country=[
    app_commands.Choice(name="Thai", value="Thai"),
    app_commands.Choice(name="Western", value="Western"),
    app_commands.Choice(name="Korean", value="Korean"),
    app_commands.Choice(name="Japanese", value="Japanese"),
    app_commands.Choice(name="Chinese", value="Chinese"),
    app_commands.Choice(name="Other", value="Other")
])
@app_commands.choices(original_platform=[
    app_commands.Choice(name="DVD", value="DVD"),
    app_commands.Choice(name="Hi-def", value="Hi-def"),
    app_commands.Choice(name="TV", value="TV"),
    app_commands.Choice(name="Books", value="Books"),
    app_commands.Choice(name="Other", value="Other"),
    app_commands.Choice(name="Netflix", value="Netflix")
])
@app_commands.choices(
    is_subdir=_bool_choices(),
    bearbit=_bool_choices(),
    torrentdd=_bool_choices(),
    is_movie=_bool_choices(),
    pack=_bool_choices(),
)
async def sonarr_upload_command(interaction: discord.Interaction,
                                sonarr_id: str,
                                season: Optional[str],
                                episode: Optional[str],
                                category: Optional[str],
                                source_type: Optional[str],
                                source: Optional[str],
                                country: Optional[str],
                                original_platform: Optional[str],
                                is_subdir: Optional[str] = "False",
                                bearbit: Optional[str] = "True",
                                torrentdd: Optional[str] = "True",
                                is_movie: Optional[str] = "False",
                                pack: Optional[str] = "False",
                                ):
    """Queue a torrent create-and-upload job for a Sonarr title.

    Resolves the media path from Sonarr (whole series, one season, or a
    single episode), validates the arguments, and appends a job entry to
    bot.upload_queue; torrent_worker() processes it in the background.
    """
    # Echo the request back to the user immediately; the heavy work runs later.
    embed = discord.Embed(
        title="Torrent Upload",
        description="Creating and uploading torrent file...",
        color=discord.Color.blue()
    )
    embed.add_field(name="Sonarr title id", value=sonarr_id, inline=False)
    embed.add_field(name="Season", value=season, inline=False)
    embed.add_field(name="Episode", value=episode, inline=False)
    for field_name, field_value in (
        ("Category", category),
        ("Source Type", source_type),
        ("Source", source),
        ("Country", country),
        ("Original Platform", original_platform),
        ("Is Subdirectory", is_subdir),
        ("BearBit", bearbit),
        ("TorrentDD", torrentdd),
        ("Is Movie", is_movie),
        ("Pack", pack),
    ):
        embed.add_field(name=field_name, value=field_value if field_value else "Not provided", inline=False)
    embed.set_footer(text="This may take a while, please be patient...")
    await interaction.response.send_message(embed=embed)

    console.log("Starting torrent creation and upload process...")
    print_detail = ('Sonarr title id:', sonarr_id,
                    '| Sonarr season:', season if season else "Not provided",
                    '| Sonarr episode:', episode if episode else "Not provided",
                    '| category:', category if category else "Not provided",
                    '| source_type:', source_type if source_type else "Not provided",
                    '| source:', source if source else "Not provided",
                    '| country:', country if country else "Not provided",
                    '| original_platform:', original_platform if original_platform else "Not provided",
                    '| is_subdir:', is_subdir, '| bearbit:', bearbit,
                    '| torrentdd:', torrentdd, '| is_movie:', is_movie, '| pack:', pack)
    # Join with spaces (was "".join, which ran all the values together).
    console.log(" ".join(print_detail))

    # Required-field validation.  The initial response has already been
    # sent, so all error reporting goes through the followup webhook.
    required = (
        (sonarr_id, "Sonarr id"),
        (category, "Category"),
        (source_type, "Source type"),
        (source, "Source"),
        (country, "Country"),
        (original_platform, "Original platform"),
    )
    for value, label in required:
        if not value:
            await interaction.followup.send(f"❌ {label} is required for torrent creation.")
            return

    # Flag validation: Discord delivers these as strings, so enforce the
    # literal 'True'/'False' values before converting to bool.
    flags = (
        ("is_subdir", is_subdir),
        ("bearbit", bearbit),
        ("torrentdd", torrentdd),
        ("is_movie", is_movie),
        ("pack", pack),
    )
    for flag_name, flag_value in flags:
        if flag_value not in ('True', 'False'):
            await interaction.followup.send(f"❌ {flag_name} must be either 'True' or 'False'.")
            return

    is_subdir = is_subdir == 'True'
    bearbit = bearbit == 'True'
    torrentdd = torrentdd == 'True'
    is_movie = is_movie == 'True'
    pack = pack == 'True'

    file_path = get_file_path(sonarr, sonarr_id, season=season, episode=episode)
    if file_path is None:
        # get_file_path returns None when the season/episode is unknown to
        # Sonarr; bail out instead of crashing on the subscripts below.
        await interaction.followup.send("❌ Could not resolve a file path from Sonarr for the given season/episode.")
        return

    entry = {
        "file_path": file_path["path"],
        "imdb_id": file_path["imdbId"],
        "tmdb_id": file_path["tmdbId"],
        "category": category,
        "source_type": source_type,
        "source": source,
        "country": country,
        "original_platform": original_platform,
        "is_subdir": is_subdir,
        "bearbit": bearbit,
        "torrentdd": torrentdd,
        "is_movie": is_movie,
        'channel_id': interaction.channel_id,
        'pack': pack,
    }
    # Add the entry to the upload queue consumed by torrent_worker().
    bot.upload_queue.append(entry)
    console.log(f"Added to upload queue: {entry}")

    embed = discord.Embed(
        title="Torrent Upload",
        description="Torrent creation and upload process has been started. You will be notified once the process is completed.",
        color=discord.Color.green()
    )
    embed.set_footer(text="Please wait while the bot processes your request...")
    await interaction.followup.send(embed=embed)
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_path(sonarr: "Sonarr_API", sonarr_id, season, episode):
    """Resolve the on-disk media path for a Sonarr title.

    - season set, episode unset  -> the season folder ("SNN", or "Specials"
      for season 0)
    - season and episode set     -> the episode's file path
    - neither set                -> the series root folder

    Returns {"path": ..., "tmdbId": ..., "imdbId": ...}, or None when the
    requested season/episode is not found.

    Bug fixes vs. the original: ``season`` arrives as a string from Discord,
    so ``season > 0`` raised TypeError and ``f"S{season:02}"`` raised
    ValueError ('0' padding is invalid for str); both now use int(season).
    """
    series = sonarr.get_series_detail(sonarr_id)
    tmdb_id = series["tmdbId"]
    imdb_id = series["imdbId"]

    def _result(path):
        # Uniform return shape for every branch.
        return {"path": path, "tmdbId": tmdb_id, "imdbId": imdb_id}

    if season is not None and episode is None:
        season_num = int(season)
        for ss in series["seasons"]:
            if season_num == int(ss["seasonNumber"]):
                # Season 0 is Sonarr's "Specials" pseudo-season.
                folder = f"S{season_num:02d}" if season_num > 0 else "Specials"
                return _result(str(Path(series["path"]) / folder))
        return None  # season not present in Sonarr (was an implicit fall-through)
    elif season is not None and episode is not None:
        for ep in sonarr.get_episode_detail_from_season(sonarr_id, season=season):
            if int(ep["seasonNumber"]) == int(season) and int(ep["episodeNumber"]) == int(episode):
                ep_detail = sonarr.get_episode_detail(ep["id"])
                return _result(ep_detail["episodeFile"]["path"])
        return None  # episode not found (was an implicit fall-through)
    else:
        # No season/episode filter: hand back the whole series directory.
        return _result(series["path"])
|
||||||
|
|
||||||
|
def _format_status_lines(items, with_link):
    """Render one 'Uploaded/Fail Upload : name' line per tracker result.

    When with_link is True the torrent name is rendered as a markdown link
    to item['url'] (used in the success embed).
    """
    text = ""
    for item in items:
        label = "Uploaded" if item["status"] else "Fail Upload"
        if with_link:
            text += f"{label} : [{item['name']}]({item['url']})\n"
        else:
            text += f"{label} : {item['name']}\n"
    return text


async def torrent_worker():
    """Continuously process the upload queue in the background"""
    while True:
        if not bot.upload_queue:
            await asyncio.sleep(5)  # Sleep briefly if queue is empty
            continue

        # An unexpected error while handling one entry must not kill the
        # worker loop for the rest of the process lifetime.
        try:
            discord_webhook = os.getenv('torrent_update_webhook').split(",")
            entry = bot.upload_queue.pop(0)
            channel = bot.get_channel(entry['channel_id'])

            torrentupload = TorrentUpload(console=console)
            status = await torrentupload.upload_torrent(entry)
            bearbit_failed = any(item['status'] is False for item in status['bearbit'])
            torrentdd_failed = any(item['status'] is False for item in status['torrentdd'])

            if not (bearbit_failed or torrentdd_failed):
                embed = discord.Embed(
                    title="Torrent Upload Completed",
                    description="All torrents have been created and uploaded successfully.",
                    color=discord.Color.green()
                )
                embed.add_field(name="BearBit", value=_format_status_lines(status['bearbit'], with_link=True), inline=False)
                embed.add_field(name="TorrentDD", value=_format_status_lines(status['torrentdd'], with_link=True), inline=False)
                embed.set_footer(text="Thank you for visiting!")
                console.debug(status)
                console.log('Torrent Upload Completed',
                            is_discord={"channel": channel, "embed": embed, "web_hook_urls": discord_webhook})
            else:
                embed = discord.Embed(
                    title="Torrent Upload Failed",
                    description="Some or all torrents failed to upload. Please check the logs for details.",
                    # Red when both trackers failed, orange for a partial failure.
                    color=discord.Color.red() if (bearbit_failed and torrentdd_failed) else discord.Color.orange()
                )
                embed.add_field(name="BearBit", value=_format_status_lines(status['bearbit'], with_link=False), inline=False)
                embed.add_field(name="TorrentDD", value=_format_status_lines(status['torrentdd'], with_link=False), inline=False)
                embed.set_footer(text="Please check the logs for more details.")
                console.debug(status)
                console.error('Torrent Upload Failed', is_discord={"channel": channel, "embed": embed})
        except Exception as exc:
            console.error(f"Torrent worker failed to process an entry: {exc}")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Entry point: read the bot token from the environment and start the client.
    bot_token = os.getenv('TORRENT_DISCORD_TOKEN')
    if bot_token:
        try:
            bot.run(bot_token)
        except discord.LoginFailure:
            console.error("❌ Invalid bot token! Please check your TORRENT_DISCORD_TOKEN in the .env file.")
        except Exception as e:
            console.error(f"❌ An error occurred: {e}")
    else:
        console.error("❌ TORRENT_DISCORD_TOKEN not found in environment variables!")
        console.error("Make sure you have a .env file with your bot token.")
|
||||||
338
lib/ScreenShot.py
Normal file
338
lib/ScreenShot.py
Normal file
@@ -0,0 +1,338 @@
|
|||||||
|
import os
|
||||||
|
import re
|
||||||
|
import asyncio
|
||||||
|
import subprocess
|
||||||
|
import cv2
|
||||||
|
import numpy as np
|
||||||
|
import shutil
|
||||||
|
import time
|
||||||
|
from PIL import Image, ImageDraw, ImageFont
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
import aiohttp
|
||||||
|
from dotenv import dotenv_values
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from lib.logging_data import logger
|
||||||
|
import json
|
||||||
|
|
||||||
|
class ScreenShot:
|
||||||
|
FONT_PATH = "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf"
|
||||||
|
|
||||||
|
    def __init__(self, OUTPUT_DIR="temp_screenshots", console: logger = None):
        """Screenshot contact-sheet generator for a video file.

        OUTPUT_DIR is the scratch directory for individual frame grabs;
        console is an optional shared logger (a default one is created
        otherwise).
        """
        # Path of the video being processed; set per-run by run().
        self.VIDEO_PATH = None
        self.OUTPUT_DIR = OUTPUT_DIR
        # Final stitched contact-sheet path; set by run().
        self.OUTPUT_IMAGE = None

        # Contact-sheet layout: GRID_COLS x GRID_ROWS thumbnails.
        self.GRID_COLS = 3
        self.GRID_ROWS = 5
        self.TOTAL_FRAMES = self.GRID_COLS * self.GRID_ROWS
        # self.WIDTH = 1600
        # self.HEIGHT = 900
        # Per-thumbnail dimensions; populated from video metadata in get_metadata().
        self.WIDTH = None
        self.HEIGHT = None
        # Pixel height of the metadata banner above the grid.
        self.HEADER_HEIGHT = 90

        # Image-host (imgbb.ws) upload state; see login()/upload_to_imgbb().
        self.base_url = "https://imgbb.ws"
        self.imgbb_token = None
        self.session = None

        self.console = console or logger(app_name="torrent_uploader", log_dir="./log")
|
||||||
|
|
||||||
|
def get_metadata(self):
|
||||||
|
def ffprobe_entry(stream, entry):
|
||||||
|
cmd = [
|
||||||
|
"ffprobe", "-v", "error", "-select_streams", stream,
|
||||||
|
"-show_entries", f"stream={entry}",
|
||||||
|
"-of", "default=noprint_wrappers=1:nokey=1", self.VIDEO_PATH
|
||||||
|
]
|
||||||
|
# print(f"Running command: {' '.join(cmd)}")
|
||||||
|
result = subprocess.run(cmd, stdout=subprocess.PIPE)
|
||||||
|
return result.stdout.decode().strip()
|
||||||
|
|
||||||
|
def ffprobe_tag_languages(stream_type):
|
||||||
|
cmd = [
|
||||||
|
"ffprobe", "-v", "error", "-select_streams", stream_type,
|
||||||
|
"-show_entries", "stream_tags=language",
|
||||||
|
"-of", "csv=p=0", self.VIDEO_PATH
|
||||||
|
]
|
||||||
|
result = subprocess.run(cmd, stdout=subprocess.PIPE)
|
||||||
|
langs = list(set([lang.strip() for lang in result.stdout.decode().splitlines() if lang.strip()]))
|
||||||
|
return ",".join(langs) if langs else "und"
|
||||||
|
|
||||||
|
duration = float(self.get_duration(self.VIDEO_PATH))
|
||||||
|
vcodec = ffprobe_entry("v:0", "codec_name").splitlines()[0] + " " + ffprobe_entry("v:0", "profile").splitlines()[0]
|
||||||
|
acodec_profile = ffprobe_entry("a:0", "profile").splitlines()[0]
|
||||||
|
acodec = ffprobe_entry("a:0", "codec_name").splitlines()[0]
|
||||||
|
acodec += " DDP" if "Dolby Digital Plus" in acodec_profile else " DD" if "Dolby Digital" in acodec_profile else ""
|
||||||
|
acodec += " Atmos" if "Atmos" in acodec_profile else ""
|
||||||
|
audio_channels = ffprobe_entry("a:0", "channel_layout").splitlines()[0]
|
||||||
|
resolution = f"{ffprobe_entry('v:0', 'width').splitlines()[0]}x{ffprobe_entry('v:0', 'height').splitlines()[0]}"
|
||||||
|
self.WIDTH, self.HEIGHT = map(int, resolution.split('x'))
|
||||||
|
self.WIDTH, self.HEIGHT=self.WIDTH/self.GRID_COLS, self.HEIGHT/self.GRID_COLS
|
||||||
|
size_mb = os.path.getsize(self.VIDEO_PATH) / (1024 * 1024)
|
||||||
|
audio_lang = ffprobe_tag_languages("a").upper()
|
||||||
|
subtitle_lang = ffprobe_tag_languages("s").upper()
|
||||||
|
|
||||||
|
return duration, vcodec.upper(), acodec.upper(), audio_channels, resolution, size_mb, audio_lang, subtitle_lang
|
||||||
|
|
||||||
|
# def get_metadata(self):
|
||||||
|
# def get_mkv_json():
|
||||||
|
# cmd = ["mkvmerge", "-J", self.VIDEO_PATH]
|
||||||
|
# result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
# return json.loads(result.stdout.decode())
|
||||||
|
|
||||||
|
# data = get_mkv_json()
|
||||||
|
|
||||||
|
# video_track = next(t for t in data["tracks"] if t["type"] == "video")
|
||||||
|
# audio_track = next(t for t in data["tracks"] if t["type"] == "audio")
|
||||||
|
|
||||||
|
# # duration (ns -> sec)
|
||||||
|
# duration = data["container"]["properties"]["duration"] / 1_000_000_000
|
||||||
|
|
||||||
|
# # video codec
|
||||||
|
# vcodec = video_track["codec"]
|
||||||
|
# profile = video_track.get("properties", {}).get("profile")
|
||||||
|
# if profile:
|
||||||
|
# vcodec = f"{vcodec} {profile}"
|
||||||
|
|
||||||
|
# # audio codec
|
||||||
|
# acodec = audio_track["codec"]
|
||||||
|
# acodec_profile = audio_track.get("properties", {}).get("profile", "")
|
||||||
|
|
||||||
|
# if "Dolby Digital Plus" in acodec_profile:
|
||||||
|
# acodec += " DDP"
|
||||||
|
# elif "Dolby Digital" in acodec_profile:
|
||||||
|
# acodec += " DD"
|
||||||
|
|
||||||
|
# if "Atmos" in acodec_profile:
|
||||||
|
# acodec += " Atmos"
|
||||||
|
|
||||||
|
# # channels
|
||||||
|
# audio_channels = audio_track.get("properties", {}).get("audio_channels")
|
||||||
|
|
||||||
|
# # resolution
|
||||||
|
# width = video_track["properties"]["pixel_dimensions"].split("x")[0]
|
||||||
|
# height = video_track["properties"]["pixel_dimensions"].split("x")[1]
|
||||||
|
|
||||||
|
# resolution = f"{width}x{height}"
|
||||||
|
|
||||||
|
# self.WIDTH, self.HEIGHT = int(width), int(height)
|
||||||
|
# self.WIDTH, self.HEIGHT = self.WIDTH/self.GRID_COLS, self.HEIGHT/self.GRID_COLS
|
||||||
|
|
||||||
|
# # size
|
||||||
|
# size_mb = os.path.getsize(self.VIDEO_PATH) / (1024 * 1024)
|
||||||
|
|
||||||
|
# # audio languages
|
||||||
|
# audio_langs = {
|
||||||
|
# t.get("properties", {}).get("language", "und")
|
||||||
|
# for t in data["tracks"] if t["type"] == "audio"
|
||||||
|
# }
|
||||||
|
|
||||||
|
# subtitle_langs = {
|
||||||
|
# t.get("properties", {}).get("language", "und")
|
||||||
|
# for t in data["tracks"] if t["type"] == "subtitles"
|
||||||
|
# }
|
||||||
|
|
||||||
|
# audio_lang = ",".join(sorted(audio_langs)).upper()
|
||||||
|
# subtitle_lang = ",".join(sorted(subtitle_langs)).upper()
|
||||||
|
|
||||||
|
# return (
|
||||||
|
# duration,
|
||||||
|
# vcodec.upper(),
|
||||||
|
# acodec.upper(),
|
||||||
|
# audio_channels,
|
||||||
|
# resolution,
|
||||||
|
# size_mb,
|
||||||
|
# audio_lang,
|
||||||
|
# subtitle_lang
|
||||||
|
# )
|
||||||
|
@staticmethod
|
||||||
|
def get_duration(filename):
|
||||||
|
result = subprocess.run(
|
||||||
|
["ffprobe", "-v", "error", "-show_entries", "format=duration",
|
||||||
|
"-of", "default=noprint_wrappers=1:nokey=1", filename],
|
||||||
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||||
|
)
|
||||||
|
return float(result.stdout)
|
||||||
|
|
||||||
|
    async def extract_screenshots(self, duration):
        """Run the blocking ffmpeg/PIL extraction in a worker thread.

        Thin async wrapper so the event loop is not blocked; returns the
        list of timestamps produced by _extract_screenshots_blocking().
        """
        return await asyncio.to_thread(self._extract_screenshots_blocking, duration)
|
||||||
|
|
||||||
|
    def _extract_screenshots_blocking(self, duration):
        """Grab TOTAL_FRAMES evenly spaced thumbnails from the video.

        Writes shot_NN.jpg files into OUTPUT_DIR (scaled to WIDTH x HEIGHT),
        stamps each with its timestamp via Pillow, and returns the list of
        timestamps (seconds) that were used.
        """
        os.makedirs(self.OUTPUT_DIR, exist_ok=True)
        interval = duration / self.TOTAL_FRAMES
        timestamps = []

        for i in range(self.TOTAL_FRAMES):
            timestamp = int(i * interval)
            if timestamp == 0:
                # Skip the very start (usually black/logo frames).
                # NOTE(review): assumes the video is longer than 5s -- confirm.
                timestamp = 5
            output_file = os.path.join(self.OUTPUT_DIR, f"shot_{i:02d}.jpg")

            # Earlier ffmpeg drawtext overlay, superseded by the Pillow stamp below:
            # drawtext = (
            #     f"drawtext=fontfile={self.FONT_PATH}:"
            #     f"text='%{{pts\\:hms}}':"
            #     f"x=10:y=10:fontsize=18:fontcolor=white:borderw=2"
            # )

            # -ss before -i: fast keyframe seek; grab a single frame at q=2.
            cmd = [
                "ffmpeg", "-ss", str(timestamp), "-i", self.VIDEO_PATH,
                "-frames:v", "1", "-q:v", "2",
                "-vf",
                f"scale={self.WIDTH}:{self.HEIGHT}",
                output_file, "-y"
            ]
            result = subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
            self.console.debug("result : ", result)
            # Draw timestamp with Pillow (white text, black outline).
            timestamp_str = str(timedelta(seconds=timestamp)).split(".")[0]
            img = Image.open(output_file)
            draw = ImageDraw.Draw(img)
            font = ImageFont.truetype(self.FONT_PATH, 32)
            draw.text((10, 10), timestamp_str, font=font, fill="white", stroke_width=2, stroke_fill="black")
            img.save(output_file)

            timestamps.append(timestamp)

        return timestamps
|
||||||
|
|
||||||
|
|
||||||
|
    def stitch_images(self, metadata_text, timestamps):
        """Assemble the thumbnails into one contact sheet with a metadata banner.

        Reads shot_NN.jpg from OUTPUT_DIR, arranges them into a
        GRID_ROWS x GRID_COLS grid, prepends a dark banner containing
        metadata_text, saves the result to self.OUTPUT_IMAGE, and removes
        the scratch directory.  ``timestamps`` is currently unused; kept
        for interface stability with extract_screenshots().
        """
        images = []
        for i in range(self.TOTAL_FRAMES):
            img_path = os.path.join(self.OUTPUT_DIR, f"shot_{i:02d}.jpg")
            img = cv2.imread(img_path)
            if img is not None:
                # cv2 loads BGR; convert so PIL sees the correct colors.
                img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
                images.append(img_rgb)

        if len(images) != self.TOTAL_FRAMES:
            # A missing/corrupt grab would break np.hstack below; bail out.
            self.console.error("Not enough images. Exiting.")
            return

        # One horizontal strip per grid row, then stack the rows vertically.
        rows = [np.hstack(images[i*self.GRID_COLS:(i+1)*self.GRID_COLS]) for i in range(self.GRID_ROWS)]
        sheet = np.vstack(rows)

        sheet_img = Image.fromarray(sheet)
        # Dark banner above the grid carrying the metadata summary text.
        banner = Image.new("RGB", (sheet_img.width, self.HEADER_HEIGHT), color=(40, 40, 40))
        draw = ImageDraw.Draw(banner)
        font = ImageFont.truetype(self.FONT_PATH, 20)
        draw.text((10, 10), metadata_text, font=font, fill="white")

        final_image = Image.new("RGB", (sheet_img.width, sheet_img.height + self.HEADER_HEIGHT))
        final_image.paste(banner, (0, 0))
        final_image.paste(sheet_img, (0, self.HEADER_HEIGHT))
        # quality=95, subsampling=0: near-lossless JPEG for readable text.
        final_image.save(self.OUTPUT_IMAGE, quality=95, subsampling=0)

        self.console.log(f"📷 Saved to {self.OUTPUT_IMAGE}")
        self.cleanup_screenshots()
|
||||||
|
|
||||||
|
def cleanup_screenshots(self):
|
||||||
|
if os.path.exists(self.OUTPUT_DIR):
|
||||||
|
shutil.rmtree(self.OUTPUT_DIR)
|
||||||
|
self.console.log("✅ All extracted screenshots deleted.")
|
||||||
|
|
||||||
|
    async def run(self, VIDEO_PATH, is_movie=False):
        """Generate the contact sheet for VIDEO_PATH and return its path.

        For movies the sheet is written next to the video; otherwise it is
        written one directory up (presumably the series folder above the
        season directory -- confirm against callers).
        """
        self.VIDEO_PATH = VIDEO_PATH
        base = os.path.dirname(self.VIDEO_PATH)
        parent = os.path.dirname(base)
        # '#' is stripped from the output name -- presumably to keep image
        # host URLs clean; verify.
        filename = os.path.basename(self.VIDEO_PATH).replace("#", "") + "_screenshot.jpg"
        self.OUTPUT_IMAGE = os.path.join(base if is_movie else parent, filename)

        duration, vcodec, acodec, audio_channels, resolution, size_mb, audio_lang, subtitle_lang = self.get_metadata()
        metadata_text = (
            f"{os.path.basename(self.VIDEO_PATH)}\n"
            f"{vcodec} | {acodec} {audio_channels} \n"
            f"{resolution} | {size_mb:.2f} MB | {duration/60:.2f} min | Audio: {audio_lang} | Subtitles: {subtitle_lang}"
        )
        self.console.log(f"🎬 Metadata: {metadata_text}")

        timestamps = await self.extract_screenshots(duration)
        self.stitch_images(metadata_text, timestamps)
        return self.OUTPUT_IMAGE
|
||||||
|
|
||||||
|
async def upload_to_imgbb(self, image_path):
|
||||||
|
|
||||||
|
timestamp = str(int(time.time() * 1000))
|
||||||
|
retry = 0
|
||||||
|
|
||||||
|
while retry < 5:
|
||||||
|
form = aiohttp.FormData()
|
||||||
|
form.add_field('source', open(image_path, 'rb'), filename=os.path.basename(image_path), content_type='image/jpeg')
|
||||||
|
form.add_field('type', 'file')
|
||||||
|
form.add_field('action', 'upload')
|
||||||
|
form.add_field('timestamp', timestamp)
|
||||||
|
form.add_field('auth_token', self.imgbb_token)
|
||||||
|
form.add_field('nsfw', '0')
|
||||||
|
form.add_field('mimetype', 'image/jpeg')
|
||||||
|
|
||||||
|
async with self.session.post(urljoin(self.base_url, '/json'), data=form) as response:
|
||||||
|
await response.text() # drain the response
|
||||||
|
if 200 <= response.status < 300:
|
||||||
|
self.console.log("✅ Upload successful")
|
||||||
|
await asyncio.sleep(5)
|
||||||
|
data = await response.json()
|
||||||
|
os.remove(image_path)
|
||||||
|
return data['image']['url']
|
||||||
|
else:
|
||||||
|
self.console.warn(f"❌ Upload failed ({response.status})")
|
||||||
|
retry += 1
|
||||||
|
await asyncio.sleep(10)
|
||||||
|
|
||||||
|
self.console.error("❌ Max retries reached")
|
||||||
|
return None
|
||||||
|
|
||||||
|
    async def login(self):
        """Create the HTTP session, scrape an auth token from the landing
        page, then POST the credentials from .env to the imgbb login endpoint.
        Sets self.session and self.imgbb_token as side effects.
        """
        # Lazily create one shared session with browser-like headers so the
        # site treats us like a normal web client.
        if not self.session:
            self.session = aiohttp.ClientSession(headers={
                'accept': 'application/json',
                'accept-language': 'en-US,en;q=0.9,th;q=0.8',
                'cache-control': 'no-cache',
                'origin': 'https://imgbb.ws',
                'pragma': 'no-cache',
                'referer': 'https://imgbb.ws/',
                'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/145.0.0.0 Safari/537.36 Edg/145.0.0.0',
            })

        # Fetch the landing page; the auth token is embedded in its HTML.
        async with self.session.get(self.base_url) as response:
            html = await response.text()
            if response.status != 200:
                self.console.error(f"❌ Failed to connect to {self.base_url}: {response.status}")
                # NOTE(review): exit() inside library code kills the whole
                # process — consider raising an exception instead.
                exit(1)

        # The token is a 40-char hex string assigned to auth_token in inline JS.
        match = re.search(r'auth_token\s*=\s*"([a-f0-9]{40})"', html)
        if match:
            self.imgbb_token = match.group(1)
            # NOTE(review): this logs a secret token; consider redacting.
            self.console.log("Auth token:", self.imgbb_token)
        else:
            self.console.error("Auth token not found.")

        # Credentials come from .env (imgbb_id / imgbb_password).
        creds = dotenv_values(".env")
        data = {
            'login-subject': creds.get("imgbb_id"),
            'password': creds.get("imgbb_password"),
            'auth_token': self.imgbb_token,
        }
        async with self.session.post(urljoin(self.base_url, '/login'), data=data) as response:
            self.console.log(f"Login status: {response.status}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Fix: `json` is used below but is not imported at module level, so this
    # manual smoke test crashed with NameError. Import it locally.
    import json

    from lib.torrent_creator import TorrentCreator, ffprobe_streams_to_bbcode

    torrent = TorrentCreator()
    VIDEO_PATH = '/Entertainment_1/Anime/Movie/KPop Demon Hunters (2025) [tmdbid-803796]/KPop.Demon.Hunters.2025.NF.WEBDL-1080P.AV1.EAC3.ATMOS.[SeFree].mkv'

    async def main():
        """Dump ffprobe metadata (JSON + BBCode) for VIDEO_PATH for manual inspection."""
        (duration, video_codec, audio_codec, audio_channels, resolution,
         size_mb, audio_lang, subtitle_lang, json_metadata) = torrent.get_metadata(VIDEO_PATH)
        with open("output.json", "w") as f:
            json.dump(json_metadata, f, indent=4, ensure_ascii=True)

        bb = ffprobe_streams_to_bbcode(json_metadata, os.path.basename(VIDEO_PATH))
        with open("output_bb.txt", "w") as f:
            f.write(bb)

    asyncio.run(main())
|
||||||
0
lib/__init__.py
Normal file
0
lib/__init__.py
Normal file
247
lib/check_track_detail.py
Normal file
247
lib/check_track_detail.py
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
import re
|
||||||
|
from langcodes import Language, tag_is_valid
|
||||||
|
|
||||||
|
def first_valid_bcp47(parts):
    """Return the first token in *parts* that is a valid BCP 47 tag, or None."""
    for raw in parts:
        token = raw.strip()
        # Bracketed markers such as "[Original]" are annotations, not tags.
        if token.startswith("[") and token.endswith("]"):
            continue
        # langcodes' tag_is_valid copes with common casing variants, so the
        # token is passed through unchanged.
        if tag_is_valid(token):
            return token
    return None
|
||||||
|
|
||||||
|
def extract_langs(text):
    """Scan a downloader log and collect audio / subtitle language tags.

    Returns (audio, subs): two lists of tags (e.g. "ja-JP"), each in
    first-seen order with duplicates removed.
    """
    LANG = r'([a-z]{2}(?:-[A-Z]{2})?)'
    # Audio lines look like "[AAC] | ja-JP | ..."; subtitle lines "[ASS] | th-TH | ...".
    audio_re = re.compile(rf'\[(AAC|DD\+?|AC-4|OPUS|VORB|DTS|ALAC|FLAC)\]\s*\|\s*{LANG}')
    sub_re = re.compile(rf'\[(SRT|SSA|ASS|VTT|TTML|SMI|SUB|MPL2|TMP|STPP|WVTT)\]\s*\|\s*{LANG}')

    audio, subs = [], []
    for line in text.splitlines():
        hit = audio_re.search(line)
        if hit and hit.group(2) not in audio:
            audio.append(hit.group(2))
        hit = sub_re.search(line)
        if hit and hit.group(2) not in subs:
            subs.append(hit.group(2))
    return audio, subs
|
||||||
|
|
||||||
|
def check_langs_with_langcodes(stderr_text: str, audio_lang_cfg: list[str], sub_lang_cfg: list[str]):
    """Compare the languages found in a log against the configured expectations.

    Returns a dict with, per track kind ("audio"/"subtitle"): the configured
    tags, the raw tags found, the sorted normalized primary-language subtags,
    and whether every configured language was found.
    """
    found_audio, found_subs = extract_langs(stderr_text)

    def primaries(tags):
        # Normalize e.g. "ja-JP" -> "ja" so region variants still match.
        return {Language.get(t).language for t in tags}

    audio_primary = primaries(found_audio)
    sub_primary = primaries(found_subs)

    def report(configured, tags, langs):
        return {
            "configured": configured,
            "found_tags": tags,
            "found_langs": sorted(langs),
            "exists_all": all(Language.get(c).language in langs for c in configured),
        }

    return {
        "audio": report(audio_lang_cfg, found_audio, audio_primary),
        "subtitle": report(sub_lang_cfg, found_subs, sub_primary),
    }
|
||||||
|
|
||||||
|
def video_details(stderr_text: str):
    """Parse video-track lines ("VID | [...]") from a downloader log.

    Only the 'All Tracks' section (before 'Selected Tracks') is scanned when
    that marker is present. Returns a list of dicts with codec, range,
    resolution [w, h] (strings), bitrate (int kb/s), framerate (float or
    None) and size (e.g. '376.04 MiB'); range and size may be None.
    """
    # Anchored on 'VID | [' so the log-level marker [I] never matches.
    VID_RE = re.compile(r"""
        VID\s*\|\s*\[\s*(?P<codec>[^,\]]+)\s*(?:,\s*(?P<range>[^\]]+))?\]\s*\|\s*
        (?P<width>\d{3,4})x(?P<height>\d{3,4})\s*@\s*(?P<kbps>[\d,]+)\s*kb/s
        (?:\s*\((?P<size>[^()]*?(?:MiB|GiB)[^()]*)\))?\s*,\s*(?P<fps>\d+(?:\.\d+)?)\s*FPS
    """, re.VERBOSE)

    if "Selected Tracks" in stderr_text:
        scope = stderr_text.split("Selected Tracks", 1)[0]
    else:
        scope = stderr_text

    tracks = []
    for hit in VID_RE.finditer(scope):
        try:
            fps = float(hit.group("fps"))
        except (TypeError, ValueError):
            fps = None  # fall back when the numeric parse fails
        codec = hit.group("codec")
        dyn_range = hit.group("range")
        size = hit.group("size")
        tracks.append({
            "codec": codec.strip() if codec else None,
            "range": dyn_range.strip() if dyn_range else None,
            "resolution": [hit.group("width"), hit.group("height")],
            "bitrate": int(hit.group("kbps").replace(",", "")),
            "framerate": fps,
            "size": size.strip() if size else None,
        })
    return tracks
|
||||||
|
|
||||||
|
def extract_chapters(stderr_text: str):
    """Parse chapter rows ("CHP | [NN] | HH:MM:SS.mmm | name") from a log.

    Only the section before 'Selected Tracks:' is scanned so the full
    'All Tracks' chapter menu is preferred when present. Returns a list of
    dicts like {'index': '01', 'time': '00:04:21.762', 'name': 'intro'}.
    """
    CHAPTER_RE = re.compile(
        r"""
        ^.*?\bCHP\b\s*\|\s*\[(?P<index>\d{1,3})\]\s*\|\s*
        (?P<time>\d{2}:\d{2}:\d{2}(?:\.\d{1,4})?)\s*\|\s*
        (?P<name>.+?)\s*$
        """,
        re.IGNORECASE | re.MULTILINE | re.VERBOSE
    )
    scope = stderr_text.split("Selected Tracks:", 1)[0]
    return [
        {"index": hit.group("index"), "time": hit.group("time"), "name": hit.group("name")}
        for hit in CHAPTER_RE.finditer(scope)
    ]
|
||||||
|
|
||||||
|
def extract_title(stderr_text: str) -> str | None:
|
||||||
|
TITLE_RE = re.compile(r"Getting tracks for\s+(?P<title>.+?)\s*\[", re.IGNORECASE)
|
||||||
|
|
||||||
|
m = TITLE_RE.search(stderr_text)
|
||||||
|
return m.group("title").strip() if m else None
|
||||||
|
|
||||||
|
def extract_file_path(stderr: str) -> str | None:
|
||||||
|
import re
|
||||||
|
m = re.search(r"File path -\s*\n([\s\S]*?)\n\s*\n", stderr)
|
||||||
|
if not m:
|
||||||
|
return None
|
||||||
|
return "".join(line.strip() for line in m.group(1).splitlines())
|
||||||
|
|
||||||
|
def main():
    """Manual smoke test: run extract_file_path() over a captured log excerpt.

    The fixture is trimmed to the 'File path -' section the parser actually
    consumes (the full multi-hundred-line log and the dead commented-out demo
    calls were removed); the printed output is unchanged.
    """
    stderr_example = """
🎉 Title downloaded in 0m29s!

File path -
 /Entertainment_1/Downloads/USCK/Noble.Reincarnation.Born.Blessed.So.Il
 l.Obtain.Ultimate.Power.2026.S01.1080p.CR.WEB-DL.DUAL.AAC2.0.H.264-[Se
 Free]/Noble.Reincarnation.Born.Blessed.So.Ill.Obtain.Ultimate.Power.20
 26.S01E11.Disinheritance.1080p.CR.WEB-DL.DUAL.AAC2.0.H.264-[SeFree].mk
 v

Processed all titles in 0m33s
"""
    print(extract_file_path(stderr_example))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Allow running this module directly as a manual smoke test.
    main()
|
||||||
395
lib/db.py
Normal file
395
lib/db.py
Normal file
@@ -0,0 +1,395 @@
|
|||||||
|
import sqlite3
from contextlib import closing
from datetime import datetime
|
||||||
|
|
||||||
|
class Today_Queue_Status:
    """String constants for every state a today_queue / download_history row can be in."""
    # Initial state when an episode is first queued.
    queue:str="queue"

    # Search phase.
    waiting:str="waiting"
    failed_search:str="failed search"
    search_found:str="search found"

    # Download phase.
    downloading:str="downloading"
    failed_download:str="failed download"
    downloaded:str="downloaded" ## finished

    # Import phase.
    importing:str="importing"
    failed_import:str="failed import"
    imported:str="imported"

    completed:str="completed" ## finished

    # Upload phase.
    uploading:str="uploading"
    uploaded:str="uploaded"
    fail_upload:str="failed upload"
|
||||||
|
|
||||||
|
class sqlite_db:
    """Thin SQLite persistence layer for the downloader's queue, schedule,
    watchlist and torrent bookkeeping.

    Every method opens a short-lived connection against ``db_path`` and rows
    are returned as plain dicts. The ``watchlist``, ``schedule`` and
    ``torrent`` tables are assumed to exist already (created elsewhere —
    TODO confirm); only ``today_queue`` and ``download_history`` are created
    on demand.

    Fixes over the previous version:
    - ``add_download_history(is_clear_title=True)`` deleted rows from
      ``today_queue`` instead of ``download_history`` (copy-paste bug).
    - ``DEFAULT queue`` relied on SQLite's bare-identifier leniency; the
      default is now a proper quoted string literal (same behavior).
    - Connections are now closed even when a statement raises.
    - ``get_today_schedule`` no longer crashes when a schedule row has no
      matching watchlist entry.
    """

    # 'Monday' -> 1 ... 'Sunday' -> 7, matching datetime.isoweekday().
    _WEEKDAY_TO_ISO = {
        'Monday': 1, 'Tuesday': 2, 'Wednesday': 3, 'Thursday': 4,
        'Friday': 5, 'Saturday': 6, 'Sunday': 7,
    }
    _ISO_TO_WEEKDAY = {v: k for k, v in _WEEKDAY_TO_ISO.items()}

    def __init__(self, db_path, **kwargs):
        super().__init__(**kwargs)
        self.db_path = db_path

    # ---- internal helpers -------------------------------------------------

    def _connect(self, dict_rows=False):
        """Open a new connection; with dict_rows, rows come back as sqlite3.Row."""
        conn = sqlite3.connect(self.db_path)
        if dict_rows:
            conn.row_factory = sqlite3.Row
        return conn

    def _fetchone(self, query, params=()):
        """Run a query and return the first row as a dict, or None."""
        with closing(self._connect(dict_rows=True)) as conn:
            row = conn.execute(query, params).fetchone()
        return dict(row) if row is not None else None

    def _fetchall(self, query, params=()):
        """Run a query and return all rows as a list of dicts."""
        with closing(self._connect(dict_rows=True)) as conn:
            rows = conn.execute(query, params).fetchall()
        return [dict(r) for r in rows]

    def _execute(self, query, params=()):
        """Run a single write statement and commit."""
        with closing(self._connect()) as conn:
            conn.execute(query, params)
            conn.commit()

    def _queue_table_upsert(self, table, queue, is_clear_queue, is_clear_title):
        """Shared implementation for add_today_queue / add_download_history.

        ``table`` is an internal constant ("today_queue"/"download_history"),
        never user input, so interpolating it into SQL is safe.
        """
        with closing(self._connect()) as conn:
            cursor = conn.cursor()
            if is_clear_queue:
                cursor.execute(f"DROP TABLE IF EXISTS {table}")
                conn.commit()
            if is_clear_title:
                # Drop stale rows for this specific episode before re-adding.
                # NOTE: assumes queue is non-empty, as the original did.
                cursor.execute(
                    f"DELETE FROM {table} WHERE title = ? AND season = ? AND episode = ?",
                    (queue[0]['title'], queue[0]['season'], queue[0]['episode']))
                conn.commit()
            cursor.execute(f"""
                CREATE TABLE IF NOT EXISTS {table} (
                    title TEXT,
                    season INTEGER,
                    episode INTEGER,
                    start_timestamp INTEGER,
                    status TEXT DEFAULT 'queue'
                )
            """)
            cursor.executemany(f"""
                INSERT OR IGNORE INTO {table} (title, season, episode, start_timestamp)
                VALUES (:title, :season, :episode, :start_timestamp)
            """, queue)
            conn.commit()

    # ---- public API -------------------------------------------------------

    def find_title_config_db(self, title):
        """Look up a watchlist row by exact Title; best-effort (None on DB error)."""
        try:
            return self._fetchone("SELECT * FROM watchlist WHERE Title = ?", (title,))
        except sqlite3.Error:
            # Best-effort: a missing table/unreadable DB counts as "not found".
            return None

    def add_today_queue(self, queue, is_clear_queue: bool = False, is_clear_title: bool = False):
        """Insert queue entries (list of dicts) into today_queue, creating it if needed."""
        self._queue_table_upsert("today_queue", queue, is_clear_queue, is_clear_title)

    def add_download_history(self, queue, is_clear_queue: bool = False, is_clear_title: bool = False):
        """Insert queue entries into download_history, creating it if needed.

        Fix: is_clear_title now clears the matching rows from
        download_history itself (it previously deleted from today_queue).
        """
        self._queue_table_upsert("download_history", queue, is_clear_queue, is_clear_title)

    def get_today_queue(self):
        """Return all today_queue rows as dicts."""
        return self._fetchall("SELECT * FROM today_queue")

    def get_watchlist(self):
        """Return all watchlist rows as dicts."""
        return self._fetchall("SELECT * FROM watchlist")

    def get_schedule(self):
        """Return all schedule rows as dicts."""
        return self._fetchall("SELECT * FROM schedule")

    def add_watchlist(self, entry):
        """Insert a watchlist row; silently ignored if it already exists."""
        self._execute("""
            INSERT OR IGNORE INTO watchlist (ID, Service, Title, if_dub, url, url_org, audio_lang, sub_lang, quality, codec, range, audio_channel, title_lang, org_lang, season)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (entry['ID'], entry['Service'], entry['Title'], entry['if_dub'], entry['url'], entry['url_org'],
              entry['audio_lang'], entry['sub_lang'], entry['quality'], entry['codec'],
              entry['range'], entry['audio_channel'], entry['title_lang'], entry['org_lang'], entry['season']))

    def update_download_status(self, title, season, episode, status):
        """Set the status column for one today_queue episode."""
        self._execute("""
            UPDATE today_queue SET status = ?
            WHERE title = ? AND season = ? AND episode = ?
        """, (status, title, season, episode))

    def update_download_history_status(self, title, season, episode, status):
        """Set the status column for one download_history episode."""
        self._execute("""
            UPDATE download_history SET status = ?
            WHERE title = ? AND season = ? AND episode = ?
        """, (status, title, season, episode))

    def get_download_status(self, title, season, episode):
        """Return {'status': ...} for one today_queue episode, or None."""
        return self._fetchone("""
            SELECT status FROM today_queue
            WHERE title = ? AND season = ? AND episode = ?
        """, (title, season, episode))

    def add_overwrite_schedule(self, entry) -> str:
        """Upsert a manual schedule override; returns a human-readable result."""
        existing = self._fetchone("SELECT * FROM schedule WHERE title = ?", (entry['title'],))
        if existing == entry:
            return 'No changes made, entry already exists.'
        self._execute("""
            INSERT INTO schedule (title, air_time, day_of_week, offset)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(title) DO UPDATE SET
                air_time=excluded.air_time,
                day_of_week=excluded.day_of_week,
                offset=excluded.offset
        """, (entry['title'], entry['air_time'], entry['day_of_week'], entry['offset']))
        return 'Entry added or updated successfully.'

    def get_today_schedule(self):
        """Build today's ordered release queue from schedule + watchlist.

        air_time is an 'HHMM' string; multi-release episodes of the same show
        are staggered five minutes apart. Rows already handled today
        (last_date == today) or past their final episode are skipped.
        """
        schedule = self.get_schedule()
        watchlist = self.get_watchlist()
        today_list = []
        for entry in schedule:
            show = next((item for item in watchlist if item['Title'] == entry['title']), None)
            if show is None:
                # Fix: a schedule row without a watchlist match used to crash.
                continue
            if entry['last_ep'] >= entry['end_ep']:
                continue
            if not entry['day_of_week']:
                continue
            for dow in entry['day_of_week'].split(','):
                if datetime.today().isoweekday() != self._WEEKDAY_TO_ISO[dow.strip()]:
                    continue
                if entry['last_date'] == datetime.now().date().strftime('%Y-%m-%d'):
                    continue  # already queued today
                air = datetime.now().replace(hour=int(entry['air_time'][:2]),
                                             minute=int(entry['air_time'][2:]),
                                             second=0, microsecond=0)
                timestamp = int(air.timestamp())
                last_ep = int(entry['last_ep']) if entry['last_ep'] is not None else 0
                for i in range(entry['multi_release']):
                    today_list.append({
                        "title": show['Title'],
                        "season": int(show['season']) if isinstance(show['season'], int) else 1,
                        "episode": last_ep + i + 1,
                        "sonarr_id": show['ID'],
                        "air_time": entry['air_time'],
                        "day_of_week": entry['day_of_week'],
                        "offset": entry['offset'],
                        "start_timestamp": timestamp + 60 * (i * 5),
                    })
        return sorted(today_list, key=lambda x: x["start_timestamp"])

    def update_schedule_episode(self, title, episode):
        """Advance a schedule row's last_ep to *episode*.

        The WHERE clause requires last_ep == episode - 1, guarding against
        double-advancing the counter.
        """
        self._execute("""
            UPDATE schedule
            SET last_ep = ?, last_date = ?
            WHERE title = ? AND last_ep = ?
        """, (episode, datetime.now().date().strftime('%Y-%m-%d'), title, episode - 1))

    def get_show_by_date(self, date):
        """Return schedule rows airing on *date* (an ISO weekday number, 1-7)."""
        return self._fetchall("""
            SELECT * FROM schedule
            WHERE day_of_week LIKE ?
        """, (f"%{self._ISO_TO_WEEKDAY[date]}%",))

    def get_show_by_title(self, title):
        """Return schedule rows matching *title* exactly."""
        return self._fetchall("""
            SELECT * FROM schedule
            WHERE title = ?
        """, (title,))

    def get_torrent_detail(self, title):
        """Return the torrent bookkeeping row for *title*, or None."""
        return self._fetchone("""
            SELECT * FROM torrent
            WHERE title = ?
        """, (title,))

    def update_torrent_detail(self, title, qbit_name, episode):
        """Record the latest episode and qBittorrent item name for *title*."""
        self._execute("""
            UPDATE torrent
            SET last_ep = ?, qbit_name = ?
            WHERE title = ?
        """, (episode, qbit_name, title))
|
||||||
|
|
||||||
53
lib/discord_bot.py
Normal file
53
lib/discord_bot.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import discord
|
||||||
|
from discord.ext import commands
|
||||||
|
# from apscheduler.schedulers.background import BackgroundScheduler
|
||||||
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||||
|
|
||||||
|
|
||||||
|
from dotenv import dotenv_values
|
||||||
|
from lib.logging_data import logger
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Bot configuration
# Gateway intents: message_content and members are privileged intents and
# must also be enabled for the bot in the Discord developer portal.
intents = discord.Intents.default()
intents.message_content = True
intents.members = True
|
||||||
|
|
||||||
|
|
||||||
|
class ScheduleBot(commands.Bot):
    """Discord bot that drives the scheduled-download pipeline.

    Wraps commands.Bot with a project logger and an AsyncIOScheduler used to
    fire download jobs on the bot's event loop.
    """

    def __init__(self, console=None, **kwargs):
        super().__init__(
            command_prefix='!',
            intents=intents,
            help_command=None,
            **kwargs
        )
        # Fix: logger() takes (app_name, log_dir, ...) — the old call passed a
        # file path as app_name, producing a mangled "../log/./log/app.log.log"
        # file. dict.get() also avoids a KeyError when gotify_token is missing
        # from .env.
        self.console = console if console else logger(
            app_name="schedule_bot",
            log_dir="./log",
            gotify_config=dotenv_values('.env').get('gotify_token'),
        )

        self.dotenv_path = '.env'

        self.scheduler = AsyncIOScheduler(
            job_defaults={
                'misfire_grace_time': 300,  # run if up to 5 minutes late
                'max_instances': 1,         # prevent overlapping runs for the same job
                'coalesce': True            # collapse missed cron/interval runs into one
            })

    async def setup_hook(self):
        """Called by discord.py once the bot is starting up; syncs slash commands."""
        self.console.log(f"Logged in as {self.user} (ID: {self.user.id})")

        try:
            synced = await self.tree.sync()
            self.console.log(f"Synced {len(synced)} command(s)")
        except Exception as e:
            # Sync failure is non-fatal; the bot still runs prefix commands.
            self.console.log(f"Failed to sync commands: {e}")
|
||||||
232
lib/logging_data.py
Normal file
232
lib/logging_data.py
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
|
||||||
|
import logging
|
||||||
|
from logging.handlers import TimedRotatingFileHandler
|
||||||
|
from pathlib import Path
|
||||||
|
# from rich.logging import RichHandler # optional
|
||||||
|
from discord import SyncWebhook
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from dotenv import dotenv_values
|
||||||
|
import requests
|
||||||
|
import discord
|
||||||
|
|
||||||
|
class logger:
    """Application logger with daily file rotation plus optional Gotify and
    Discord notification fan-out.

    Wraps a stdlib ``logging.Logger`` with a console handler and a
    ``TimedRotatingFileHandler``. Discord delivery goes through an internal
    asyncio queue drained by :meth:`worker`.
    """

    def __init__(
        self,
        app_name="app",
        log_dir="../log",
        gotify_config=None,   # str token, or a dict with a "token" key
        discord_config=None,  # opaque channel identifier; stored, not used here
        level=logging.DEBUG,
        use_utc=False,  # rotate at UTC midnight if True
        keep_days=7,    # retention: number of rotated daily files to keep
    ):
        """
        Continuous app logging with daily rotation.

        Current file name: <log_dir>/<app_name>.log
        Rotated backups:   <app_name>.log.YYYY-MM-DD
        """
        # ---- Ensure directory ----
        log_dir_path = Path(log_dir)
        log_dir_path.mkdir(parents=True, exist_ok=True)

        self.app_name = app_name
        self.log_dir_path = log_dir_path

        base_log_path = log_dir_path / f"{app_name}.log"

        # ---- Formatter using `{}` style ----
        # {levelname[0]} logs only the first letter of the level (I/D/E/W).
        LOG_FORMAT = "{asctime} [{levelname[0]}] {name} : {message}"
        LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
        LOG_STYLE = "{"
        LOG_FORMATTER = logging.Formatter(LOG_FORMAT, LOG_DATE_FORMAT, LOG_STYLE)

        # ---- Create handlers ----
        # Console (basic StreamHandler; swap to RichHandler if you want pretty output)
        console_handler = logging.StreamHandler()
        console_handler.setLevel(level)
        console_handler.setFormatter(LOG_FORMATTER)

        # Timed rotating file handler: rotates at midnight, keeps last N days
        file_handler = TimedRotatingFileHandler(
            filename=str(base_log_path),
            when="midnight",
            interval=1,
            backupCount=keep_days,
            encoding="utf-8",
            utc=use_utc,
        )
        # Suffix for rotated files (yields app.log.YYYY-MM-DD)
        file_handler.suffix = "%Y-%m-%d"
        file_handler.setLevel(level)
        file_handler.setFormatter(LOG_FORMATTER)

        # ---- Configure a dedicated logger to avoid duplicating handlers ----
        self.logger = logging.getLogger(app_name)
        self.logger.setLevel(level)

        # Remove any pre-existing handlers (avoids duplicates if this class is
        # instantiated more than once with the same app_name)
        for h in list(self.logger.handlers):
            self.logger.removeHandler(h)

        self.logger.addHandler(console_handler)
        self.logger.addHandler(file_handler)

        # Stop propagation to root to prevent double logging if root is
        # configured elsewhere
        self.logger.propagate = False

        # ---- Instance state ----
        # self.client is lazily created by discord(); callers may also assign
        # an existing bot/client instance to reuse its event loop.
        self.client = None
        self.worker_started = False
        self.queue = asyncio.Queue()

        # ---- Notification configs ----
        if gotify_config:
            # Accept either a bare token string or a dict holding one.
            self.gotify_token = gotify_config if isinstance(gotify_config, str) else gotify_config.get("token")
            if self.gotify_token:
                self.url = f"https://gotify.panitan.net/message?token={self.gotify_token}"
            else:
                self.url = None
                self.logger.warning("Gotify token missing in config.")
        else:
            self.url = None
            self.gotify_token = None

        if discord_config:
            # Stored only; nothing in this class reads it back.
            self.discord_channel_id = discord_config

        # Inform where we're logging
        self.logger.info(f"Logging to {base_log_path} (rotates daily, keep {keep_days} days).")

    # ---- Internal helper ----
    def _log_lines(self, message, log_level):
        """Log *message* line by line at *log_level*, skipping empty lines.

        exc_info is attached only for ERROR-level lines so tracebacks appear
        when logging from inside an except block.
        """
        message = str(message)
        for line in message.split('\n'):
            if line:
                self.logger.log(log_level, line, exc_info=log_level == logging.ERROR)

    # ---- Public log APIs ----
    def log(self, *message, is_gotify=False, is_discord: dict = None, image_url=None):
        """INFO-level log; optionally fan out to Gotify and/or Discord.

        *is_discord* is the config dict accepted by :meth:`discord`.
        Notification failures are deliberately swallowed so logging never
        breaks the caller.
        """
        message = " ".join(str(m) for m in message)
        self._log_lines(message, logging.INFO)
        try:
            if is_gotify:
                self.gotify(message, "Logging", image_url)
            if is_discord:
                self.discord(is_discord)
        except Exception:
            return

    def debug(self, *message, is_gotify=False, is_discord=None, image_url=None):
        """DEBUG-level variant of :meth:`log`."""
        message = " ".join(str(m) for m in message)
        self._log_lines(message, logging.DEBUG)
        try:
            if is_gotify:
                self.gotify(message, "Debug", image_url)
            if is_discord:
                self.discord(is_discord)
        except Exception:
            return

    def error(self, *message, is_gotify=False, is_discord=None, image_url=None):
        """ERROR-level variant of :meth:`log` (attaches exc_info via _log_lines)."""
        message = " ".join(str(m) for m in message)
        self._log_lines(message, logging.ERROR)
        try:
            if is_gotify:
                self.gotify(message, "Error", image_url)
            if is_discord:
                self.discord(is_discord)
        except Exception:
            return

    def warn(self, *message, is_gotify=False, is_discord=None, image_url=None):
        """WARNING-level variant of :meth:`log`.

        NOTE: logging.WARN is a legacy alias of logging.WARNING; behavior is
        identical.
        """
        message = " ".join(str(m) for m in message)
        self._log_lines(message, logging.WARN)
        try:
            if is_gotify:
                self.gotify(message, "Warning", image_url)
            if is_discord:
                self.discord(is_discord)
        except Exception:
            return

    # ---- Notifiers ----
    def gotify(self, msg, title, image_url=None):
        """POST *msg* to the configured Gotify server as markdown.

        No-op (with a warning) when Gotify was not configured. Network errors
        are logged, never raised.
        """
        if not self.url or not self.gotify_token:
            self.logger.warning("Gotify not configured; skipping notification.")
            # time.sleep(2)
            return

        if image_url:
            # NOTE(review): image_url itself is never embedded — this literal
            # looks like a truncated markdown image tag (e.g. "![Image](url)");
            # confirm intended payload.
            msg = f"{msg}\n\n!Image"

        try:
            requests.post(
                self.url,
                json={
                    "message": msg,
                    "title": title,
                    "extras": {"client::display": {"contentType": "text/markdown"}}
                },
                headers={"X-Gotify-Key": self.gotify_token},
                timeout=10,
            )
        except Exception as e:
            self.logger.error(f"Gotify notification failed: {e}")
            # time.sleep(2)

    def discord(self, config: dict):
        """Queue a Discord embed for async delivery by :meth:`worker`.

        *config* keys: "channel" (messageable), "embed" (discord.Embed),
        "web_hook_urls" (list of webhook URLs, optional).
        """
        channel = config.get("channel")
        embed = config.get("embed")
        web_hook_urls = config.get("web_hook_urls", [])
        # NOTE(review): this parses as `(not channel) and embed` — likely meant
        # `if not (channel and embed)`; as written, a missing channel with a
        # present embed is rejected but channel=None with embed=None is queued,
        # and worker()'s channel.send will then fail (caught and logged there).
        if not channel and embed:
            return
        try:
            if self.client is None:
                # Lazily create a client; its loop hosts the worker task.
                self.client = discord.Client(intents=discord.Intents.default())

            if not self.worker_started:
                self.client.loop.create_task(self.worker())
                self.worker_started = True

            self.queue.put_nowait((channel, embed, web_hook_urls))
            # async def send_message():
            #     await self.client.wait_until_ready()
            #     await channel.send(embed=embed)
            #     for url in web_hook_urls:
            #         webhook = SyncWebhook.from_url(url)
            #         webhook.send(embed=embed)

            # self.client.loop.create_task(send_message())
        except Exception as e:
            self.logger.error(f"Discord notification failed: {e}")
            # time.sleep(2)

    async def worker(self):
        """Background task: drain the queue and send each embed.

        Runs forever on the client's event loop; per-item failures are logged
        and the loop continues. NOTE: SyncWebhook.send is a blocking call made
        from the event loop — presumably acceptable here, but it will stall
        other tasks while sending.
        """
        await self.client.wait_until_ready()

        while True:
            channel, embed, web_hook_urls = await self.queue.get()

            try:
                await channel.send(embed=embed)

                for url in web_hook_urls:
                    webhook = SyncWebhook.from_url(url)
                    webhook.send(embed=embed)

            except Exception as e:
                self.logger.error(f"Discord send error: {e}")

            self.queue.task_done()
|
||||||
|
if __name__ == "__main__":
    # Smoke test: build a logger from .env settings and emit one test message.
    # FIX: read .env once; use .get() so a missing DISCORD_CHANNEL_ID does not
    # raise KeyError; dropped a stray no-op `print` expression.
    env = dotenv_values('.env')
    console = logger(
        app_name="scheduler",
        log_dir="./log",
        gotify_config=env.get("gotify_token"),
        discord_config=env.get('DISCORD_CHANNEL_ID'),
        level=logging.DEBUG,
    )
    console.log("This is a test log message.", "blah", is_gotify=True, is_discord={"channel": None, "embed": None, "web_hook_urls": []})
|
||||||
170
lib/sonarr.py
Normal file
170
lib/sonarr.py
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
from dotenv import dotenv_values
|
||||||
|
import requests
|
||||||
|
|
||||||
|
import pytz
|
||||||
|
|
||||||
|
class Sonarr_API:
    """Minimal client for a Sonarr v3 HTTP API instance.

    All requests go over plain HTTP to ``self.ip`` (host[:port], no scheme)
    and authenticate with the ``X-Api-Key`` header.
    """

    def __init__(self, ip: str, key: str, **kwargs):
        # ip:  Sonarr host[:port]; key: Sonarr API key.
        super().__init__(**kwargs)
        self.ip = ip
        self.key = key
        # Fixed to Bangkok time; used by the (commented-out) calendar parsing.
        self.tz = pytz.timezone("Asia/Bangkok")

        # self.db= sqlite_db(self.db_path)

        # iCal calendar feed URL (includes unmonitored series).
        self.calendar_url = f"http://{ip}/feed/v3/calendar/Sonarr.ics?unmonitored=true&apikey={key}"

    def get_episodes(self, folder=None) -> dict:
        """List importable files found by Sonarr's manual-import scan of *folder*.

        Raises requests.HTTPError on a non-2xx response.
        NOTE(review): annotated ``dict`` but the /manualimport endpoint is
        iterated as a list by import_episodes() — confirm actual payload shape.
        """
        # print(folder)
        params = {
            # 'seriesId': id,
            'folder': folder
        }

        base_url = f"http://{self.ip}/api/v3/manualimport"
        headers = {
            "X-Api-Key": self.key,
        }

        r = requests.get(base_url, headers=headers, params=params)
        r.raise_for_status()
        # print(r.json())

        return r.json()

    def import_episodes(self, entry: dict, title_config: dict, episodes_dict: dict, mode: str = "move") -> list[requests.Response]:
        """Trigger a Sonarr ManualImport command for files matching *entry*.

        entry:         expects 'season' and 'episode' numbers for the target.
        title_config:  per-title overrides; 'absolute_season'/'absolute' shift
                       the season/episode numbering when truthy.
        episodes_dict: scan results as returned by get_episodes().
        mode:          Sonarr importMode ("move" or "copy").
        Returns the list of POST responses (one per imported file).
        """
        base_url = f"http://{self.ip}/api/v3/command"
        headers = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Content-Type': 'application/json',
            'X-Api-Key': self.key
        }
        responses = []

        for d in episodes_dict:
            # Skip scan entries Sonarr could not fully resolve.
            if not d.get('path') or not d.get('series') or not d.get('episodes'):
                continue
            for e in d['episodes']:
                season = int(entry['season'])
                if title_config['absolute_season']:
                    # NOTE(review): offsets title_config['season'] (not
                    # entry['season']) by the absolute-season shift — confirm
                    # this asymmetry with the episode branch below is intended.
                    season = int(title_config['season']) + int(title_config['absolute_season'])
                ep = int(entry['episode'])
                if title_config['absolute']:
                    ep = int(entry['episode']) + int(title_config['absolute'])
                # Compare as strings to sidestep int/str mismatches in payloads.
                if not (str(season) == str(e['seasonNumber']) and str(ep) == str(e['episodeNumber'])\
                # and str(title_config['ID']) == str(e['seriesId'])\
                ):

                    continue
                # print(title_config['ID'],str(entry['season']), str(e['seasonNumber']), str(entry['episode']), str(e['episodeNumber']))

                data = {
                    "name": "ManualImport",
                    "files": [{
                        "path": d['path'],
                        "seriesId": d['series']['id'],
                        "episodeIds": [e['id']],
                        "releaseGroup": d.get('releaseGroup'),
                        "quality": d.get('quality'),
                        "languages": d.get('languages'),
                        "indexerFlags": d.get('indexerFlags')
                    }],
                    "importMode": mode
                }

                r = requests.post(base_url, headers=headers, json=data)
                r.raise_for_status()
                responses.append(r)

        return responses

    def get_episode_detail(self, episodeIds) -> dict:
        """Fetch the detail record of a single episode by its Sonarr episode id."""
        # params = {
        #     'seriesId': seriesId,
        #     'episodeIds': episodeIds
        # }

        base_url = f"http://{self.ip}/api/v3/episode/{episodeIds}"
        headers = {
            "X-Api-Key": self.key,
        }
        r = requests.get(base_url, headers=headers)
        r.raise_for_status()
        # print(r.json())

        return r.json()

    def get_series_detail(self, id):
        """Fetch the series record for Sonarr series *id*.

        NOTE: unlike the methods above, no raise_for_status() — an HTTP error
        surfaces as a JSON decode failure or an error payload.
        """
        headers = {
            'Accept': 'application/json',
            'X-Api-Key': self.key,
        }

        response = requests.get(f'http://{self.ip}/api/v3/series/{id}', headers=headers)
        return response.json()

    def get_episode_detail_from_season(self, seriesId, season):
        """List all episode records of one season of a series."""
        response = requests.get(
            f"http://{self.ip}/api/v3/episode",
            headers={
                "Accept": "application/json",
                "X-Api-Key": self.key
            },
            params={
                "seriesId": seriesId,
                "seasonNumber": season
            }
        )
        return response.json()
|
||||||
|
|
||||||
|
|
||||||
|
# def get_today_schedule(self) -> list[dict]:
|
||||||
|
# response = requests.get(self.calendar_url)
|
||||||
|
# response.raise_for_status() # Raises an error if the request failed
|
||||||
|
|
||||||
|
# # Parse the calendar
|
||||||
|
# calendar = Calendar(response.text)
|
||||||
|
|
||||||
|
# db_watchlist=self.db.get_watchlist()
|
||||||
|
|
||||||
|
# # List all events
|
||||||
|
# daily_list=[]
|
||||||
|
|
||||||
|
# for event in calendar.events:
|
||||||
|
|
||||||
|
# if date.today() != event.begin.date():
|
||||||
|
# continue
|
||||||
|
|
||||||
|
# title_split = event.name.split(" - ")
|
||||||
|
# title= title_split[0].strip()
|
||||||
|
|
||||||
|
# if not any(x['Title'] == title for x in db_watchlist):
|
||||||
|
# continue
|
||||||
|
|
||||||
|
# ep = title_split[-2].split('x')
|
||||||
|
# season = ep[-2].strip() if ep else "1"
|
||||||
|
# episode = ep[-1].strip() if ep else "1"
|
||||||
|
# episode_name = event.name.split(" - ")[2].strip()
|
||||||
|
# if int(season) < 1 or int(episode) < 1:
|
||||||
|
# continue
|
||||||
|
|
||||||
|
# detail ={
|
||||||
|
# "title": title,
|
||||||
|
# "season": int(season),
|
||||||
|
# "episode": int(episode),
|
||||||
|
# "episode_name": episode_name,
|
||||||
|
# "start_timestamp": int(datetime.fromisoformat(str(event.begin)).astimezone(self.tz).timestamp()),
|
||||||
|
# "end_timestamp": int(datetime.fromisoformat(str(event.end)).astimezone(self.tz).timestamp()),
|
||||||
|
# "sonarr_id": event.uid.split("_")[-1],
|
||||||
|
# }
|
||||||
|
# daily_list.append(detail)
|
||||||
|
|
||||||
|
# return daily_list
|
||||||
|
|
||||||
|
|
||||||
|
if "__main__" == __name__:
    # Ad-hoc smoke test against a local Sonarr instance.
    # FIX: __init__ accepts only (ip, key) — the previous third positional
    # argument (a sqlite path) raised TypeError; get_episode_detail takes a
    # single episode id (the old call passed two arguments).
    sonarr = Sonarr_API("media-server.lan:8989", dotenv_values("/root/VT_Schedule_downloader/.env")['sonarr_key'])
    print(sonarr.get_episode_detail(43758))
|
||||||
|
|
||||||
1463
lib/torrent_creator.py
Normal file
1463
lib/torrent_creator.py
Normal file
File diff suppressed because it is too large
Load Diff
1211
lib/usk.py
Normal file
1211
lib/usk.py
Normal file
File diff suppressed because it is too large
Load Diff
432
main.py
Normal file
432
main.py
Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
|
||||||
|
from lib.logging_data import logger
|
||||||
|
|
||||||
|
import discord
|
||||||
|
from discord import app_commands
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from lib.usk import USK
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
import os
|
||||||
|
|
||||||
|
load_dotenv(".env")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
console = logger(app_name="scheduler",log_dir="./log",gotify_config=os.getenv("gotify_token"),discord_config=os.getenv('DISCORD_CHANNEL_ID'))
|
||||||
|
console.log("Starting Unshackle Schedule Bot...")
|
||||||
|
# bot=ScheduleBot(console)
|
||||||
|
# console.client=bot
|
||||||
|
usk_scheduler = USK(console)
|
||||||
|
console.client=usk_scheduler
|
||||||
|
|
||||||
|
@usk_scheduler.event
|
||||||
|
async def on_ready():
|
||||||
|
# console.log(f'{bot.user} has connected to Discord!')
|
||||||
|
activity = discord.Game(name="Managing schedules | /help")
|
||||||
|
await usk_scheduler.change_presence(activity=activity)
|
||||||
|
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="help", description="Show bot commands and usage")
async def help_command(interaction: discord.Interaction):
    """Reply with an embed listing every available slash command."""
    help_embed = discord.Embed(
        title="Unshackle Schedule Bot Commands",
        description="Here are the available commands:",
        color=discord.Color.blue()
    )
    # One (name, description) pair per slash command.
    command_overview = (
        ("/help", "Show this help message"),
        ("/schedule", "Schedule command"),
        ("/get_watchlist", "Show watchlist"),
        ("/add_to_watchlist", "Add watchlist"),
        ("/add_schedule_overwrite", "Add schedule overwrite"),
    )
    for cmd_name, cmd_desc in command_overview:
        help_embed.add_field(name=cmd_name, value=cmd_desc, inline=False)

    await interaction.response.send_message(embed=help_embed)
|
||||||
|
|
||||||
|
# Error handling
|
||||||
|
@usk_scheduler.tree.error
async def on_app_command_error(interaction: discord.Interaction, error: app_commands.AppCommandError):
    """Global slash-command error handler.

    Permission failures get an ephemeral denial; everything else is logged and
    reported either via followup (if already responded) or to the channel.
    """
    if isinstance(error, app_commands.CheckFailure):
        embed = discord.Embed(
            title="❌ Permission Denied",
            description="You don't have permission to use this command.",
            color=0xff0000
        )
        if not interaction.response.is_done():
            await interaction.response.send_message(embed=embed, ephemeral=True)
        return
    else:
        embed = discord.Embed(
            title="❌ Error",
            description=f"An error occurred: {str(error)}",
            color=0xff0000
        )
        # FIX: include the actual error in the log; the old message
        # ("An error occurred") carried no diagnostic information.
        console.error(f"An error occurred: {error}")
        channel = usk_scheduler.get_channel(interaction.channel_id)

        if interaction.response.is_done():
            await interaction.followup.send(embed=embed, ephemeral=True)
        else:
            # NOTE(review): posts to the channel instead of answering the
            # interaction — the interaction itself will show as unanswered.
            await channel.send(embed=embed)
        # raise error
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="schedule", description="Schedule command")
@app_commands.describe(
    cmd="Command for schedule (e.g., start, check, stop)",
)
@app_commands.choices(cmd=[
    app_commands.Choice(name="Start", value="start"),
    app_commands.Choice(name="Check", value="check"),
    app_commands.Choice(name="Stop", value="stop"),
])
async def schedule_command(
    interaction: discord.Interaction,
    cmd: str,
):
    """Control the download scheduler: start today's schedule, check the
    queue/jobs, or stop the scheduler."""
    if cmd == "start":
        embed = discord.Embed(
            title="Schedule Starting",
            description="The schedule is starting now. Please wait...",
            color=discord.Color.green()
        )
        # Respond first: set_today_schedule may take longer than the 3s
        # interaction deadline.
        await interaction.response.send_message(embed=embed)

        await usk_scheduler.set_today_schedule(interaction)

        console.log("Schedule started successfully.")

        embed = discord.Embed(
            title="Scheduled Jobs",
            description="Here are the currently scheduled jobs:",
            color=discord.Color.blue()
        )
        jobs = usk_scheduler.scheduler.get_jobs()
        for job in jobs:
            # FIX: paused/not-yet-scheduled jobs have next_run_time None —
            # calling .strftime on it raised AttributeError. The old
            # `if job:` guard was always true for list elements.
            if job and job.next_run_time:
                embed.add_field(
                    name=job.id,
                    value=f"Next run time: {job.next_run_time.strftime('%Y-%m-%d %H:%M:%S')}",
                    inline=False
                )
        await asyncio.sleep(1)
        await interaction.followup.send(embed=embed)

    elif cmd == "check":
        # Report today's queued episodes plus the live APScheduler job list.
        embed = discord.Embed(
            title="Today's Schedule",
            description="Here is the current schedule for today:",
            color=discord.Color.blue()
        )
        # schedule = bot.sonarr.get_today_schedule()
        queue = usk_scheduler.db.get_today_queue()
        if queue:
            for item in queue:
                embed.add_field(
                    name=f"{item['title']} (S{item['season']:02}E{item['episode']:02}) - {item['status']}",
                    value=f"Air Date: {datetime.fromtimestamp(item['start_timestamp']).strftime('%Y-%m-%d %H:%M:%S')}",
                    inline=False
                )
        else:
            embed.add_field(name="No Queue", value="There are no queued episodes for today.", inline=False)

        jobs = usk_scheduler.scheduler.get_jobs()

        if jobs:
            job_list = "\n".join([f"{job.id} - {job.next_run_time}" for job in jobs])
            embed.add_field(name="Scheduled Jobs", value=job_list, inline=False)
        else:
            embed.add_field(name="No Scheduled Jobs", value="There are no scheduled jobs at the moment.", inline=False)
        await interaction.response.send_message(embed=embed)
        console.log('Checked the queue successfully.')

    elif cmd == "stop":
        await interaction.response.send_message("Stopping the schedule...")
        # wait=False: do not block the event loop waiting for running jobs.
        usk_scheduler.scheduler.shutdown(wait=False)
        await interaction.followup.send("Schedule stopped.")
        console.log('Schedule stopped successfully.')
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="recheck", description="recheck schedule by weekday")
@app_commands.describe(
    weekday="Day of the week (0=Monday, 6=Sunday)",
)
@app_commands.choices(weekday=[
    app_commands.Choice(name="Monday", value='1'),
    app_commands.Choice(name="Tuesday", value='2'),
    app_commands.Choice(name="Wednesday", value='3'),
    app_commands.Choice(name="Thursday", value='4'),
    app_commands.Choice(name="Friday", value='5'),
    app_commands.Choice(name="Saturday", value='6'),
    app_commands.Choice(name="Sunday", value='7'),
    app_commands.Choice(name="All", value='8'),

])
async def recheck_command(
    interaction: discord.Interaction,
    weekday: str,

):
    """Re-run the schedule check for one weekday (or all days)."""
    day_index = int(weekday)
    # Choice values are 1-based; index 8 maps to the "All" pseudo-day.
    day_label = ("Monday", "Tuesday", "Wednesday", "Thursday",
                 "Friday", "Saturday", "Sunday", "All")[day_index - 1]
    await interaction.response.send_message(f"Rechecking schedule for {day_label}...")
    await usk_scheduler.recheck_day(day_index, interaction)
    console.log(f"Rechecked the schedule for {day_label} successfully.")
    await asyncio.sleep(1)
    await interaction.followup.send(f"Rechecked the schedule for {day_label} successfully.")
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="recheck_title", description="recheck schedule by title")
@app_commands.describe(
    title="Title of the show"
)
async def recheck_title_command(
    interaction: discord.Interaction,
    title: str,
    season: Optional[int] = None,
    episode: Optional[int] = None,

):
    """Re-run the schedule check for a single show, optionally narrowed to one
    season/episode."""
    await interaction.response.send_message(f"Rechecking schedule for {title}...")

    await usk_scheduler.recheck_Title(title, interaction, season, episode)

    console.log(f"Rechecked the schedule for {title} successfully.")
    await asyncio.sleep(1)
    await interaction.followup.send(f"Rechecked the schedule for {title} successfully.")
|
||||||
|
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="get_watchlist", description="Get watchlist command")
async def get_watchlist_command(interaction: discord.Interaction):
    """Send the saved watchlist as one or more embeds, paginated to stay
    under Discord's per-embed character cap."""
    entries = usk_scheduler.db.get_watchlist()
    if not entries:
        await interaction.response.send_message("Your watchlist is empty.")
        return

    MAX_EMBED_CHARS = 5900  # Stay safely under 6000 char limit
    pages = []
    page = discord.Embed(
        title="Your Watchlist",
        description="Here are the shows in your watchlist:",
        color=discord.Color.green()
    )
    used_chars = len(page.title) + len(page.description)

    for entry in entries:
        field_title = entry['Title']
        field_body = (
            f"Service: {entry['Service']}\n"
            f"URL: {entry['url']}\n"
            f"Quality: {entry['quality']}\n"
            f"Codec: {entry['codec']}\n"
            f"Range: {entry['range']}\n"
            f"Audio lang: {entry['audio_lang']}\n"
            f"Subtitle lang: {entry['sub_lang']}"
        )

        # Start a new (title-less) page when this field would overflow.
        if used_chars + len(field_title) + len(field_body) > MAX_EMBED_CHARS:
            pages.append(page)
            page = discord.Embed(color=discord.Color.green())
            used_chars = 0

        page.add_field(name=field_title, value=field_body, inline=False)
        used_chars += len(field_title) + len(field_body)

    # The page under construction always holds at least the last field.
    pages.append(page)

    # First page answers the interaction; the rest go out as followups.
    for page_index, page in enumerate(pages):
        if page_index == 0:
            await interaction.response.send_message(embed=page)
        else:
            await interaction.followup.send(embed=page)

    console.log('Fetched watchlist successfully.')
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="add_to_watchlist", description="Add watchlist command")
@app_commands.describe(
    id_="ID of the show as Sonarr ID (e.g., 12345)",
    service="Service (e.g., BLBL, TID, MMAX)",
    title="Title of the show",
    url="URL of the show can be ID or URL",
    audio_lang="Audio language (e.g., en, jp)",
    sub_lang="Subtitle language (e.g., en, jp)",
    quality="Quality of the show (e.g., 1080p, 720p)",
    codec="Codec used (e.g., h264, h265)",
    range_="Range of episodes (e.g., SDR,HDR)",
    audio_channel="Audio channel (e.g., 2.0,5.1)",
    season="Season number (e.g., 1, 2)",

    # for BiliBili
    if_dub="For BiliBili : If the show is dubbed (e.g., True, False)",
    url_org="For BiliBili : URL of the original show (if applicable)",

    title_lang="For BiliBili : Title language (e.g., en, jp)",
    org_lang="For BiliBili : Original language (e.g., en, jp)",

)
@app_commands.choices(service=[
    app_commands.Choice(name="Amazon Prime", value="AMZN"),
    app_commands.Choice(name="Netflix", value="NF"),
    app_commands.Choice(name="Hotstar", value="HS"),
    app_commands.Choice(name="VIU", value="VIU"),
    app_commands.Choice(name="TrueID", value="TID"),
    app_commands.Choice(name="Mono Max", value="MMAX"),
    app_commands.Choice(name="BiliBili", value="BLBL"),
    app_commands.Choice(name="FutureSkill", value="FSK"),
    app_commands.Choice(name="HBO Max", value="HMAX"),
    app_commands.Choice(name="iQIYI", value="IQ"),
    app_commands.Choice(name="WeTV", value="WTV"),
    app_commands.Choice(name="Crunchyroll", value="CR"),
    app_commands.Choice(name="Laftel", value="LT"),
])
@app_commands.choices(if_dub=[
    app_commands.Choice(name="True", value="True"),
    app_commands.Choice(name="False", value="False"),

])
@app_commands.choices(quality=[
    app_commands.Choice(name="2160p", value="2160"),
    app_commands.Choice(name="1440p", value="1440"),
    app_commands.Choice(name="1080p", value="1080"),
    app_commands.Choice(name="720p", value="720"),
    app_commands.Choice(name="480p", value="480"),
    app_commands.Choice(name="Best", value="Best"),
])
@app_commands.choices(codec=[
    app_commands.Choice(name="H265", value="265"),
    app_commands.Choice(name="H264", value="264"),
])
@app_commands.choices(range_=[
    app_commands.Choice(name="HDR", value="HDR"),
    app_commands.Choice(name="SDR", value="SDR"),
])
@app_commands.choices(audio_channel=[
    app_commands.Choice(name="2.0", value="20"),
    app_commands.Choice(name="5.1", value="51"),
    app_commands.Choice(name="Best", value= "Best"),
])
async def add_to_watchlist_command(
    interaction: discord.Interaction,
    id_: int,
    service: str,
    title: str,
    url: str,

    audio_lang: str = "orig,th",
    sub_lang: str = "orig,th,en",
    quality: str = '1080',
    codec: str = '264',
    range_: str = 'SDR',
    audio_channel: str = None,


    # for BiliBili
    if_dub: str = "False",
    url_org: str = None,
    season: str = None,
    title_lang: str = None,
    org_lang: str = None,

):
    """Persist a new show entry into the watchlist database and confirm."""
    entry = {
        'ID': id_,
        'Service': service,
        'Title': title,
        'url': url,
        'audio_lang': audio_lang,
        'sub_lang': sub_lang,
        # FIX: the "Best" quality choice is non-numeric — int("Best") raised
        # ValueError; keep it as-is and convert only numeric values.
        'quality': int(quality) if quality.isdigit() else quality,
        'codec': int(codec),
        'range': range_,
        'audio_channel': audio_channel,

        'if_dub': if_dub.lower() == "true",
        'url_org': url_org,
        # FIX: season defaults to None — int(None) raised TypeError whenever
        # the optional parameter was omitted.
        'season': int(season) if season is not None else None,
        'title_lang': title_lang,
        'org_lang': org_lang
    }

    usk_scheduler.db.add_watchlist(entry)

    embed = discord.Embed(
        title="Watchlist Entry Added",
        description=f"Added **{title}** to your watchlist.",
        color=discord.Color.green()
    )

    await interaction.response.send_message(embed=embed)
    console.log(f'Added {title} to watchlist successfully.')
|
||||||
|
|
||||||
|
@usk_scheduler.tree.command(name="add_schedule", description="Add schedule command")
@app_commands.describe(
    title="Title of the show",
    air_time="Air time of the show (format: HHMM, e.g., 2100 for 9 PM)",
    day_of_week="Day of the week (Monday, Sunday or Monday,Sunday)",
    offset="Offset episode (e.g., 0 for no offset, 1 for next episode)",
)
# @app_commands.choices(title=[
#     app_commands.Choice(name=f'{t['Title'][:15]} season {t['season']}', value=t['Title'][:20]) for t in sorted(bot.db.get_watchlist(), key=lambda x: x["Title"], reverse=True)
# ])
async def add_schedule_command(interaction: discord.Interaction,
                               title: str,
                               air_time: str,
                               day_of_week: str,
                               offset: int = 0):
    """Store a schedule overwrite (custom air time/day/episode offset) for a
    show, replying according to the DB layer's status string."""
    entry = {
        'title': title,
        'air_time': air_time,
        'day_of_week': day_of_week,
        'offset': offset
    }
    status = usk_scheduler.db.add_overwrite_schedule(entry)
    if status == 'No changes made, entry already exists.':
        console.log(f'Schedule overwrite for {title} at {air_time} on {day_of_week} with offset {offset} already exists.')
        embed = discord.Embed(
            title="Schedule Overwrite Already Exists",
            description=f"An overwrite for **{title}** at **{air_time}** on **{day_of_week}** with offset **{offset}** already exists.",
            color=discord.Color.orange()
        )
        await interaction.response.send_message(embed=embed, ephemeral=True)
        return
    elif status == 'Entry added or updated successfully.':
        console.log(f'Schedule overwrite for {title} at {air_time} on {day_of_week} with offset {offset} added or updated successfully.')
        embed = discord.Embed(
            title="Schedule Overwrite Added",
            description=f"Added schedule overwrite for **{title}** at **{air_time}** on **{day_of_week}** with offset **{offset}**.",
            color=discord.Color.green()
        )
        await interaction.response.send_message(embed=embed)
        return
    # Robustness FIX: previously any other status string fell through without
    # answering the interaction, so Discord showed "application did not
    # respond". Report the unexpected status instead.
    console.log(f'Unexpected status from add_overwrite_schedule: {status}')
    await interaction.response.send_message(f"Unexpected result: {status}", ephemeral=True)
|
||||||
|
|
||||||
|
# --- Script entry point --------------------------------------------------
# Guarded so that importing this module (e.g. from tests or tooling) does
# not start the bot as a side effect; running the file directly behaves
# exactly as before.
if __name__ == "__main__":
    token = os.getenv("DISCORD_TOKEN")
    if not token:
        print("❌ DISCORD_TOKEN not found in environment variables!")
        print("Make sure you have a .env file with your bot token.")
    else:
        try:
            # log_handler=None disables discord.py's default log wiring;
            # the project uses its own `console` logger instead.
            usk_scheduler.run(token, log_handler=None)
        except discord.LoginFailure:
            print("❌ Invalid bot token! Please check your DISCORD_TOKEN in the .env file.")
        except Exception as e:
            # Broad catch is acceptable at this top-level boundary: report
            # the failure instead of dying with a raw traceback.
            print(f"❌ An error occurred: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
30
pyproject.toml
Normal file
30
pyproject.toml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Project metadata and dependencies (PEP 621 / uv).
[project]
name = "usk-schedule-downloader"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "apscheduler>=3.11.2",
    "beautifulsoup4>=4.14.3",
    "discord-py>=2.7.1",
    # NOTE(review): both "dotenv" and "python-dotenv" are listed; they ship
    # conflicting distributions of the same `dotenv` import name — confirm
    # which one the code actually needs.
    "dotenv>=0.9.9",
    "langcodes>=3.5.1",
    "numpy>=2.4.3",
    "opencv-python>=4.13.0.92",
    "pillow>=12.1.1",
    "python-dotenv>=1.2.2",
    "pytz>=2026.1.post1",
    # NOTE(review): ruff is a lint/format tool; presumably belongs in a dev
    # dependency group rather than runtime dependencies — confirm.
    "ruff>=0.15.8",
    "themoviedb[full]>=1.0.2",
    "torf>=4.3.1",
    "tqdm>=4.67.3",
    "unshackle",
]

# Package discovery: only the lib/ package tree is included in builds.
[tool.setuptools.packages.find]
where = ["."]
include = ["lib*"]

[tool.uv.sources]
# Local path source: resolving "unshackle" requires this checkout to exist
# at this absolute path on the build machine — not portable across hosts.
unshackle = { path = "/root/unshackle-SeFree" }
|
||||||
Reference in New Issue
Block a user