feat(debug): add JSONL debug logging to decryption, muxing, and all downloaders

Expand debug logging coverage for better diagnostics when investigating download/decryption issues such as the QUICKTIME/cbcs problem.
This commit is contained in:
Andy
2026-02-17 13:58:36 -07:00
parent 0217086abf
commit cc89f4ca93
7 changed files with 241 additions and 7 deletions

View File

@@ -441,6 +441,37 @@ def download(
yield dict(downloaded=f"{filesize.decimal(dl_speed)}/s")
time.sleep(1)
if debug_logger:
output_files = []
output_total_size = 0
if output_dir.exists():
try:
for f in sorted(output_dir.iterdir()):
if f.is_file():
fsize = f.stat().st_size
output_files.append(f.name)
output_total_size += fsize
except OSError:
output_files = ["<error listing files>"]
debug_logger.log(
level="DEBUG",
operation="downloader_aria2c_complete",
message="Aria2c download completed successfully",
context={
"url_count": len(urls),
"gid_count": len(gids),
"completed_count": len(completed),
"output_dir": str(output_dir),
"output_dir_exists": output_dir.exists(),
"output_files_count": len(output_files),
"output_files": output_files[:10],
"output_total_size": output_total_size,
"filename": filename,
},
)
except KeyboardInterrupt:
DOWNLOAD_CANCELLED.set()
raise

View File

@@ -293,6 +293,18 @@ def curl_impersonate(
download_sizes.clear()
if debug_logger:
output_files = []
output_total_size = 0
if output_dir.exists():
try:
for f in sorted(output_dir.iterdir()):
if f.is_file():
fsize = f.stat().st_size
output_files.append(f.name)
output_total_size += fsize
except OSError:
output_files = ["<error listing files>"]
debug_logger.log(
level="DEBUG",
operation="downloader_curl_impersonate_complete",
@@ -300,6 +312,10 @@ def curl_impersonate(
context={
"url_count": len(urls),
"output_dir": str(output_dir),
"output_dir_exists": output_dir.exists(),
"output_files_count": len(output_files),
"output_files": output_files[:10],
"output_total_size": output_total_size,
"filename": filename,
},
)

View File

@@ -316,6 +316,17 @@ def download(
])
track_url_display = track.url[:200] + "..." if len(track.url) > 200 else track.url
# Determine decryption engine info for logging
decryption_engine = None
decryption_binary = None
if content_keys:
decryption_config = config.decryption.lower()
decryption_engine = DECRYPTION_ENGINE.get(decryption_config) or "SHAKA_PACKAGER"
if decryption_engine == "SHAKA_PACKAGER" and binaries.ShakaPackager:
decryption_binary = str(binaries.ShakaPackager)
elif decryption_engine == "MP4DECRYPT" and binaries.Mp4decrypt:
decryption_binary = str(binaries.Mp4decrypt)
debug_logger.log(
level="DEBUG",
operation="downloader_n_m3u8dl_re_start",
@@ -331,6 +342,9 @@ def download(
"retry_count": retry_count,
"has_content_keys": bool(content_keys),
"content_key_count": len(content_keys) if content_keys else 0,
"decryption_engine": decryption_engine,
"decryption_binary": decryption_binary,
"decryption_config": config.decryption if content_keys else None,
"has_proxy": bool(proxy),
"skip_merge": skip_merge,
"has_custom_args": bool(track.downloader_args),

View File

@@ -303,6 +303,18 @@ def requests(
raise
if debug_logger:
output_files = []
output_total_size = 0
if output_dir.exists():
try:
for f in sorted(output_dir.iterdir()):
if f.is_file():
fsize = f.stat().st_size
output_files.append(f.name)
output_total_size += fsize
except OSError:
output_files = ["<error listing files>"]
debug_logger.log(
level="DEBUG",
operation="downloader_requests_complete",
@@ -310,6 +322,10 @@ def requests(
context={
"url_count": len(urls),
"output_dir": str(output_dir),
"output_dir_exists": output_dir.exists(),
"output_files_count": len(output_files),
"output_files": output_files[:10],
"output_total_size": output_total_size,
"filename": filename,
},
)