feat(main): main

This commit is contained in:
2026-03-09 13:29:23 -04:00
parent f14454b4c8
commit b1a93161c0
22 changed files with 719 additions and 192 deletions

View File

@@ -41,10 +41,18 @@ class ScheduleGenerator:
Idempotent generation of airings for `target_date`.
Returns the number of new Airing rows created.
"""
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
template = self._get_template()
if not template:
return 0
# Resolve the template's local timezone (fall back to UTC)
try:
local_tz = ZoneInfo(template.timezone_name or 'UTC')
except (ZoneInfoNotFoundError, Exception):
local_tz = ZoneInfo('UTC')
target_weekday_bit = 1 << target_date.weekday()
blocks = template.scheduleblock_set.all().order_by('start_local_time')
airings_created = 0
@@ -53,10 +61,14 @@ class ScheduleGenerator:
if not (block.day_of_week_mask & target_weekday_bit):
continue
start_dt = datetime.combine(target_date, block.start_local_time, tzinfo=timezone.utc)
end_dt = datetime.combine(target_date, block.end_local_time, tzinfo=timezone.utc)
# Convert local block times to UTC-aware datetimes
start_local = datetime.combine(target_date, block.start_local_time, tzinfo=local_tz)
end_local = datetime.combine(target_date, block.end_local_time, tzinfo=local_tz)
                # Midnight-wrap support (e.g. 23:00–02:00)
start_dt = start_local.astimezone(timezone.utc)
end_dt = end_local.astimezone(timezone.utc)
                # Midnight-wrap support (e.g. 23:00–02:00 local)
if end_dt <= start_dt:
end_dt += timedelta(days=1)
@@ -81,12 +93,18 @@ class ScheduleGenerator:
if latest_prior_airing and latest_prior_airing.ends_at > start_dt:
actual_start_dt = latest_prior_airing.ends_at
# If the prior block ran all the way through this block's window, skip
if actual_start_dt >= end_dt:
continue
airings_created += self._fill_block(
template, block, actual_start_dt, end_dt, available_items
)
return airings_created
# ------------------------------------------------------------------
# Helpers
# ------------------------------------------------------------------
@@ -248,13 +266,21 @@ class ScheduleGenerator:
logger = logging.getLogger(__name__)
for original_airing in airings:
# 1. Fetch available downloaded items for this block
safe_items = self._get_weighted_items(original_airing.schedule_block, require_downloaded=True)
if not safe_items:
logger.error(f"Cannot replace airing {original_airing.id}: No downloaded items available for block {original_airing.schedule_block.name}")
continue
# 1. First check if the channel has a dedicated error fallback collection
safe_items = []
if getattr(self.channel, 'fallback_collection', None):
safe_items = list(self.channel.fallback_collection.media_items.exclude(
cached_file_path__isnull=True,
media_source__source_type__in=['youtube', 'youtube_channel', 'youtube_playlist']
))
# 2. If no fallback collection or it yielded no valid items, try block sources
if not safe_items:
safe_items = self._get_weighted_items(original_airing.schedule_block, require_downloaded=True)
if not safe_items:
logger.error(f"Cannot replace airing {original_airing.id}: No downloaded items available for fallback or block {original_airing.schedule_block.name}")
continue
# 2. Pick a random valid fallback item
fallback_item = random.choice(safe_items)
old_duration = original_airing.ends_at - original_airing.starts_at

View File

@@ -207,6 +207,21 @@ def download_for_airing(media_item: MediaItem) -> Path:
logger.info("cache hit: %s already at %s", video_id, existing)
return existing
from django.core.cache import cache
def progress_hook(d):
if d['status'] == 'downloading':
# Note: _percent_str includes ANSI escape codes sometimes, but yt_dlp usually cleans it if not a tty
pct = d.get('_percent_str', '').strip()
# Clean ANSI just in case
import re
pct_clean = re.sub(r'\x1b\[[0-9;]*m', '', pct).strip()
if pct_clean:
# Store the string "xx.x%" into Django cache. Expire after 5 mins so it cleans itself up.
cache.set(f"yt_progress_{video_id}", pct_clean, timeout=300)
elif d['status'] == 'finished':
cache.set(f"yt_progress_{video_id}", "100%", timeout=300)
ydl_opts = {
"quiet": True,
"no_warnings": True,
@@ -218,6 +233,7 @@ def download_for_airing(media_item: MediaItem) -> Path:
# 3. Any pre-muxed webm
# 4. Anything pre-muxed (no merger needed)
"format": "best[ext=mp4][height<=1080]/best[ext=mp4]/best[ext=webm]/best",
"progress_hooks": [progress_hook],
}
url = media_item.file_path # URL stored here by sync_source