feat(scheduler): prevent block overlaps and replace undownloaded airings with cached fallbacks

This commit is contained in:
2026-03-09 08:26:45 -04:00
parent f37382d2b8
commit f14454b4c8
12 changed files with 598 additions and 62 deletions

View File

@@ -46,7 +46,7 @@ class ScheduleGenerator:
return 0
target_weekday_bit = 1 << target_date.weekday()
blocks = template.scheduleblock_set.all()
blocks = template.scheduleblock_set.all().order_by('start_local_time')
airings_created = 0
for block in blocks:
@@ -60,7 +60,7 @@ class ScheduleGenerator:
if end_dt <= start_dt:
end_dt += timedelta(days=1)
# Clear existing airings in this window (idempotency)
# Clear existing airings whose start time is within this block's window
Airing.objects.filter(
channel=self.channel,
starts_at__gte=start_dt,
@@ -71,8 +71,18 @@ class ScheduleGenerator:
if not available_items:
continue
# Prevent overlaps: ensure we don't start before the end of the previous block's overrun
latest_prior_airing = Airing.objects.filter(
channel=self.channel,
starts_at__lt=start_dt
).order_by('-ends_at').first()
actual_start_dt = start_dt
if latest_prior_airing and latest_prior_airing.ends_at > start_dt:
actual_start_dt = latest_prior_airing.ends_at
airings_created += self._fill_block(
template, block, start_dt, end_dt, available_items
template, block, actual_start_dt, end_dt, available_items
)
return airings_created
@@ -88,14 +98,20 @@ class ScheduleGenerator:
).order_by('-priority')
return qs.first()
def _get_weighted_items(self, block: ScheduleBlock) -> list:
def _get_weighted_items(self, block: ScheduleBlock, require_downloaded: bool = False) -> list:
"""
Build a weighted pool of MediaItems respecting ChannelSourceRule.
If require_downloaded is True, strictly exclude items from YouTube sources
that have not yet been downloaded (cached_file_path is null).
Returns a flat list with items duplicated according to their effective
weight (rounded to nearest int, min 1) so random.choice() gives the
right probability distribution without needing numpy.
"""
if block.block_type == ScheduleBlock.BlockType.OFF_AIR:
return []
rules = list(
ChannelSourceRule.objects.filter(channel=self.channel)
.select_related('media_source')
@@ -109,6 +125,10 @@ class ScheduleGenerator:
source_weights: dict[int, float] = {}
for rule in rules:
# If a rule has a label, it only applies if this block's name matches
if rule.schedule_block_label and rule.schedule_block_label != block.name:
continue
sid = rule.media_source_id
mode = rule.rule_mode
w = float(rule.weight or 1.0)
@@ -148,6 +168,14 @@ class ScheduleGenerator:
if block.default_genre:
base_qs = base_qs.filter(genres=block.default_genre)
# Enforce downloaded requirement for emergency replacements
if require_downloaded:
from django.db.models import Q
from core.services.youtube import YOUTUBE_SOURCE_TYPES
base_qs = base_qs.exclude(
Q(media_source__source_type__in=YOUTUBE_SOURCE_TYPES) & Q(cached_file_path__isnull=True)
)
items = list(base_qs)
if not items:
return []
@@ -208,3 +236,51 @@ class ScheduleGenerator:
created += 1
return created
def replace_undownloaded_airings(self, airings: list[Airing]) -> None:
    """
    Replace each given Airing's media_item with one guaranteed to be
    playable (downloaded to cache), then ripple-shift all later airings
    on the channel by the runtime difference.

    Intended for airings that failed to download or are too close to
    airtime without a valid cache file. Airings whose block has no
    downloaded candidates are logged and skipped. Each replacement and
    its ripple shift are committed atomically so a crash mid-shift
    cannot persist an overlapping schedule.
    """
    import logging

    from django.db import transaction
    from django.db.models import F

    logger = logging.getLogger(__name__)
    for original_airing in airings:
        # Earlier iterations may already have ripple-shifted this airing
        # in the DB; re-read it so duration arithmetic uses current times
        # instead of the stale in-memory snapshot.
        original_airing.refresh_from_db()
        # 1. Fetch available downloaded items for this block.
        safe_items = self._get_weighted_items(
            original_airing.schedule_block, require_downloaded=True
        )
        if not safe_items:
            logger.error(
                "Cannot replace airing %s: No downloaded items available for block %s",
                original_airing.id,
                original_airing.schedule_block.name,
            )
            continue
        # 2. Pick a random valid fallback item (safe_items is already
        #    weight-expanded, so plain choice() respects the weights).
        fallback_item = random.choice(safe_items)
        old_duration = original_airing.ends_at - original_airing.starts_at
        # Guard against missing/zero runtime: default 30 min, minimum 1 s.
        new_duration = timedelta(
            seconds=max(fallback_item.runtime_seconds or 1800, 1)
        )
        delta = new_duration - old_duration
        with transaction.atomic():
            original_airing.media_item = fallback_item
            original_airing.source_reason = 'recovery'
            original_airing.ends_at = original_airing.starts_at + new_duration
            original_airing.save(
                update_fields=['media_item', 'source_reason', 'ends_at']
            )
            # 3. Ripple-shift everything that started at/after the
            #    original end time — one UPDATE with F() expressions
            #    instead of a per-row save loop.
            if delta:
                Airing.objects.filter(
                    channel=self.channel,
                    starts_at__gte=original_airing.starts_at + old_duration,
                ).exclude(id=original_airing.id).update(
                    starts_at=F('starts_at') + delta,
                    ends_at=F('ends_at') + delta,
                )
        logger.info(
            "Replaced airing %s with '%s' (diff: %s)",
            original_airing.id,
            fallback_item.title,
            delta,
        )