feat(scheduling): add channel filter to run_cache, repeat-gap cooldowns to schedule generation, and source-rule enforcement to sync_source

This commit is contained in:
2026-03-10 08:39:28 -04:00
parent b1a93161c0
commit af3076342a
18 changed files with 826 additions and 38 deletions

View File

@@ -16,11 +16,13 @@ from core.services.youtube import download_for_airing, YOUTUBE_SOURCE_TYPES
logger = logging.getLogger(__name__)
def run_cache(hours: int = 24, prune_only: bool = False) -> dict:
def run_cache(hours: int = 24, prune_only: bool = False, channel_id: int | None = None) -> dict:
"""
Scan Airings in the next `hours` hours, download any uncached YouTube
videos, and prune stale local files.
If `channel_id` is provided, only process airings for that specific channel.
Returns a summary dict suitable for JSON serialization.
"""
now = timezone.now()
@@ -33,11 +35,11 @@ def run_cache(hours: int = 24, prune_only: bool = False) -> dict:
return {"pruned": pruned, "downloaded": 0, "already_cached": 0, "failed": 0, "items": []}
# ── Find upcoming and currently playing YouTube-backed airings ──────────
upcoming = (
Airing.objects
.filter(ends_at__gt=now, starts_at__lte=window_end)
.select_related("media_item__media_source")
)
qs = Airing.objects.filter(ends_at__gt=now, starts_at__lte=window_end)
if channel_id is not None:
qs = qs.filter(channel_id=channel_id)
upcoming = qs.select_related("media_item__media_source")
youtube_items: dict[int, MediaItem] = {}
downloaded = already_cached = failed = 0

View File

@@ -56,6 +56,24 @@ class ScheduleGenerator:
target_weekday_bit = 1 << target_date.weekday()
blocks = template.scheduleblock_set.all().order_by('start_local_time')
airings_created = 0
# Build last_played mapping for the repeat gap
from core.models import ChannelSourceRule
rules = ChannelSourceRule.objects.filter(channel=self.channel).select_related('media_source')
max_gap_hours = 0
for rule in rules:
if rule.media_source and rule.media_source.min_repeat_gap_hours:
max_gap_hours = max(max_gap_hours, rule.media_source.min_repeat_gap_hours)
last_played_times = {}
if max_gap_hours > 0:
past_dt = datetime.combine(target_date, datetime.min.time(), tzinfo=local_tz).astimezone(timezone.utc) - timedelta(hours=max_gap_hours)
past_airings = Airing.objects.filter(
channel=self.channel,
starts_at__gte=past_dt
).order_by('starts_at')
for a in past_airings:
last_played_times[a.media_item_id] = a.starts_at
for block in blocks:
if not (block.day_of_week_mask & target_weekday_bit):
@@ -98,7 +116,7 @@ class ScheduleGenerator:
continue
airings_created += self._fill_block(
template, block, actual_start_dt, end_dt, available_items
template, block, actual_start_dt, end_dt, available_items, last_played_times
)
return airings_created
@@ -220,17 +238,41 @@ class ScheduleGenerator:
start_dt: datetime,
end_dt: datetime,
items: list,
last_played_times: dict[int, datetime] = None,
) -> int:
"""Fill start_dt→end_dt with sequential Airings, cycling through items."""
cursor = start_dt
idx = 0
created = 0
batch = uuid.uuid4()
if last_played_times is None:
last_played_times = {}
while cursor < end_dt:
item = items[idx % len(items)]
idx += 1
# Look ahead to find the first item that respects its cooldown rules
valid_item = None
items_checked = 0
while items_checked < len(items):
candidate = items[idx % len(items)]
idx += 1
items_checked += 1
# Check cooldown gap
gap_hours = candidate.media_source.min_repeat_gap_hours if candidate.media_source else None
if gap_hours:
last_played = last_played_times.get(candidate.id)
if last_played:
if (cursor - last_played).total_seconds() < gap_hours * 3600:
continue # skip, hasn't been long enough
valid_item = candidate
break
if not valid_item:
# If everything in the pool is currently cooling down, fallback to ignoring cooldowns
valid_item = items[(idx - 1) % len(items)]
item = valid_item
duration = timedelta(seconds=max(item.runtime_seconds or 1800, 1))
# Don't let a single item overshoot the end by more than its own length
@@ -249,6 +291,8 @@ class ScheduleGenerator:
source_reason="template",
generation_batch_uuid=batch,
)
last_played_times[item.id] = cursor
cursor += duration
created += 1

View File

@@ -133,9 +133,31 @@ def sync_source(media_source: MediaSource, max_videos: int | None = None) -> dic
description = entry.get("description") or ""
release_year = None
upload_date = entry.get("upload_date") # "YYYYMMDD"
if upload_date and len(upload_date) >= 4:
# Enforce Source Rules
if media_source.min_video_length_seconds is not None:
if duration < media_source.min_video_length_seconds:
skipped += 1
continue
if media_source.max_video_length_seconds is not None:
if duration > media_source.max_video_length_seconds:
skipped += 1
continue
if upload_date and len(upload_date) >= 8:
try:
release_year = int(upload_date[:4])
year = int(upload_date[0:4])
month = int(upload_date[4:6])
day = int(upload_date[6:8])
from datetime import date
video_date = date(year, month, day)
release_year = year
if media_source.max_age_days is not None:
age_days = (date.today() - video_date).days
if age_days > media_source.max_age_days:
skipped += 1
continue
except ValueError:
pass