feat(main): main
This commit is contained in:
@@ -39,11 +39,13 @@ class ChannelSourceRuleSchema(Schema):
|
||||
source_name: str
|
||||
rule_mode: str
|
||||
weight: float
|
||||
schedule_block_label: Optional[str] = None
|
||||
|
||||
class ChannelSourceAssignSchema(Schema):
|
||||
source_id: int
|
||||
rule_mode: str = 'allow' # allow | prefer | avoid | block
|
||||
weight: float = 1.0
|
||||
schedule_block_label: Optional[str] = None
|
||||
|
||||
class AiringSchema(Schema):
|
||||
id: int
|
||||
@@ -58,7 +60,21 @@ class AiringSchema(Schema):
|
||||
def from_airing(airing) -> 'AiringSchema':
|
||||
media_path = None
|
||||
if airing.media_item:
|
||||
raw_path = airing.media_item.cached_file_path or airing.media_item.file_path
|
||||
item = airing.media_item
|
||||
|
||||
# 1. Determine if this item is from a YouTube source
|
||||
is_youtube = False
|
||||
if item.media_source and item.media_source.source_type in ['youtube', 'youtube_channel', 'youtube_playlist']:
|
||||
is_youtube = True
|
||||
|
||||
# 2. Strict signaling: If YouTube, we MUST have it downloaded
|
||||
if is_youtube:
|
||||
raw_path = item.cached_file_path
|
||||
# If cached_file_path is None, raw_path is None, and media_path remains None
|
||||
else:
|
||||
# Fallback for generic local files/links
|
||||
raw_path = item.cached_file_path or item.file_path
|
||||
|
||||
if raw_path:
|
||||
if raw_path.startswith("http://") or raw_path.startswith("https://"):
|
||||
media_path = raw_path
|
||||
@@ -133,6 +149,7 @@ def list_channel_sources(request, channel_id: int):
|
||||
source_name=r.media_source.name,
|
||||
rule_mode=r.rule_mode,
|
||||
weight=float(r.weight),
|
||||
schedule_block_label=r.schedule_block_label,
|
||||
)
|
||||
for r in rules
|
||||
]
|
||||
@@ -146,6 +163,7 @@ def assign_source_to_channel(request, channel_id: int, payload: ChannelSourceAss
|
||||
media_source=source,
|
||||
rule_mode=payload.rule_mode,
|
||||
weight=payload.weight,
|
||||
schedule_block_label=payload.schedule_block_label,
|
||||
)
|
||||
return 201, ChannelSourceRuleSchema(
|
||||
id=rule.id,
|
||||
@@ -153,6 +171,7 @@ def assign_source_to_channel(request, channel_id: int, payload: ChannelSourceAss
|
||||
source_name=source.name,
|
||||
rule_mode=rule.rule_mode,
|
||||
weight=float(rule.weight),
|
||||
schedule_block_label=rule.schedule_block_label,
|
||||
)
|
||||
|
||||
@router.delete("/{channel_id}/sources/{rule_id}", response={204: None})
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from ninja import Router, Schema
|
||||
from typing import List, Optional
|
||||
from datetime import date, time
|
||||
from core.models import ScheduleTemplate, Channel, ScheduleBlock
|
||||
from django.shortcuts import get_object_or_404
|
||||
from datetime import date
|
||||
@@ -27,6 +28,28 @@ class ScheduleTemplateCreateSchema(Schema):
|
||||
is_active: bool = True
|
||||
channel_id: int
|
||||
|
||||
class ScheduleBlockSchema(Schema):
|
||||
id: int
|
||||
schedule_template_id: int
|
||||
name: str
|
||||
block_type: str
|
||||
start_local_time: time
|
||||
end_local_time: time
|
||||
day_of_week_mask: int
|
||||
spills_past_midnight: bool
|
||||
target_content_rating: Optional[int] = None
|
||||
default_genre_id: Optional[int] = None
|
||||
|
||||
class ScheduleBlockCreateSchema(Schema):
|
||||
schedule_template_id: int
|
||||
name: str
|
||||
block_type: str
|
||||
start_local_time: time
|
||||
end_local_time: time
|
||||
day_of_week_mask: int
|
||||
spills_past_midnight: bool = False
|
||||
target_content_rating: Optional[int] = None
|
||||
|
||||
@router.get("/template/", response=List[ScheduleTemplateSchema])
|
||||
def list_schedule_templates(request):
|
||||
return ScheduleTemplate.objects.all()
|
||||
@@ -92,3 +115,29 @@ def generate_schedule_today(request, channel_id: int):
|
||||
generator = ScheduleGenerator(channel=channel)
|
||||
airings_created = generator.generate_for_date(date.today())
|
||||
return {"status": "success", "airings_created": airings_created}
|
||||
|
||||
@router.get("/template/{template_id}/blocks", response=List[ScheduleBlockSchema])
|
||||
def list_schedule_blocks(request, template_id: int):
|
||||
template = get_object_or_404(ScheduleTemplate, id=template_id)
|
||||
return template.scheduleblock_set.all().order_by('start_local_time')
|
||||
|
||||
@router.post("/block/", response={201: ScheduleBlockSchema})
|
||||
def create_schedule_block(request, payload: ScheduleBlockCreateSchema):
|
||||
template = get_object_or_404(ScheduleTemplate, id=payload.schedule_template_id)
|
||||
block = ScheduleBlock.objects.create(
|
||||
schedule_template=template,
|
||||
name=payload.name,
|
||||
block_type=payload.block_type,
|
||||
start_local_time=payload.start_local_time,
|
||||
end_local_time=payload.end_local_time,
|
||||
day_of_week_mask=payload.day_of_week_mask,
|
||||
spills_past_midnight=payload.spills_past_midnight,
|
||||
target_content_rating=payload.target_content_rating,
|
||||
)
|
||||
return 201, block
|
||||
|
||||
@router.delete("/block/{block_id}", response={204: None})
|
||||
def delete_schedule_block(request, block_id: int):
|
||||
block = get_object_or_404(ScheduleBlock, id=block_id)
|
||||
block.delete()
|
||||
return 204, None
|
||||
|
||||
56
core/management/commands/run_cache_worker.py
Normal file
56
core/management/commands/run_cache_worker.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
management command: run_cache_worker
|
||||
|
||||
Runs continuously in the background to automatically download and cache
|
||||
upcoming programming for the next 24 hours. Intended to run as a daemon
|
||||
or Docker service.
|
||||
"""
|
||||
|
||||
import time
|
||||
import logging
|
||||
from django.core.management.base import BaseCommand
|
||||
from core.services.cache import run_cache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Run the 24-hour ahead cache worker continuously in the background."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--interval",
|
||||
type=int,
|
||||
default=600,
|
||||
help="Interval in seconds between cache runs (default: 600s/10m).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--hours",
|
||||
type=int,
|
||||
default=24,
|
||||
help="How many hours ahead to scan for upcoming airings (default: 24).",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
interval = options["interval"]
|
||||
hours = options["hours"]
|
||||
|
||||
self.stdout.write(self.style.SUCCESS(f"Starting continuous cache worker (interval: {interval}s, ahead: {hours}h)"))
|
||||
|
||||
while True:
|
||||
try:
|
||||
self.stdout.write(f"▶ Running background cache worker (window: {hours}h)")
|
||||
result = run_cache(hours=hours, prune_only=False)
|
||||
|
||||
if result["downloaded"] > 0 or result["pruned"] > 0 or result["failed"] > 0:
|
||||
self.stdout.write(self.style.SUCCESS(f" 🗑 Pruned: {result['pruned']}"))
|
||||
self.stdout.write(self.style.SUCCESS(f" ↓ Downloaded: {result['downloaded']}"))
|
||||
self.stdout.write(self.style.SUCCESS(f" ✓ Already cached: {result['already_cached']}"))
|
||||
if result["failed"]:
|
||||
self.stderr.write(self.style.ERROR(f" ✗ Failed: {result['failed']}"))
|
||||
|
||||
except Exception as e:
|
||||
self.stderr.write(self.style.ERROR(f"Error in cache worker loop: {e}"))
|
||||
logger.error(f"Error in cache worker loop: {e}")
|
||||
|
||||
# Sleep until next interval
|
||||
time.sleep(interval)
|
||||
@@ -0,0 +1,34 @@
|
||||
# Generated by Django 6.0.3 on 2026-03-08 22:07
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0002_mediaitem_cache_expires_at_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="channelsourcerule",
|
||||
name="schedule_block_label",
|
||||
field=models.CharField(blank=True, max_length=255, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scheduleblock",
|
||||
name="target_content_rating",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
choices=[
|
||||
(1, "TV-Y / All Children"),
|
||||
(2, "TV-Y7 / Directed to Older Children"),
|
||||
(3, "TV-G / General Audience"),
|
||||
(4, "TV-PG / Parental Guidance Suggested"),
|
||||
(5, "TV-14 / Parents Strongly Cautioned"),
|
||||
(6, "TV-MA / Mature Audience Only"),
|
||||
],
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -255,6 +255,7 @@ class ChannelSourceRule(models.Model):
|
||||
channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
|
||||
media_source = models.ForeignKey(MediaSource, on_delete=models.CASCADE, blank=True, null=True)
|
||||
media_collection = models.ForeignKey(MediaCollection, on_delete=models.CASCADE, blank=True, null=True)
|
||||
schedule_block_label = models.CharField(max_length=255, blank=True, null=True)
|
||||
|
||||
class RuleMode(models.TextChoices):
|
||||
ALLOW = 'allow', 'Allow'
|
||||
@@ -347,6 +348,16 @@ class ScheduleBlock(models.Model):
|
||||
end_local_time = models.TimeField()
|
||||
day_of_week_mask = models.SmallIntegerField() # 1 to 127
|
||||
spills_past_midnight = models.BooleanField(default=False)
|
||||
|
||||
class TargetRating(models.IntegerChoices):
|
||||
TV_Y = 1, 'TV-Y / All Children'
|
||||
TV_Y7 = 2, 'TV-Y7 / Directed to Older Children'
|
||||
TV_G = 3, 'TV-G / General Audience'
|
||||
TV_PG = 4, 'TV-PG / Parental Guidance Suggested'
|
||||
TV_14 = 5, 'TV-14 / Parents Strongly Cautioned'
|
||||
TV_MA = 6, 'TV-MA / Mature Audience Only'
|
||||
|
||||
target_content_rating = models.IntegerField(choices=TargetRating.choices, blank=True, null=True)
|
||||
default_genre = models.ForeignKey(Genre, on_delete=models.SET_NULL, blank=True, null=True)
|
||||
min_content_rating = models.ForeignKey(ContentRating, on_delete=models.SET_NULL, blank=True, null=True, related_name='+')
|
||||
max_content_rating = models.ForeignKey(ContentRating, on_delete=models.SET_NULL, blank=True, null=True, related_name='+')
|
||||
|
||||
@@ -40,44 +40,66 @@ def run_cache(hours: int = 24, prune_only: bool = False) -> dict:
|
||||
)
|
||||
|
||||
youtube_items: dict[int, MediaItem] = {}
|
||||
for airing in upcoming:
|
||||
item = airing.media_item
|
||||
if item.media_source and item.media_source.source_type in YOUTUBE_SOURCE_TYPES:
|
||||
youtube_items[item.pk] = item
|
||||
|
||||
downloaded = already_cached = failed = 0
|
||||
items_status = []
|
||||
|
||||
for airing in upcoming:
|
||||
item = airing.media_item
|
||||
|
||||
# Determine if we are inside the 1-hour critical safety window
|
||||
time_until_airing = airing.starts_at - now
|
||||
in_safety_window = time_until_airing.total_seconds() < 3600
|
||||
|
||||
if item.media_source and item.media_source.source_type in YOUTUBE_SOURCE_TYPES:
|
||||
youtube_items[item.pk] = item
|
||||
|
||||
# Skip if already cached
|
||||
if item.cached_file_path and pathlib.Path(item.cached_file_path).exists():
|
||||
already_cached += 1
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "cached",
|
||||
"path": item.cached_file_path,
|
||||
})
|
||||
continue
|
||||
|
||||
# If in the 1-hour safety valve window, DO NOT download. Replace the airing.
|
||||
if in_safety_window:
|
||||
logger.warning(f"Airing {airing.id} ({item.title}) is < 1h away and not cached! Triggering emergency replacement.")
|
||||
from core.services.scheduler import ScheduleGenerator
|
||||
generator = ScheduleGenerator(channel=airing.channel)
|
||||
try:
|
||||
generator.replace_undownloaded_airings([airing])
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "replaced",
|
||||
"error": "Not downloaded in time",
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Emergency replacement failed for airing {airing.id}: {e}")
|
||||
continue
|
||||
|
||||
for item in youtube_items.values():
|
||||
# Skip if already cached
|
||||
if item.cached_file_path and pathlib.Path(item.cached_file_path).exists():
|
||||
already_cached += 1
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "cached",
|
||||
"path": item.cached_file_path,
|
||||
})
|
||||
continue
|
||||
|
||||
try:
|
||||
local_path = download_for_airing(item)
|
||||
downloaded += 1
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "downloaded",
|
||||
"path": str(local_path),
|
||||
})
|
||||
except Exception as exc:
|
||||
failed += 1
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "failed",
|
||||
"error": str(exc),
|
||||
})
|
||||
logger.error("download_for_airing(%s) failed: %s", item.pk, exc)
|
||||
# Otherwise, attempt download normally
|
||||
try:
|
||||
local_path = download_for_airing(item)
|
||||
downloaded += 1
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "downloaded",
|
||||
"path": str(local_path),
|
||||
})
|
||||
except Exception as exc:
|
||||
failed += 1
|
||||
items_status.append({
|
||||
"id": item.pk,
|
||||
"title": item.title,
|
||||
"status": "failed",
|
||||
"error": str(exc),
|
||||
})
|
||||
logger.error("download_for_airing(%s) failed: %s", item.pk, exc)
|
||||
|
||||
logger.info(
|
||||
"run_cache(hours=%d): pruned=%d downloaded=%d cached=%d failed=%d",
|
||||
|
||||
@@ -46,7 +46,7 @@ class ScheduleGenerator:
|
||||
return 0
|
||||
|
||||
target_weekday_bit = 1 << target_date.weekday()
|
||||
blocks = template.scheduleblock_set.all()
|
||||
blocks = template.scheduleblock_set.all().order_by('start_local_time')
|
||||
airings_created = 0
|
||||
|
||||
for block in blocks:
|
||||
@@ -60,7 +60,7 @@ class ScheduleGenerator:
|
||||
if end_dt <= start_dt:
|
||||
end_dt += timedelta(days=1)
|
||||
|
||||
# Clear existing airings in this window (idempotency)
|
||||
# Clear existing airings whose start time is within this block's window
|
||||
Airing.objects.filter(
|
||||
channel=self.channel,
|
||||
starts_at__gte=start_dt,
|
||||
@@ -71,8 +71,18 @@ class ScheduleGenerator:
|
||||
if not available_items:
|
||||
continue
|
||||
|
||||
# Prevent overlaps: ensure we don't start before the end of the previous block's overrun
|
||||
latest_prior_airing = Airing.objects.filter(
|
||||
channel=self.channel,
|
||||
starts_at__lt=start_dt
|
||||
).order_by('-ends_at').first()
|
||||
|
||||
actual_start_dt = start_dt
|
||||
if latest_prior_airing and latest_prior_airing.ends_at > start_dt:
|
||||
actual_start_dt = latest_prior_airing.ends_at
|
||||
|
||||
airings_created += self._fill_block(
|
||||
template, block, start_dt, end_dt, available_items
|
||||
template, block, actual_start_dt, end_dt, available_items
|
||||
)
|
||||
|
||||
return airings_created
|
||||
@@ -88,14 +98,20 @@ class ScheduleGenerator:
|
||||
).order_by('-priority')
|
||||
return qs.first()
|
||||
|
||||
def _get_weighted_items(self, block: ScheduleBlock) -> list:
|
||||
def _get_weighted_items(self, block: ScheduleBlock, require_downloaded: bool = False) -> list:
|
||||
"""
|
||||
Build a weighted pool of MediaItems respecting ChannelSourceRule.
|
||||
|
||||
If require_downloaded is True, strictly exclude items from YouTube sources
|
||||
that have not yet been downloaded (cached_file_path is null).
|
||||
|
||||
Returns a flat list with items duplicated according to their effective
|
||||
weight (rounded to nearest int, min 1) so random.choice() gives the
|
||||
right probability distribution without needing numpy.
|
||||
"""
|
||||
if block.block_type == ScheduleBlock.BlockType.OFF_AIR:
|
||||
return []
|
||||
|
||||
rules = list(
|
||||
ChannelSourceRule.objects.filter(channel=self.channel)
|
||||
.select_related('media_source')
|
||||
@@ -109,6 +125,10 @@ class ScheduleGenerator:
|
||||
source_weights: dict[int, float] = {}
|
||||
|
||||
for rule in rules:
|
||||
# If a rule has a label, it only applies if this block's name matches
|
||||
if rule.schedule_block_label and rule.schedule_block_label != block.name:
|
||||
continue
|
||||
|
||||
sid = rule.media_source_id
|
||||
mode = rule.rule_mode
|
||||
w = float(rule.weight or 1.0)
|
||||
@@ -148,6 +168,14 @@ class ScheduleGenerator:
|
||||
if block.default_genre:
|
||||
base_qs = base_qs.filter(genres=block.default_genre)
|
||||
|
||||
# Enforce downloaded requirement for emergency replacements
|
||||
if require_downloaded:
|
||||
from django.db.models import Q
|
||||
from core.services.youtube import YOUTUBE_SOURCE_TYPES
|
||||
base_qs = base_qs.exclude(
|
||||
Q(media_source__source_type__in=YOUTUBE_SOURCE_TYPES) & Q(cached_file_path__isnull=True)
|
||||
)
|
||||
|
||||
items = list(base_qs)
|
||||
if not items:
|
||||
return []
|
||||
@@ -208,3 +236,51 @@ class ScheduleGenerator:
|
||||
created += 1
|
||||
|
||||
return created
|
||||
|
||||
def replace_undownloaded_airings(self, airings: list[Airing]):
|
||||
"""
|
||||
Takes a list of specific Airings that failed to download or are
|
||||
too close to airtime without a valid cache file. Replaces the
|
||||
underlying media_item with one guaranteed to be playable, and
|
||||
ripple-shifts all following airings on the channel by the duration diff.
|
||||
"""
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
for original_airing in airings:
|
||||
# 1. Fetch available downloaded items for this block
|
||||
safe_items = self._get_weighted_items(original_airing.schedule_block, require_downloaded=True)
|
||||
if not safe_items:
|
||||
logger.error(f"Cannot replace airing {original_airing.id}: No downloaded items available for block {original_airing.schedule_block.name}")
|
||||
continue
|
||||
|
||||
|
||||
# 2. Pick a random valid fallback item
|
||||
fallback_item = random.choice(safe_items)
|
||||
old_duration = original_airing.ends_at - original_airing.starts_at
|
||||
|
||||
# Update the original airing to reference the new item
|
||||
original_airing.media_item = fallback_item
|
||||
original_airing.source_reason = 'recovery'
|
||||
|
||||
new_duration = timedelta(seconds=max(fallback_item.runtime_seconds or 1800, 1))
|
||||
original_airing.ends_at = original_airing.starts_at + new_duration
|
||||
original_airing.save(update_fields=['media_item', 'source_reason', 'ends_at'])
|
||||
|
||||
logger.info(f"Replaced airing {original_airing.id} with '{fallback_item.title}' (diff: {new_duration - old_duration})")
|
||||
|
||||
# 3. Ripple shift downstream airings accurately
|
||||
delta = new_duration - old_duration
|
||||
|
||||
if delta.total_seconds() != 0:
|
||||
# Find all airings strictly after this one on the same channel
|
||||
downstream = Airing.objects.filter(
|
||||
channel=self.channel,
|
||||
starts_at__gte=original_airing.starts_at + old_duration
|
||||
).exclude(id=original_airing.id).order_by('starts_at')
|
||||
|
||||
# Apply shift
|
||||
for later_airing in downstream:
|
||||
later_airing.starts_at += delta
|
||||
later_airing.ends_at += delta
|
||||
later_airing.save(update_fields=['starts_at', 'ends_at'])
|
||||
|
||||
@@ -238,7 +238,36 @@ def download_for_airing(media_item: MediaItem) -> Path:
|
||||
|
||||
# Persist the cache location on the model
|
||||
media_item.cached_file_path = str(downloaded_path)
|
||||
media_item.save(update_fields=["cached_file_path"])
|
||||
|
||||
# Extract exact runtime from the cached file using ffprobe-static via Node.js
|
||||
import subprocess
|
||||
import json
|
||||
|
||||
exact_duration = None
|
||||
try:
|
||||
# Resolve ffprobe path from the npm package
|
||||
node_cmd = ["node", "-e", "console.log(require('ffprobe-static').path)"]
|
||||
result = subprocess.run(node_cmd, capture_output=True, text=True, check=True)
|
||||
ffprobe_cmd = result.stdout.strip()
|
||||
|
||||
probe_cmd = [
|
||||
ffprobe_cmd,
|
||||
"-v", "error",
|
||||
"-show_entries", "format=duration",
|
||||
"-of", "default=noprint_wrappers=1:nokey=1",
|
||||
str(downloaded_path)
|
||||
]
|
||||
probe_result = subprocess.run(probe_cmd, capture_output=True, text=True, check=True)
|
||||
exact_duration = float(probe_result.stdout.strip())
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to extract exact runtime for {video_id} using ffprobe: {e}")
|
||||
|
||||
if exact_duration:
|
||||
# Round up to nearest integer to be safe on bounds
|
||||
import math
|
||||
media_item.runtime_seconds = int(math.ceil(exact_duration))
|
||||
|
||||
media_item.save(update_fields=["cached_file_path", "runtime_seconds"])
|
||||
|
||||
logger.info("downloaded %s -> %s", video_id, downloaded_path)
|
||||
logger.info("downloaded %s -> %s (exact runtime: %s)", video_id, downloaded_path, exact_duration)
|
||||
return downloaded_path
|
||||
|
||||
@@ -34,6 +34,13 @@ export const deleteTemplate = async (id) => { await apiClient.delete(`/schedule/
|
||||
export const generateScheduleToday = async (channelId) =>
|
||||
(await apiClient.post(`/schedule/generate-today/${channelId}`)).data;
|
||||
|
||||
export const fetchTemplateBlocks = async (templateId) =>
|
||||
(await apiClient.get(`/schedule/template/${templateId}/blocks`)).data;
|
||||
export const createTemplateBlock = async (payload) =>
|
||||
(await apiClient.post('/schedule/block/', payload)).data;
|
||||
export const deleteTemplateBlock = async (blockId) =>
|
||||
(await apiClient.delete(`/schedule/block/${blockId}`)).data;
|
||||
|
||||
// Legacy – used by guide
|
||||
export const fetchScheduleGenerations = async (channelId) =>
|
||||
(await apiClient.post(`/schedule/generate/${channelId}`)).data;
|
||||
|
||||
@@ -142,6 +142,24 @@ export default function ChannelTuner({ onOpenGuide }) {
|
||||
muted={!isCurrent}
|
||||
loop
|
||||
playsInline
|
||||
onLoadedMetadata={(e) => {
|
||||
const video = e.target;
|
||||
if (currentAiring && currentAiring.starts_at) {
|
||||
const startTime = new Date(currentAiring.starts_at).getTime();
|
||||
const nowTime = Date.now();
|
||||
|
||||
if (nowTime > startTime) {
|
||||
const offsetSeconds = (nowTime - startTime) / 1000;
|
||||
// If the video is shorter than the offset (e.g. repeating a short clip),
|
||||
// modulo the offset by duration to emulate a continuous loop.
|
||||
if (video.duration && video.duration > 0) {
|
||||
video.currentTime = offsetSeconds % video.duration;
|
||||
} else {
|
||||
video.currentTime = offsetSeconds;
|
||||
}
|
||||
}
|
||||
}
|
||||
}}
|
||||
onError={(e) => {
|
||||
if (e.target.src !== chan.fallbackFile) {
|
||||
console.warn(`Video failed to load: ${e.target.src}, falling back.`);
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
fetchChannels, createChannel, deleteChannel, updateChannel,
|
||||
fetchChannelSources, assignSourceToChannel, removeSourceFromChannel,
|
||||
fetchTemplates, createTemplate, deleteTemplate, generateScheduleToday,
|
||||
fetchTemplateBlocks, createTemplateBlock, deleteTemplateBlock,
|
||||
fetchSources, createSource, syncSource, deleteSource,
|
||||
fetchLibraries,
|
||||
fetchDownloadStatus, triggerCacheUpcoming, downloadItem,
|
||||
@@ -156,7 +157,7 @@ function ChannelsTab() {
|
||||
const [channelSources, setChannelSources] = useState({}); // { channelId: [rules] }
|
||||
const [showForm, setShowForm] = useState(false);
|
||||
const [form, setForm] = useState({ name: '', slug: '', channel_number: '', description: '', library_id: '', owner_user_id: '' });
|
||||
const [assignForm, setAssignForm] = useState({ source_id: '', rule_mode: 'allow', weight: 1.0 });
|
||||
const [assignForm, setAssignForm] = useState({ source_id: '', rule_mode: 'allow', weight: 1.0, schedule_block_label: '' });
|
||||
const [syncingId, setSyncingId] = useState(null);
|
||||
const [feedback, setFeedback, ok, err] = useFeedback();
|
||||
|
||||
@@ -210,9 +211,10 @@ function ChannelsTab() {
|
||||
source_id: parseInt(assignForm.source_id),
|
||||
rule_mode: assignForm.rule_mode,
|
||||
weight: parseFloat(assignForm.weight),
|
||||
schedule_block_label: assignForm.schedule_block_label || null,
|
||||
});
|
||||
setChannelSources(cs => ({ ...cs, [channelId]: [...(cs[channelId] || []), rule] }));
|
||||
setAssignForm({ source_id: '', rule_mode: 'allow', weight: 1.0 });
|
||||
setAssignForm({ source_id: '', rule_mode: 'allow', weight: 1.0, schedule_block_label: '' });
|
||||
ok('Source assigned to channel.');
|
||||
} catch { err('Failed to assign source.'); }
|
||||
};
|
||||
@@ -367,6 +369,13 @@ function ChannelsTab() {
|
||||
style={{ width: 60 }}
|
||||
title="Weight (higher = more airings)"
|
||||
/>
|
||||
<input
|
||||
placeholder="Target Block Label (Optional)"
|
||||
value={assignForm.schedule_block_label}
|
||||
onChange={e => setAssignForm(f => ({ ...f, schedule_block_label: e.target.value }))}
|
||||
style={{ flex: 1 }}
|
||||
title="If set, this source will ONLY play during blocks with this exact name"
|
||||
/>
|
||||
<button className="btn-sync sm" onClick={() => handleAssign(ch.id)}>+ Add Rule</button>
|
||||
</div>
|
||||
|
||||
@@ -690,12 +699,35 @@ function SchedulingTab() {
|
||||
const [form, setForm] = useState({ name: '', channel_id: '', timezone_name: 'America/New_York', priority: 10 });
|
||||
const [feedback, setFeedback, ok, err] = useFeedback();
|
||||
|
||||
// Block Editor State
|
||||
const [expandedTmplId, setExpandedTmplId] = useState(null);
|
||||
const [templateBlocks, setTemplateBlocks] = useState({}); // { tmplId: [blocks] }
|
||||
const [blockForm, setBlockForm] = useState({
|
||||
name: 'A Block',
|
||||
block_type: 'PROGRAM',
|
||||
start_local_time: '08:00',
|
||||
end_local_time: '12:00',
|
||||
day_of_week_mask: 127,
|
||||
target_content_rating: ''
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
Promise.all([fetchTemplates(), fetchChannels()])
|
||||
.then(([t, c]) => { setTemplates(t); setChannels(c); })
|
||||
.catch(() => err('Failed to load schedule data'));
|
||||
}, []);
|
||||
|
||||
const toggleExpand = async (tmpl) => {
|
||||
const next = expandedTmplId === tmpl.id ? null : tmpl.id;
|
||||
setExpandedTmplId(next);
|
||||
if (next && !templateBlocks[next]) {
|
||||
try {
|
||||
const blocks = await fetchTemplateBlocks(tmpl.id);
|
||||
setTemplateBlocks(tb => ({ ...tb, [tmpl.id]: blocks }));
|
||||
} catch { err('Failed to load blocks'); }
|
||||
}
|
||||
};
|
||||
|
||||
const handleCreate = async (e) => {
|
||||
e.preventDefault();
|
||||
try {
|
||||
@@ -780,25 +812,105 @@ function SchedulingTab() {
|
||||
|
||||
<div className="settings-row-list">
|
||||
{templates.length === 0 && <EmptyState text="No schedule templates yet. Create one above." />}
|
||||
{templates.map(t => (
|
||||
<div key={t.id} className="settings-row">
|
||||
<div className="row-avatar" style={{ fontSize: '1.2rem' }}>📄</div>
|
||||
<div className="row-info">
|
||||
<strong>{t.name}</strong>
|
||||
<span className="row-sub">{channelName(t.channel_id)} · {t.timezone_name}</span>
|
||||
<span className="row-badges">
|
||||
<span className="badge badge-type">Priority {t.priority}</span>
|
||||
{t.is_active
|
||||
? <span className="badge badge-ok">Active</span>
|
||||
: <span className="badge badge-muted">Inactive</span>
|
||||
}
|
||||
</span>
|
||||
{templates.map(t => {
|
||||
const isExpanded = expandedTmplId === t.id;
|
||||
const blocks = templateBlocks[t.id] || [];
|
||||
return (
|
||||
<div key={t.id} className={`settings-row-expandable ${isExpanded ? 'expanded' : ''}`}>
|
||||
<div className="settings-row" onClick={() => toggleExpand(t)}>
|
||||
<div className="row-avatar" style={{ fontSize: '1.2rem' }}>📄</div>
|
||||
<div className="row-info">
|
||||
<strong>{t.name}</strong>
|
||||
<span className="row-sub">{channelName(t.channel_id)} · {t.timezone_name}</span>
|
||||
<span className="row-badges">
|
||||
<span className="badge badge-type">Priority {t.priority}</span>
|
||||
{t.is_active
|
||||
? <span className="badge badge-ok">Active</span>
|
||||
: <span className="badge badge-muted">Inactive</span>
|
||||
}
|
||||
</span>
|
||||
</div>
|
||||
<div className="row-actions" onClick={e => e.stopPropagation()}>
|
||||
<IconBtn icon="🗑" kind="danger" onClick={() => handleDelete(t)} />
|
||||
<span className="expand-chevron">{isExpanded ? '▲' : '▼'}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{isExpanded && (
|
||||
<div className="channel-expand-panel block-editor" style={{ background: 'rgba(0,0,0,0.1)', borderTop: 'none', padding: '1rem', borderBottomLeftRadius: '6px', borderBottomRightRadius: '6px' }}>
|
||||
<h4 style={{ margin: '0 0 1rem 0', opacity: 0.9 }}>Schedule Blocks</h4>
|
||||
|
||||
{blocks.length === 0 && (
|
||||
<div style={{ fontSize: '0.9rem', opacity: 0.7, marginBottom: '1rem' }}>
|
||||
No blocks defined. By default, PYTV acts as if there is a single 24/7 block. If you define blocks here, you must completely cover the 24 hours of a day to avoid dead air.
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: '0.5rem', marginBottom: '1.5rem' }}>
|
||||
{blocks.map(b => (
|
||||
<div key={b.id} style={{ display: 'flex', gap: '0.5rem', background: '#353b48', padding: '0.5rem', borderRadius: '4px', alignItems: 'center', fontSize: '0.9rem' }}>
|
||||
<strong style={{ minWidth: 100 }}>{b.name}</strong>
|
||||
<span style={{ fontFamily: 'monospace', opacity: 0.8 }}>{b.start_local_time.slice(0,5)} - {b.end_local_time.slice(0,5)}</span>
|
||||
<span className={`badge ${b.block_type === 'OFF_AIR' ? 'badge-warn' : 'badge-ok'}`}>{b.block_type}</span>
|
||||
{b.target_content_rating && <span className="badge badge-type">Rating Tier: {b.target_content_rating}</span>}
|
||||
<div style={{ flex: 1 }} />
|
||||
<IconBtn icon="✕" kind="danger" onClick={async () => {
|
||||
try {
|
||||
await deleteTemplateBlock(b.id);
|
||||
setTemplateBlocks(tb => ({ ...tb, [t.id]: tb[t.id].filter(x => x.id !== b.id) }));
|
||||
ok('Block deleted.');
|
||||
} catch { err('Failed to delete block.'); }
|
||||
}} />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<form className="assign-form" style={{ background: '#2f3640' }} onSubmit={async (e) => {
|
||||
e.preventDefault();
|
||||
if (!blockForm.name || !blockForm.start_local_time || !blockForm.end_local_time) { err('Fill req fields'); return; }
|
||||
try {
|
||||
const nb = await createTemplateBlock({
|
||||
schedule_template_id: t.id,
|
||||
name: blockForm.name,
|
||||
block_type: blockForm.block_type,
|
||||
start_local_time: blockForm.start_local_time,
|
||||
end_local_time: blockForm.end_local_time,
|
||||
day_of_week_mask: parseInt(blockForm.day_of_week_mask),
|
||||
target_content_rating: blockForm.target_content_rating ? parseInt(blockForm.target_content_rating) : null,
|
||||
});
|
||||
setTemplateBlocks(tb => ({ ...tb, [t.id]: [...(tb[t.id] || []), nb] }));
|
||||
ok('Block created.');
|
||||
} catch { err('Failed to create block'); }
|
||||
}}>
|
||||
<div style={{ display: 'flex', gap: '0.5rem', alignItems: 'center' }}>
|
||||
<input placeholder="Block Name (e.g. Morning News)" required style={{ flex: 1 }} value={blockForm.name} onChange={e => setBlockForm(f => ({...f, name: e.target.value}))} />
|
||||
<select value={blockForm.block_type} onChange={e => setBlockForm(f => ({...f, block_type: e.target.value}))}>
|
||||
<option value="PROGRAM">Programming</option>
|
||||
<option value="OFF_AIR">Off Air / Dead Time</option>
|
||||
</select>
|
||||
<input type="time" required value={blockForm.start_local_time} onChange={e => setBlockForm(f => ({...f, start_local_time: e.target.value}))} />
|
||||
<span style={{ opacity: 0.5 }}>to</span>
|
||||
<input type="time" required value={blockForm.end_local_time} onChange={e => setBlockForm(f => ({...f, end_local_time: e.target.value}))} />
|
||||
</div>
|
||||
<div style={{ display: 'flex', gap: '0.5rem', alignItems: 'center', marginTop: '0.5rem' }}>
|
||||
<select value={blockForm.target_content_rating} onChange={e => setBlockForm(f => ({...f, target_content_rating: e.target.value}))}>
|
||||
<option value="">Any content rating</option>
|
||||
<option value="1">TV-Y</option>
|
||||
<option value="2">TV-Y7</option>
|
||||
<option value="3">TV-G</option>
|
||||
<option value="4">TV-PG</option>
|
||||
<option value="5">TV-14</option>
|
||||
<option value="6">TV-MA</option>
|
||||
</select>
|
||||
<button type="submit" className="btn-sync sm">+ Add Block</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className="row-actions">
|
||||
<IconBtn icon="🗑" kind="danger" onClick={() => handleDelete(t)} />
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
103
tests/test_channel_signaling.py
Normal file
103
tests/test_channel_signaling.py
Normal file
@@ -0,0 +1,103 @@
|
||||
import pytest
|
||||
import os
|
||||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
from core.models import Channel, MediaSource, MediaItem, Airing
|
||||
from api.routers.channel import channel_now_playing
|
||||
from django.conf import settings
|
||||
|
||||
@pytest.mark.django_db
def test_channel_signaling_now_playing():
    """
    A fully downloaded (cached) media item must be signaled by the
    now-playing endpoint with a path that maps to a real local file.

    Uses whatever Channel/MediaSource rows exist in the test database and
    skips when none are present.
    """
    channel = Channel.objects.first()
    if not channel:
        pytest.skip("No channels found in test database to test signaling")

    # Clear current airings so the airing we create below is the only candidate.
    Airing.objects.filter(channel=channel).delete()

    source = MediaSource.objects.first()

    # 1. A media item that has been fully downloaded to the local cache.
    good_item = MediaItem.objects.create(
        title="Valid Cached Video",
        media_source=source,
        cached_file_path=os.path.join(settings.MEDIA_ROOT, "test_valid.mp4"),
        runtime_seconds=600,
        is_active=True,
    )

    # Create a dummy file so the cached path actually exists on disk.
    os.makedirs(settings.MEDIA_ROOT, exist_ok=True)
    with open(good_item.cached_file_path, 'w') as f:
        f.write("dummy video data")

    try:
        now = timezone.now()
        Airing.objects.create(
            channel=channel,
            media_item=good_item,
            starts_at=now - timedelta(minutes=5),
            ends_at=now + timedelta(minutes=5),
            slot_kind="program",
        )

        # Call the API function directly; the request argument is unused here.
        response = channel_now_playing(None, channel.id)

        # Assertions
        assert response is not None, "API should return an airing"
        assert response.media_item_title == "Valid Cached Video"
        assert response.media_item_path is not None, "A valid path must be returned"

        # Check that the signaled path maps to a real file on disk.
        if not response.media_item_path.startswith("http"):
            # The API returns a URL path like /media/...; strip MEDIA_URL to
            # recover the filesystem-relative path.
            rel_path = response.media_item_path.replace(settings.MEDIA_URL, "")
            fs_path = os.path.join(settings.MEDIA_ROOT, rel_path)
            assert os.path.exists(fs_path), f"Signaled file {fs_path} does not actually exist!"
    finally:
        # Always remove the dummy file — even when an assertion above fails —
        # so a failed run cannot leave stray files in MEDIA_ROOT.
        if os.path.exists(good_item.cached_file_path):
            os.remove(good_item.cached_file_path)
|
||||
|
||||
@pytest.mark.django_db
def test_channel_signaling_youtube_raw_url():
    """
    An un-downloaded YouTube item (raw watch URL, no cached file) must NOT
    be signaled with that raw URL: an HTML5 <video> element cannot play a
    youtube.com page link. Under strict YouTube signaling the backend
    returns no media path for such items, letting the frontend fall back.
    """
    channel = Channel.objects.first()
    if not channel:
        pytest.skip("No channels found")

    Airing.objects.filter(channel=channel).delete()

    # Strict signaling only applies to YouTube-typed sources.
    source = MediaSource.objects.filter(source_type__icontains='youtube').first()
    if not source:
        pytest.skip("No youtube source found")

    raw_item = MediaItem.objects.create(
        title="Uncached YouTube URL",
        media_source=source,
        file_path="https://www.youtube.com/watch?v=dQw4w9WgXcQ",
        cached_file_path=None,  # never downloaded
        runtime_seconds=600,
        is_active=True,
    )

    now = timezone.now()
    Airing.objects.create(
        channel=channel,
        media_item=raw_item,
        starts_at=now - timedelta(minutes=5),
        ends_at=now + timedelta(minutes=5),
        slot_kind="program",
    )

    response = channel_now_playing(None, channel.id)
    assert response is not None
    # Strict signaling: a YouTube item with no cached file must not leak its
    # raw watch URL (unplayable in an HTML5 <video> tag); the backend signals
    # "nothing playable" instead.
    # NOTE(review): assumes channel_now_playing derives its path via the same
    # strict-YouTube logic as AiringSchema.from_airing — confirm.
    assert response.media_item_path is None, (
        "Uncached YouTube items must not be signaled with a raw watch URL"
    )
|
||||
Reference in New Issue
Block a user