feat(channels): per-channel cache status/download endpoints, media-source filter rules, HTTP Range media serving, and exact playback offset sync

This commit is contained in:
2026-03-10 08:39:28 -04:00
parent b1a93161c0
commit af3076342a
18 changed files with 826 additions and 38 deletions

BIN
.coverage

Binary file not shown.

View File

@@ -58,13 +58,26 @@ class AiringSchema(Schema):
ends_at: datetime
slot_kind: str
status: str
exact_playback_offset_seconds: float = 0.0
@staticmethod
def from_airing(airing) -> 'AiringSchema':
media_path = None
exact_offset = 0.0
# Calculate exactly how far into the video we should be right now
now = timezone.now()
# if the airing hasn't started yet, offset is 0
if now >= airing.starts_at:
exact_offset = (now - airing.starts_at).total_seconds()
if airing.media_item:
item = airing.media_item
# If the item has a known runtime, and we are looping it, modulo the offset
if item.runtime_seconds and item.runtime_seconds > 0:
exact_offset = exact_offset % item.runtime_seconds
# 1. Determine if this item is from a YouTube source
is_youtube = False
if item.media_source and item.media_source.source_type in ['youtube', 'youtube_channel', 'youtube_playlist']:
@@ -102,6 +115,7 @@ class AiringSchema(Schema):
ends_at=airing.ends_at,
slot_kind=airing.slot_kind,
status=airing.status,
exact_playback_offset_seconds=max(0.0, exact_offset)
)
@router.get("/", response=List[ChannelSchema])
@@ -112,7 +126,61 @@ def list_channels(request):
)
class ChannelStatusSchema(Schema):
    """Response shape for the per-channel caching status endpoint."""
    # Number of airings in the upcoming window.
    total_upcoming_airings: int
    # Of those, how many media items already have a cached local file.
    total_cached_airings: int
    # cached / total as a percentage; 100.0 when there are no airings.
    percent_cached: float
    # One dict per item still missing a download (id, title, starts_at).
    missing_items: List[dict]
@router.get("/{channel_id}/status", response=ChannelStatusSchema)
def get_channel_status(request, channel_id: int):
    """Report caching status for a channel's airings in the next 24 hours.

    Returns the number of upcoming airings, how many of them already have a
    cached local file, the cached percentage, and a short description of each
    item that still needs a download.
    """
    channel = get_object_or_404(Channel, id=channel_id)
    now = timezone.now()
    window_end = now + timedelta(hours=24)

    airings = (
        Airing.objects
        .filter(channel=channel, ends_at__gt=now, starts_at__lte=window_end)
        # Also join the media source so missing_items can name it without
        # issuing one extra query per airing.
        .select_related('media_item__media_source')
    )

    total = 0
    cached = 0
    missing = []
    for airing in airings:
        total += 1
        item = airing.media_item
        if item and item.cached_file_path:
            # Intentionally no os.path.exists() check here: this endpoint is
            # polled by the UI and must stay fast.
            cached += 1
        elif item:
            missing.append({
                "id": item.id,
                "title": item.title,
                # The settings UI renders "[source_name] title" for missing
                # items; without this key it displayed "undefined".
                "source_name": item.media_source.name if item.media_source else None,
                "starts_at": airing.starts_at.isoformat(),
            })

    pct = (cached / total * 100.0) if total > 0 else 100.0
    return {
        "total_upcoming_airings": total,
        "total_cached_airings": cached,
        "percent_cached": pct,
        "missing_items": missing,
    }
@router.post("/{channel_id}/download")
def trigger_channel_download(request, channel_id: int):
    """Kick off caching of the next 24 hours of airings for one channel.

    Returns the summary dict produced by the cache service.
    """
    # 404 early if the channel does not exist; the object itself is unused.
    get_object_or_404(Channel, id=channel_id)

    # Function-level import, as in the rest of this module (presumably to
    # avoid a circular import — confirm before hoisting to module level).
    from core.services.cache import run_cache

    return run_cache(hours=24, prune_only=False, channel_id=channel_id)
@router.get("/{channel_id}", response=ChannelSchema)
def get_channel(request, channel_id: int):
    """Fetch a single channel by primary key; 404 if it does not exist."""
    channel = get_object_or_404(Channel, id=channel_id)
    return channel

View File

@@ -41,6 +41,10 @@ class MediaSourceIn(BaseModel):
uri: str
is_active: bool = True
scan_interval_minutes: Optional[int] = None
min_video_length_seconds: Optional[int] = None
max_video_length_seconds: Optional[int] = None
min_repeat_gap_hours: Optional[int] = None
max_age_days: Optional[int] = None
class MediaSourceOut(BaseModel):
id: int
@@ -50,6 +54,10 @@ class MediaSourceOut(BaseModel):
uri: str
is_active: bool
scan_interval_minutes: Optional[int]
min_video_length_seconds: Optional[int]
max_video_length_seconds: Optional[int]
min_repeat_gap_hours: Optional[int]
max_age_days: Optional[int]
last_scanned_at: Optional[datetime]
created_at: datetime
@@ -140,6 +148,23 @@ def delete_source(request, source_id: int):
source.delete()
return 204, None
@router.put("/{source_id}", response=MediaSourceOut)
def update_source(request, source_id: int, payload: MediaSourceIn):
    """Update an existing media source from the request payload."""
    source = get_object_or_404(MediaSource, id=source_id)

    # Copy every editable field straight off the validated payload.
    editable_fields = (
        "name", "source_type", "uri", "library_id", "is_active",
        "scan_interval_minutes", "min_video_length_seconds",
        "max_video_length_seconds", "min_repeat_gap_hours", "max_age_days",
    )
    for field in editable_fields:
        setattr(source, field, getattr(payload, field))

    source.save()
    return source
@router.post("/{source_id}/sync", response=SyncResult)
def trigger_sync(request, source_id: int, max_videos: Optional[int] = None):

View File

@@ -1,3 +1,71 @@
from django.shortcuts import render
import os
import re
from django.conf import settings
from django.http import StreamingHttpResponse, Http404, HttpResponseNotModified, FileResponse
from django.views.static import was_modified_since
from wsgiref.util import FileWrapper
# Create your views here.
def serve_video_with_range(request, path):
    """
    Serve a media file with HTTP Range support. Required for HTML5 video
    seeking in Chrome/Safari using the Django development server.

    Returns 206 with a Content-Range header for satisfiable byte ranges,
    416 when the range starts at or past end-of-file, and a plain 200
    FileResponse when no (or an unparseable) Range header is present.
    """
    clean_path = path.lstrip('/')
    full_path = os.path.normpath(os.path.join(settings.MEDIA_ROOT, clean_path))

    # Security check to prevent directory traversal
    if not full_path.startswith(os.path.normpath(settings.MEDIA_ROOT)):
        raise Http404("Invalid path")
    if not os.path.exists(full_path):
        raise Http404(f"File {path} not found")

    statobj = os.stat(full_path)
    size = statobj.st_size

    # Very simple content type mapping for videos (plus a few image types).
    ext = os.path.splitext(full_path)[1].lower()
    content_type = {
        '.webm': "video/webm",
        '.mkv': "video/x-matroska",
        '.svg': "image/svg+xml",
        '.png': "image/png",
        '.jpg': "image/jpeg",
        '.jpeg': "image/jpeg",
    }.get(ext, "video/mp4")

    range_header = request.META.get('HTTP_RANGE', '').strip()
    if range_header.startswith('bytes='):
        range_match = re.match(r'bytes=(\d+)-(\d*)', range_header)
        if range_match:
            first_byte, last_byte = range_match.groups()
            first_byte = int(first_byte)
            # A range starting at/after EOF is unsatisfiable (RFC 7233 §4.4);
            # previously this fell through and computed a negative length.
            if first_byte >= size:
                resp = StreamingHttpResponse(status=416, content_type=content_type)
                resp['Content-Range'] = f'bytes */{size}'
                return resp
            last_byte = int(last_byte) if last_byte else size - 1
            if last_byte >= size:
                last_byte = size - 1
            length = last_byte - first_byte + 1

            def file_iterator(file_path, offset=0, bytes_to_read=0):
                # Stream the requested window in 8 KiB chunks so large files
                # never load fully into memory. Default is 0 (yield nothing)
                # rather than None, which would crash the comparison below.
                with open(file_path, 'rb') as f:
                    f.seek(offset)
                    remaining = bytes_to_read
                    while remaining > 0:
                        data = f.read(min(8192, remaining))
                        if not data:
                            break
                        yield data
                        remaining -= len(data)

            resp = StreamingHttpResponse(
                file_iterator(full_path, offset=first_byte, bytes_to_read=length),
                status=206,
                content_type=content_type,
            )
            resp['Content-Range'] = f'bytes {first_byte}-{last_byte}/{size}'
            resp['Content-Length'] = str(length)
            resp['Accept-Ranges'] = 'bytes'
            return resp

    # Fallback to standard 200 FileResponse if no range
    resp = FileResponse(open(full_path, 'rb'), content_type=content_type)
    resp['Content-Length'] = str(size)
    resp['Accept-Ranges'] = 'bytes'
    return resp

View File

@@ -0,0 +1,33 @@
# Generated by Django 6.0.3 on 2026-03-09 18:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the four optional "source rule" columns to MediaSource.

    All fields are nullable (blank=True, null=True), so existing rows need
    no default value and every rule is opt-in.
    """

    dependencies = [
        ("core", "0004_channel_fallback_collection"),
    ]

    operations = [
        # Skip videos uploaded more than N days ago during sync.
        migrations.AddField(
            model_name="mediasource",
            name="max_age_days",
            field=models.IntegerField(blank=True, null=True),
        ),
        # Skip videos longer than this many seconds.
        migrations.AddField(
            model_name="mediasource",
            name="max_video_length_seconds",
            field=models.IntegerField(blank=True, null=True),
        ),
        # Minimum hours before the same item may air again on a channel.
        migrations.AddField(
            model_name="mediasource",
            name="min_repeat_gap_hours",
            field=models.IntegerField(blank=True, null=True),
        ),
        # Skip videos shorter than this many seconds.
        migrations.AddField(
            model_name="mediasource",
            name="min_video_length_seconds",
            field=models.IntegerField(blank=True, null=True),
        ),
    ]

View File

@@ -95,6 +95,13 @@ class MediaSource(models.Model):
is_active = models.BooleanField(default=True)
scan_interval_minutes = models.IntegerField(blank=True, null=True)
last_scanned_at = models.DateTimeField(blank=True, null=True)
# Source Rules
min_video_length_seconds = models.IntegerField(blank=True, null=True)
max_video_length_seconds = models.IntegerField(blank=True, null=True)
min_repeat_gap_hours = models.IntegerField(blank=True, null=True)
max_age_days = models.IntegerField(blank=True, null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)

View File

@@ -16,11 +16,13 @@ from core.services.youtube import download_for_airing, YOUTUBE_SOURCE_TYPES
logger = logging.getLogger(__name__)
def run_cache(hours: int = 24, prune_only: bool = False) -> dict:
def run_cache(hours: int = 24, prune_only: bool = False, channel_id: int | None = None) -> dict:
"""
Scan Airings in the next `hours` hours, download any uncached YouTube
videos, and prune stale local files.
If `channel_id` is provided, only process airings for that specific channel.
Returns a summary dict suitable for JSON serialization.
"""
now = timezone.now()
@@ -33,11 +35,11 @@ def run_cache(hours: int = 24, prune_only: bool = False) -> dict:
return {"pruned": pruned, "downloaded": 0, "already_cached": 0, "failed": 0, "items": []}
# ── Find upcoming and currently playing YouTube-backed airings ──────────
upcoming = (
Airing.objects
.filter(ends_at__gt=now, starts_at__lte=window_end)
.select_related("media_item__media_source")
)
qs = Airing.objects.filter(ends_at__gt=now, starts_at__lte=window_end)
if channel_id is not None:
qs = qs.filter(channel_id=channel_id)
upcoming = qs.select_related("media_item__media_source")
youtube_items: dict[int, MediaItem] = {}
downloaded = already_cached = failed = 0

View File

@@ -57,6 +57,24 @@ class ScheduleGenerator:
blocks = template.scheduleblock_set.all().order_by('start_local_time')
airings_created = 0
# Build last_played mapping for the repeat gap
from core.models import ChannelSourceRule
rules = ChannelSourceRule.objects.filter(channel=self.channel).select_related('media_source')
max_gap_hours = 0
for rule in rules:
if rule.media_source and rule.media_source.min_repeat_gap_hours:
max_gap_hours = max(max_gap_hours, rule.media_source.min_repeat_gap_hours)
last_played_times = {}
if max_gap_hours > 0:
past_dt = datetime.combine(target_date, datetime.min.time(), tzinfo=local_tz).astimezone(timezone.utc) - timedelta(hours=max_gap_hours)
past_airings = Airing.objects.filter(
channel=self.channel,
starts_at__gte=past_dt
).order_by('starts_at')
for a in past_airings:
last_played_times[a.media_item_id] = a.starts_at
for block in blocks:
if not (block.day_of_week_mask & target_weekday_bit):
continue
@@ -98,7 +116,7 @@ class ScheduleGenerator:
continue
airings_created += self._fill_block(
template, block, actual_start_dt, end_dt, available_items
template, block, actual_start_dt, end_dt, available_items, last_played_times
)
return airings_created
@@ -220,17 +238,41 @@ class ScheduleGenerator:
start_dt: datetime,
end_dt: datetime,
items: list,
last_played_times: dict[int, datetime] = None,
) -> int:
"""Fill start_dt→end_dt with sequential Airings, cycling through items."""
cursor = start_dt
idx = 0
created = 0
batch = uuid.uuid4()
if last_played_times is None:
last_played_times = {}
while cursor < end_dt:
item = items[idx % len(items)]
# Look ahead to find the first item that respects its cooldown rules
valid_item = None
items_checked = 0
while items_checked < len(items):
candidate = items[idx % len(items)]
idx += 1
items_checked += 1
# Check cooldown gap
gap_hours = candidate.media_source.min_repeat_gap_hours if candidate.media_source else None
if gap_hours:
last_played = last_played_times.get(candidate.id)
if last_played:
if (cursor - last_played).total_seconds() < gap_hours * 3600:
continue # skip, hasn't been long enough
valid_item = candidate
break
if not valid_item:
# If everything in the pool is currently cooling down, fallback to ignoring cooldowns
valid_item = items[(idx - 1) % len(items)]
item = valid_item
duration = timedelta(seconds=max(item.runtime_seconds or 1800, 1))
# Don't let a single item overshoot the end by more than its own length
@@ -250,6 +292,8 @@ class ScheduleGenerator:
generation_batch_uuid=batch,
)
last_played_times[item.id] = cursor
cursor += duration
created += 1

View File

@@ -133,9 +133,31 @@ def sync_source(media_source: MediaSource, max_videos: int | None = None) -> dic
description = entry.get("description") or ""
release_year = None
upload_date = entry.get("upload_date") # "YYYYMMDD"
if upload_date and len(upload_date) >= 4:
# Enforce Source Rules
if media_source.min_video_length_seconds is not None:
if duration < media_source.min_video_length_seconds:
skipped += 1
continue
if media_source.max_video_length_seconds is not None:
if duration > media_source.max_video_length_seconds:
skipped += 1
continue
if upload_date and len(upload_date) >= 8:
try:
release_year = int(upload_date[:4])
year = int(upload_date[0:4])
month = int(upload_date[4:6])
day = int(upload_date[6:8])
from datetime import date
video_date = date(year, month, day)
release_year = year
if media_source.max_age_days is not None:
age_days = (date.today() - video_date).days
if age_days > media_source.max_age_days:
skipped += 1
continue
except ValueError:
pass

View File

@@ -26,6 +26,12 @@ export const removeSourceFromChannel = async (channelId, ruleId) => {
await apiClient.delete(`/channel/${channelId}/sources/${ruleId}`);
};
// ── Channel Actions ───────────────────────────────────────────────────────

// GET the 24-hour caching status summary for a channel.
export const fetchChannelStatus = async (channelId) => {
  const res = await apiClient.get(`/channel/${channelId}/status`);
  return res.data;
};

// POST to start caching the channel's upcoming airings.
export const triggerChannelDownload = async (channelId) => {
  const res = await apiClient.post(`/channel/${channelId}/download`);
  return res.data;
};
// ── Schedule ──────────────────────────────────────────────────────────────
export const fetchTemplates = async () => (await apiClient.get('/schedule/template/')).data;
export const createTemplate = async (payload) =>
@@ -48,6 +54,7 @@ export const fetchScheduleGenerations = async (channelId) =>
// ── Media Sources (YouTube / local) ───────────────────────────────────────
export const fetchSources = async () => (await apiClient.get('/sources/')).data;
export const createSource = async (payload) => (await apiClient.post('/sources/', payload)).data;
// PUT a full replacement of a media source's editable fields.
export const updateSource = async (id, payload) => {
  const res = await apiClient.put(`/sources/${id}`, payload);
  return res.data;
};
export const syncSource = async (sourceId, maxVideos) => {
const url = maxVideos ? `/sources/${sourceId}/sync?max_videos=${maxVideos}` : `/sources/${sourceId}/sync`;
return (await apiClient.post(url)).data;

View File

@@ -144,15 +144,39 @@ export default function ChannelTuner({ onOpenGuide }) {
muted={!isCurrent}
loop
playsInline
ref={(video) => {
if (video && video.readyState >= 1) { // HAVE_METADATA or higher
const currentAiring = nowPlaying[chan.id];
if (currentAiring && video.dataset.airingId !== String(currentAiring.id)) {
video.dataset.airingId = currentAiring.id;
if (currentAiring.exact_playback_offset_seconds !== undefined) {
let offset = currentAiring.exact_playback_offset_seconds;
if (video.duration && video.duration > 0 && !isNaN(video.duration)) {
offset = offset % video.duration;
}
video.currentTime = offset;
}
}
}
}}
onLoadedMetadata={(e) => {
const video = e.target;
const currentAiring = nowPlaying[chan.id];
if (currentAiring?.starts_at) {
if (currentAiring && video.dataset.airingId !== String(currentAiring.id)) {
video.dataset.airingId = currentAiring.id;
if (currentAiring.exact_playback_offset_seconds !== undefined) {
let offset = currentAiring.exact_playback_offset_seconds;
if (video.duration && video.duration > 0 && !isNaN(video.duration)) {
offset = offset % video.duration;
}
video.currentTime = offset;
} else if (currentAiring.starts_at) {
const offsetSeconds = (Date.now() - new Date(currentAiring.starts_at).getTime()) / 1000;
if (offsetSeconds > 0 && video.duration > 0) {
video.currentTime = offsetSeconds % video.duration;
}
}
}
}}
onError={(e) => {
// Replace video with test card on error

View File

@@ -5,9 +5,10 @@ import {
fetchChannelSources, assignSourceToChannel, removeSourceFromChannel,
fetchTemplates, createTemplate, deleteTemplate, generateScheduleToday,
fetchTemplateBlocks, createTemplateBlock, deleteTemplateBlock,
fetchSources, createSource, syncSource, deleteSource,
fetchSources, createSource, syncSource, deleteSource, updateSource,
fetchLibraries, fetchCollections,
fetchDownloadStatus, triggerCacheUpcoming, downloadItem, fetchDownloadProgress,
fetchChannelStatus, triggerChannelDownload,
} from '../api';
// ─── Constants ────────────────────────────────────────────────────────────
@@ -164,10 +165,12 @@ function ChannelsTab() {
const [templateBlocks, setTemplateBlocks] = useState({}); // { templateId: [blocks] }
const [expandedId, setExpandedId] = useState(null);
const [channelSources, setChannelSources] = useState({}); // { channelId: [rules] }
const [channelStatuses, setChannelStatuses] = useState({}); // { channelId: statusData }
const [showForm, setShowForm] = useState(false);
const [form, setForm] = useState({ name: '', slug: '', channel_number: '', description: '', library_id: '', owner_user_id: '' });
const [assignForm, setAssignForm] = useState({ source_id: '', rule_mode: 'allow', weight: 1.0, schedule_block_label: '' });
const [syncingId, setSyncingId] = useState(null);
const [downloadingId, setDownloadingId] = useState(null);
const [feedback, setFeedback, ok, err] = useFeedback();
useEffect(() => {
@@ -185,15 +188,25 @@ function ChannelsTab() {
.catch(() => err('Failed to load channels'));
}, []);
const loadChannelStatus = async (channelId) => {
try {
const status = await fetchChannelStatus(channelId);
setChannelStatuses(prev => ({ ...prev, [channelId]: status }));
} catch { err('Failed to load channel caching status'); }
};
const toggleExpand = async (ch) => {
const next = expandedId === ch.id ? null : ch.id;
setExpandedId(next);
if (next && !channelSources[next]) {
if (next) {
if (!channelSources[next]) {
try {
const rules = await fetchChannelSources(ch.id);
setChannelSources(cs => ({ ...cs, [ch.id]: rules }));
} catch { err('Failed to load channel sources'); }
}
loadChannelStatus(next);
}
};
const handleCreate = async (e) => {
@@ -250,10 +263,21 @@ function ChannelsTab() {
try {
const result = await generateScheduleToday(ch.id);
ok(`Schedule generated for "${ch.name}": ${result.airings_created} airings created.`);
if (expandedId === ch.id) loadChannelStatus(ch.id);
} catch { err('Failed to generate schedule.'); }
finally { setSyncingId(null); }
};
const handleDownload = async (ch) => {
setDownloadingId(ch.id);
try {
await triggerChannelDownload(ch.id);
ok(`Download triggered for "${ch.name}".`);
if (expandedId === ch.id) loadChannelStatus(ch.id);
} catch { err('Failed to trigger download.'); }
finally { setDownloadingId(null); }
};
const handleSetFallback = async (ch, collectionId) => {
try {
const updated = await updateChannel(ch.id, { fallback_collection_id: collectionId ? parseInt(collectionId) : null });
@@ -327,6 +351,14 @@ function ChannelsTab() {
>
{syncingId === ch.id ? '...' : '▶ Schedule'}
</button>
<button
className="btn-sync"
onClick={() => handleDownload(ch)}
disabled={downloadingId === ch.id}
title="Download upcoming airings for this channel"
>
{downloadingId === ch.id ? '...' : '⬇ Download'}
</button>
<IconBtn icon="🗑" kind="danger" onClick={() => handleDelete(ch)} />
<span className="expand-chevron">{isExpanded ? '▲' : '▼'}</span>
</div>
@@ -336,6 +368,23 @@ function ChannelsTab() {
{isExpanded && (
<div className="channel-expand-panel">
{/* ─── Channel Status ──────────────────────────────────── */}
{channelStatuses[ch.id] && (
<div style={{ marginBottom: '1.25rem', padding: '0.75rem', background: 'rgba(59, 130, 246, 0.1)', border: '1px solid rgba(59, 130, 246, 0.3)', borderRadius: '6px' }}>
<div style={{ fontWeight: 600, marginBottom: '0.4rem', color: '#60a5fa' }}>Schedule Status (Next 24 Hours)</div>
<div style={{ display: 'grid', gridTemplateColumns: '1fr 1fr', gap: '0.5rem', fontSize: '0.9rem' }}>
<span><strong>Total Upcoming:</strong> {channelStatuses[ch.id].total_upcoming_airings}</span>
<span><strong>Cached:</strong> {channelStatuses[ch.id].total_cached_airings} ({Math.round(channelStatuses[ch.id].percent_cached)}%)</span>
</div>
{channelStatuses[ch.id].missing_items?.length > 0 && (
<div style={{ marginTop: '0.75rem', fontSize: '0.8rem', opacity: 0.8 }}>
<strong>Missing Downloads:</strong> {channelStatuses[ch.id].missing_items.slice(0, 3).map(i => `[${i.source_name}] ${i.title}`).join(', ')}
{channelStatuses[ch.id].missing_items.length > 3 ? ` +${channelStatuses[ch.id].missing_items.length - 3} more` : ''}
</div>
)}
</div>
)}
{/* ─── Fallback block selector ───────────────────────── */}
<div style={{ marginBottom: '1.25rem', padding: '0.75rem', background: 'rgba(239,68,68,0.1)', border: '1px solid rgba(239,68,68,0.3)', borderRadius: '6px' }}>
<label style={{ display: 'flex', alignItems: 'center', gap: '0.75rem', fontSize: '0.9rem' }}>
@@ -454,7 +503,8 @@ function SourcesTab() {
const [loading, setLoading] = useState(true);
const [syncingId, setSyncingId] = useState(null);
const [showForm, setShowForm] = useState(false);
const [form, setForm] = useState({ name: '', source_type: 'youtube_playlist', uri: '', library_id: '', max_videos: 50, scan_interval_minutes: 60 });
const [editingId, setEditingId] = useState(null);
const [form, setForm] = useState({ name: '', source_type: 'youtube_playlist', uri: '', library_id: '', max_videos: 50, scan_interval_minutes: 60, min_video_length_seconds: '', max_video_length_seconds: '', min_repeat_gap_hours: '', max_age_days: '' });
const [feedback, setFeedback, ok, err] = useFeedback();
useEffect(() => {
@@ -463,20 +513,51 @@ function SourcesTab() {
.catch(() => { err('Failed to load sources'); setLoading(false); });
}, []);
const handleCreate = async (e) => {
const handleEdit = (src) => {
setForm({
name: src.name,
source_type: src.source_type,
uri: src.uri,
library_id: src.library_id,
max_videos: src.max_videos || 50,
scan_interval_minutes: src.scan_interval_minutes || '',
min_video_length_seconds: src.min_video_length_seconds || '',
max_video_length_seconds: src.max_video_length_seconds || '',
min_repeat_gap_hours: src.min_repeat_gap_hours || '',
max_age_days: src.max_age_days || '',
});
setEditingId(src.id);
setShowForm(true);
};
const handleSubmit = async (e) => {
e.preventDefault();
if (!form.library_id) { err('Please select a library.'); return; }
try {
const src = await createSource({
const payload = {
...form,
library_id: parseInt(form.library_id),
scan_interval_minutes: parseInt(form.scan_interval_minutes) || null
});
scan_interval_minutes: parseInt(form.scan_interval_minutes) || null,
min_video_length_seconds: parseInt(form.min_video_length_seconds) || null,
max_video_length_seconds: parseInt(form.max_video_length_seconds) || null,
min_repeat_gap_hours: parseInt(form.min_repeat_gap_hours) || null,
max_age_days: parseInt(form.max_age_days) || null
};
if (editingId) {
const updated = await updateSource(editingId, payload);
setSources(s => s.map(x => x.id === editingId ? updated : x));
ok(`Source "${updated.name}" updated.`);
} else {
const src = await createSource(payload);
setSources(s => [...s, src]);
setShowForm(false);
setForm({ name: '', source_type: 'youtube_playlist', uri: '', library_id: '', max_videos: 50, scan_interval_minutes: 60 });
ok(`Source "${src.name}" registered. Hit Sync to import videos.`);
} catch { err('Failed to create source.'); }
}
setShowForm(false);
setEditingId(null);
setForm({ name: '', source_type: 'youtube_playlist', uri: '', library_id: '', max_videos: 50, scan_interval_minutes: 60, min_video_length_seconds: '', max_video_length_seconds: '', min_repeat_gap_hours: '', max_age_days: '' });
} catch { err(`Failed to ${editingId ? 'update' : 'create'} source.`); }
};
const handleSync = async (src) => {
@@ -507,13 +588,21 @@ function SourcesTab() {
<Feedback fb={feedback} clear={() => setFeedback(null)} />
<div className="settings-section-title">
<h3>Media Sources</h3>
<button className="btn-accent" onClick={() => setShowForm(f => !f)}>
<button className="btn-accent" onClick={() => {
if (showForm) {
setShowForm(false);
setEditingId(null);
setForm({ name: '', source_type: 'youtube_playlist', uri: '', library_id: '', max_videos: 50, scan_interval_minutes: 60, min_video_length_seconds: '', max_video_length_seconds: '', min_repeat_gap_hours: '', max_age_days: '' });
} else {
setShowForm(true);
}
}}>
{showForm ? ' Cancel' : '+ Add Source'}
</button>
</div>
{showForm && (
<form className="settings-form" onSubmit={handleCreate}>
<form className="settings-form" onSubmit={handleSubmit}>
<div className="form-row">
<label>Name<input required placeholder="ABC News" value={form.name} onChange={e => setForm(f => ({ ...f, name: e.target.value }))} /></label>
<label>Type
@@ -548,7 +637,21 @@ function SourcesTab() {
title="How often background workers should fetch new metadata updates" />
</label>
</div>
<button type="submit" className="btn-accent">Register Source</button>
<div className="form-row">
<label>Min Length (sec)
<input type="number" placeholder="none" value={form.min_video_length_seconds} onChange={e => setForm(f => ({ ...f, min_video_length_seconds: e.target.value }))} />
</label>
<label>Max Length (sec)
<input type="number" placeholder="none" value={form.max_video_length_seconds} onChange={e => setForm(f => ({ ...f, max_video_length_seconds: e.target.value }))} />
</label>
<label>Min Repeat Gap (hrs)
<input type="number" placeholder="none" value={form.min_repeat_gap_hours} onChange={e => setForm(f => ({ ...f, min_repeat_gap_hours: e.target.value }))} />
</label>
<label>Max Age (days)
<input type="number" placeholder="none" value={form.max_age_days} onChange={e => setForm(f => ({ ...f, max_age_days: e.target.value }))} title="Skip videos uploaded older than this" />
</label>
</div>
<button type="submit" className="btn-accent">{editingId ? 'Update Source' : 'Register Source'}</button>
</form>
)}
@@ -570,9 +673,14 @@ function SourcesTab() {
? <span className="badge badge-ok">Synced {new Date(synced).toLocaleDateString()}</span>
: <span className="badge badge-warn">Not synced</span>
}
{src.min_video_length_seconds && <span className="badge badge-info">Min {src.min_video_length_seconds}s</span>}
{src.max_video_length_seconds && <span className="badge badge-info">Max {src.max_video_length_seconds}s</span>}
{src.min_repeat_gap_hours && <span className="badge badge-info">Gap {src.min_repeat_gap_hours}h</span>}
{src.max_age_days && <span className="badge badge-info">Age {'<='} {src.max_age_days}d</span>}
</span>
</div>
<div className="row-actions">
<IconBtn icon="✎" onClick={() => handleEdit(src)} title="Edit Source" />
{isYT(src) && (
<button className="btn-sync" onClick={() => handleSync(src)} disabled={isSyncing}>
{isSyncing ? '…' : '↻ Sync'}

View File

@@ -12,8 +12,7 @@ export default defineConfig({
},
'/media': {
target: 'http://localhost:8000',
changeOrigin: true,
rewrite: (path) => path.replace(/^\/media/, '')
changeOrigin: true
}
}
}

View File

@@ -26,5 +26,11 @@ urlpatterns = [
path("api/", api.urls),
]
from api.views import serve_video_with_range
from django.urls import re_path
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Use custom range-aware server for media files, enabling HTML5 Video seek
urlpatterns += [
re_path(r'^media/(?P<path>.*)$', serve_video_with_range, name='media'),
]

View File

@@ -0,0 +1,89 @@
import pytest
import datetime
from django.utils import timezone
from core.models import Channel, AppUser, Library, MediaSource, ChannelSourceRule, ScheduleTemplate, ScheduleBlock
@pytest.mark.django_db
def test_channel_status_and_download_flow(client):
    """End-to-end: empty status → generate schedule → trigger download → status."""
    # ── Fixtures: user, library, channel ───────────────────────────────
    owner = AppUser.objects.create(username="testuser")
    lib = Library.objects.create(name="Test Library", owner_user=owner)
    chan = Channel.objects.create(
        name="Action Channel",
        slug="action-ch",
        owner_user=owner,
        library=lib,
        scheduling_mode="fill_blocks",
    )

    # ── A solid-color source requires no real downloading ──────────────
    color_source = MediaSource.objects.create(
        library=lib,
        name="Color Test",
        source_type="solid_color",
    )
    from core.models import MediaItem
    MediaItem.objects.create(
        media_source=color_source,
        title="Test Solid Color Item",
        item_kind="movie",
        runtime_seconds=3600,
        file_path="dummy_path",
    )

    # ── Template with one block covering the whole day, every day ──────
    tmpl = ScheduleTemplate.objects.create(
        channel=chan,
        name="Daily",
        timezone_name="UTC",
        is_active=True,
    )
    ScheduleBlock.objects.create(
        schedule_template=tmpl,
        name="All Day Block",
        block_type="programming",
        start_local_time=datetime.time(0, 0),
        end_local_time=datetime.time(23, 59),
        day_of_week_mask=127,
    )
    ChannelSourceRule.objects.create(
        channel=chan,
        media_source=color_source,
        rule_mode="allow",
        weight=1.0,
        schedule_block_label="All Day Block",
    )

    # Before generation: status endpoint reports zero upcoming airings.
    resp = client.get(f"/api/channel/{chan.id}/status")
    assert resp.status_code == 200
    assert resp.json()["total_upcoming_airings"] == 0

    # Generate today's schedule; airings must be created.
    resp = client.post(f"/api/schedule/generate-today/{chan.id}")
    assert resp.status_code == 200
    generated = resp.json()
    assert generated["status"] == "success"
    assert generated["airings_created"] > 0

    # Status now reflects the freshly generated airings (cached state may
    # vary by source type).
    resp = client.get(f"/api/channel/{chan.id}/status")
    assert resp.status_code == 200
    assert resp.json()["total_upcoming_airings"] > 0

    # Trigger the per-channel download endpoint and check its shape.
    resp = client.post(f"/api/channel/{chan.id}/download")
    assert resp.status_code == 200
    assert "downloaded" in resp.json()

    # Final status check — solid colors need no real downloads; we only
    # require the API to keep reporting the expected fields.
    resp = client.get(f"/api/channel/{chan.id}/status")
    assert resp.status_code == 200
    assert "percent_cached" in resp.json()

View File

@@ -0,0 +1,72 @@
import asyncio
from playwright.async_api import async_playwright
import sys
import time
async def main():
    """Reload the tuner page several times and check playback continuity.

    If the frontend syncs video position to the airing's wall-clock offset,
    the observed currentTime must strictly increase across reloads. If the
    video restarts every load, readings hover around the ~1s settle delay.
    Exits 0 on success, 1 on failure; requires frontend + backend running.
    """
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=True)
        page = await browser.new_page()
        url = "http://localhost:5173/"
        print("Testing frontend playback sync consistency on reload...")

        times = []
        # Surface browser console output to help debug failures.
        page.on("console", lambda msg: print(f"BROWSER LOG: {msg.text}"))

        for i in range(5):
            await page.goto(url)
            # Wait for the video element and for it to begin playing
            try:
                await page.wait_for_selector('video.tuner-video', state='attached', timeout=5000)
                # wait 1 second to let metadata and ref execute
                await page.wait_for_timeout(1000)
                current_time = await page.evaluate(
                    "() => { const v = document.querySelector('video.tuner-video.playing'); return v ? v.currentTime : null; }"
                )
                current_src = await page.evaluate(
                    "() => { const v = document.querySelector('video.tuner-video.playing'); return v ? v.currentSrc : null; }"
                )
                if current_time is None:
                    print(f"Reload {i+1}: Video element found but currentTime is null")
                    continue
                times.append(current_time)
                print(f"Reload {i+1}: src = {current_src}, currentTime = {current_time:.2f} seconds")
            except Exception as e:
                print(f"Reload {i+1}: Error - {e}")

        await browser.close()

    if not times:
        print("No times recorded. Ensure the frontend and backend are running.")
        sys.exit(1)

    diffs = [times[i] - times[i - 1] for i in range(1, len(times))]
    print("Differences in video time between loads:", [f"{d:.2f}s" for d in diffs])

    # A continuous wall-clock-synced stream must produce strictly increasing
    # readings across reloads.
    # NOTE(review): assumes the airing does not loop or rotate mid-test — a
    # loop boundary would legitimately reset currentTime; confirm test media.
    is_monotonic = all(times[i] > times[i - 1] for i in range(1, len(times)))
    if not is_monotonic:
        print("TEST FAILED: BUG REPRODUCED. The currentTime is not continuous across reloads.")
        sys.exit(1)
    else:
        print("TEST PASSED: The currentTime is continuous across reloads.")
        sys.exit(0)


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,98 @@
import pytest
from django.utils import timezone
from datetime import timedelta
from core.models import MediaSource, MediaItem, Channel, ScheduleBlock, ChannelSourceRule, Airing, AppUser, Library
from api.routers.channel import AiringSchema
from django.test import Client
@pytest.fixture
def test_channel(db):
    """Provision a channel wired to one 10-minute solid-color media item.

    Returns a ``(channel, media_item)`` pair for the playback-offset tests.
    """
    owner = AppUser.objects.create(username="playback_tester")
    lib = Library.objects.create(name="Test Library", owner_user=owner)
    chan = Channel.objects.create(
        name="Test Channel",
        channel_number=10,
        owner_user=owner,
        library=lib,
    )
    src = MediaSource.objects.create(
        library=lib, name="Test Source", source_type="solid_color"
    )
    ChannelSourceRule.objects.create(
        channel=chan, media_source=src, rule_mode='allow', weight=1.0
    )
    # A 600 s runtime gives the modulo/looping tests a known cycle length.
    media = MediaItem.objects.create(
        media_source=src,
        title="Color Loop",
        item_kind="solid_color",
        runtime_seconds=600,  # 10 minutes length
    )
    return chan, media
import uuid
@pytest.mark.django_db
def test_playback_offset_calculation(test_channel):
    """An airing that began 3 minutes ago reports an offset of ~180 s."""
    channel, item = test_channel
    begin = timezone.now() - timedelta(minutes=3)
    airing = Airing.objects.create(
        channel=channel,
        media_item=item,
        starts_at=begin,
        ends_at=begin + timedelta(minutes=30),
        slot_kind='content',
        generation_batch_uuid=uuid.uuid4(),
    )
    schema = AiringSchema.from_airing(airing)
    # 180 s elapsed is below the 600 s runtime, so no modulo wrap applies;
    # rel=1e-2 absorbs the wall-clock drift between create and serialize.
    assert schema.exact_playback_offset_seconds == pytest.approx(180.0, rel=1e-2)
@pytest.mark.django_db
def test_playback_offset_modulo(test_channel):
    """Once the 600 s item has looped, the offset wraps: 840 % 600 == 240."""
    channel, item = test_channel
    begin = timezone.now() - timedelta(minutes=14)
    airing = Airing.objects.create(
        channel=channel,
        media_item=item,
        starts_at=begin,
        ends_at=begin + timedelta(minutes=30),
        slot_kind='content',
        generation_batch_uuid=uuid.uuid4(),
    )
    schema = AiringSchema.from_airing(airing)
    # 14 min = 840 s elapsed; runtime is 600 s, so we expect to be 240 s
    # into the second loop of the item.
    assert schema.exact_playback_offset_seconds == pytest.approx(240.0, rel=1e-2)
@pytest.mark.django_db
def test_channel_now_api_returns_offset(test_channel, client: Client):
    """The /now endpoint exposes exact_playback_offset_seconds over HTTP."""
    channel, item = test_channel
    started = timezone.now() - timedelta(seconds=45)
    Airing.objects.create(
        channel=channel,
        media_item=item,
        starts_at=started,
        ends_at=started + timedelta(minutes=10),
        slot_kind='content',
        generation_batch_uuid=uuid.uuid4(),
    )
    resp = client.get(f"/api/channel/{channel.id}/now")
    assert resp.status_code == 200
    payload = resp.json()
    assert "exact_playback_offset_seconds" in payload
    # Wider tolerance (rel=1e-1) than the unit tests: the HTTP round trip
    # adds real wall-clock time on top of the 45 s elapsed.
    assert payload["exact_playback_offset_seconds"] == pytest.approx(45.0, rel=1e-1)

116
tests/test_source_rules.py Normal file
View File

@@ -0,0 +1,116 @@
import pytest
from datetime import date, timedelta, datetime, timezone
from django.utils import timezone as django_timezone
from core.models import AppUser, Library, Channel, ScheduleTemplate, ScheduleBlock, MediaSource, MediaItem, ChannelSourceRule, Airing
from core.services.scheduler import ScheduleGenerator
@pytest.mark.django_db
def test_repeat_gap_hours():
    """Scheduler must never replay an item within its source's 5-hour cooldown."""
    owner = AppUser.objects.create_user(username="rule_tester", password="pw")
    lib = Library.objects.create(owner_user=owner, name="Rule Test Lib")
    chan = Channel.objects.create(
        owner_user=owner, library=lib, name="Rule TV", slug="r-tv",
        channel_number=99, timezone_name="UTC"
    )
    src = MediaSource.objects.create(
        library=lib,
        name="Gap Source",
        source_type="local_directory",
        uri="/tmp/fake",
        min_repeat_gap_hours=5
    )
    # Six one-hour items -> 6 hours of distinct content, so a 5 hour gap is
    # mathematically satisfiable without breaking cooldowns.
    for idx in range(6):
        MediaItem.objects.create(
            media_source=src,
            title=f"Vid {idx}",
            item_kind="movie",
            runtime_seconds=3600,  # 1 hour
            file_path=f"/tmp/fake/{idx}.mp4",
            cached_file_path=f"/tmp/fake/{idx}.mp4",
            is_active=True
        )
    ChannelSourceRule.objects.create(channel=chan, media_source=src, rule_mode="allow", weight=1.0)
    tpl = ScheduleTemplate.objects.create(channel=chan, name="T", priority=10, is_active=True)
    ScheduleBlock.objects.create(
        schedule_template=tpl, name="All Day", block_type="programming",
        start_local_time="00:00:00", end_local_time="12:00:00", day_of_week_mask=127
    )
    generated = ScheduleGenerator(chan).generate_for_date(django_timezone.now().date())
    assert generated == 12  # 12 1-hour slots

    # Walk the schedule in order and verify no item restarts sooner than
    # 5 hours after its previous start.
    previous_start = {}
    for airing in Airing.objects.filter(channel=chan).order_by('starts_at'):
        earlier = previous_start.get(airing.media_item_id)
        if earlier is not None:
            gap = (airing.starts_at - earlier).total_seconds() / 3600.0
            assert gap >= 5.0, f"Item {airing.media_item_id} played too soon. Gap: {gap} hours"
        previous_start[airing.media_item_id] = airing.starts_at
@pytest.mark.django_db
def test_youtube_age_filter():
    """Videos older than the source's max_age_days are skipped during sync.

    yt-dlp is mocked so no network access happens: the playlist yields three
    entries aged 2, 15, and 5 days against a 10-day cutoff, so exactly one
    entry (the 15-day-old one) must be skipped and two persisted.
    """
    from unittest.mock import patch
    from core.services.youtube import sync_source

    user = AppUser.objects.create_user(username="rule_tester2", password="pw")
    library = Library.objects.create(owner_user=user, name="Rule Test Lib 2")
    source = MediaSource.objects.create(
        library=library,
        name="Old Videos Only",
        source_type="youtube_playlist",
        uri="https://www.youtube.com/playlist?list=fake_playlist",
        max_age_days=10  # Anything older than 10 days is skipped
    )
    # date/timedelta already come from the module-level import; the previous
    # in-function re-import was redundant and has been removed.
    today = date.today()

    def entry(video_id, title, age_days):
        # yt-dlp reports upload_date as a YYYYMMDD string.
        return {
            "id": video_id,
            "title": title,
            "duration": 600,
            "upload_date": (today - timedelta(days=age_days)).strftime("%Y%m%d"),
        }

    with patch("core.services.youtube.yt_dlp.YoutubeDL") as mock_ydl:
        mock_instance = mock_ydl.return_value.__enter__.return_value
        mock_instance.extract_info.return_value = {
            "entries": [
                entry("vid1", "New Video", 2),     # keep (2 days old)
                entry("vid2", "Old Video", 15),    # skip (over 10-day cutoff)
                entry("vid3", "Medium Video", 5),  # keep (5 days old)
            ]
        }
        res = sync_source(source, max_videos=5)

    assert res['created'] == 2  # Only vid1 and vid3
    assert res['skipped'] == 1  # vid2 skipped due to age
    # Verify the items actually persisted in the DB.
    items = list(MediaItem.objects.filter(media_source=source).order_by('title'))
    assert len(items) == 2
    titles = [item.title for item in items]
    # BUG FIX: the mocked entries are titled "New/Old/Medium Video", so the
    # original check for "vid2" in the titles was vacuously true even when
    # the old video leaked through. Assert on the real title (and keep the
    # id check in case sync_source falls back to storing the id).
    assert "Old Video" not in titles
    assert "vid2" not in titles