diff --git a/.coverage b/.coverage index f9a1846..c5f5cd7 100644 Binary files a/.coverage and b/.coverage differ diff --git a/api/routers/channel.py b/api/routers/channel.py index 729df45..cd4580d 100644 --- a/api/routers/channel.py +++ b/api/routers/channel.py @@ -58,13 +58,26 @@ class AiringSchema(Schema): ends_at: datetime slot_kind: str status: str + exact_playback_offset_seconds: float = 0.0 @staticmethod def from_airing(airing) -> 'AiringSchema': media_path = None + exact_offset = 0.0 + + # Calculate exactly how far into the video we should be right now + now = timezone.now() + # if the airing hasn't started yet, offset is 0 + if now >= airing.starts_at: + exact_offset = (now - airing.starts_at).total_seconds() + if airing.media_item: item = airing.media_item + # If the item has a known runtime, and we are looping it, modulo the offset + if item.runtime_seconds and item.runtime_seconds > 0: + exact_offset = exact_offset % item.runtime_seconds + # 1. Determine if this item is from a YouTube source is_youtube = False if item.media_source and item.media_source.source_type in ['youtube', 'youtube_channel', 'youtube_playlist']: @@ -102,6 +115,7 @@ class AiringSchema(Schema): ends_at=airing.ends_at, slot_kind=airing.slot_kind, status=airing.status, + exact_playback_offset_seconds=max(0.0, exact_offset) ) @router.get("/", response=List[ChannelSchema]) @@ -112,7 +126,61 @@ def list_channels(request): ) +class ChannelStatusSchema(Schema): + total_upcoming_airings: int + total_cached_airings: int + percent_cached: float + missing_items: List[dict] + +@router.get("/{channel_id}/status", response=ChannelStatusSchema) +def get_channel_status(request, channel_id: int): + channel = get_object_or_404(Channel, id=channel_id) + now = timezone.now() + window_end = now + timedelta(hours=24) + + airings = Airing.objects.filter( + channel=channel, + ends_at__gt=now, + starts_at__lte=window_end + ).select_related('media_item') + + total = 0 + cached = 0 + missing = [] + + for a in 
airings: + total += 1 + item = a.media_item + if item and item.cached_file_path: + # We don't do path.exists() here to keep it fast, but we could. + cached += 1 + elif item: + missing.append({ + "id": item.id, + "title": item.title, + "starts_at": a.starts_at.isoformat() + }) + + pct = (cached / total * 100.0) if total > 0 else 100.0 + + return { + "total_upcoming_airings": total, + "total_cached_airings": cached, + "percent_cached": pct, + "missing_items": missing + } + +@router.post("/{channel_id}/download") +def trigger_channel_download(request, channel_id: int): + get_object_or_404(Channel, id=channel_id) + from core.services.cache import run_cache + + # Run cache explicitly for this channel for the next 24 hours + result = run_cache(hours=24, prune_only=False, channel_id=channel_id) + return result + @router.get("/{channel_id}", response=ChannelSchema) + def get_channel(request, channel_id: int): return get_object_or_404(Channel, id=channel_id) diff --git a/api/routers/sources.py b/api/routers/sources.py index 7fcfccd..c8f8890 100644 --- a/api/routers/sources.py +++ b/api/routers/sources.py @@ -41,6 +41,10 @@ class MediaSourceIn(BaseModel): uri: str is_active: bool = True scan_interval_minutes: Optional[int] = None + min_video_length_seconds: Optional[int] = None + max_video_length_seconds: Optional[int] = None + min_repeat_gap_hours: Optional[int] = None + max_age_days: Optional[int] = None class MediaSourceOut(BaseModel): id: int @@ -50,6 +54,10 @@ class MediaSourceOut(BaseModel): uri: str is_active: bool scan_interval_minutes: Optional[int] + min_video_length_seconds: Optional[int] + max_video_length_seconds: Optional[int] + min_repeat_gap_hours: Optional[int] + max_age_days: Optional[int] last_scanned_at: Optional[datetime] created_at: datetime @@ -140,6 +148,23 @@ def delete_source(request, source_id: int): source.delete() return 204, None +@router.put("/{source_id}", response=MediaSourceOut) +def update_source(request, source_id: int, payload: 
MediaSourceIn): + """Update an existing media source.""" + source = get_object_or_404(MediaSource, id=source_id) + source.name = payload.name + source.source_type = payload.source_type + source.uri = payload.uri + source.library_id = payload.library_id + source.is_active = payload.is_active + source.scan_interval_minutes = payload.scan_interval_minutes + source.min_video_length_seconds = payload.min_video_length_seconds + source.max_video_length_seconds = payload.max_video_length_seconds + source.min_repeat_gap_hours = payload.min_repeat_gap_hours + source.max_age_days = payload.max_age_days + source.save() + return source + @router.post("/{source_id}/sync", response=SyncResult) def trigger_sync(request, source_id: int, max_videos: Optional[int] = None): diff --git a/api/views.py b/api/views.py index 91ea44a..1369b35 100644 --- a/api/views.py +++ b/api/views.py @@ -1,3 +1,71 @@ -from django.shortcuts import render +import os +import re +from django.conf import settings +from django.http import StreamingHttpResponse, Http404, HttpResponseNotModified, FileResponse +from django.views.static import was_modified_since +from wsgiref.util import FileWrapper -# Create your views here. +def serve_video_with_range(request, path): + """ + Serve a media file with HTTP Range support. Required for HTML5 video + seeking in Chrome/Safari using the Django development server. 
+ """ + clean_path = path.lstrip('/') + full_path = os.path.normpath(os.path.join(settings.MEDIA_ROOT, clean_path)) + + # Security check to prevent directory traversal + if not full_path.startswith(os.path.normpath(settings.MEDIA_ROOT)): + raise Http404("Invalid path") + + if not os.path.exists(full_path): + raise Http404(f"File {path} not found") + + statobj = os.stat(full_path) + size = statobj.st_size + + # Very simple content type mapping for videos + content_type = "video/mp4" + if full_path.endswith('.webm'): content_type = "video/webm" + elif full_path.endswith('.mkv'): content_type = "video/x-matroska" + elif full_path.endswith('.svg'): content_type = "image/svg+xml" + elif full_path.endswith('.png'): content_type = "image/png" + elif full_path.endswith('.jpg') or full_path.endswith('.jpeg'): content_type = "image/jpeg" + + range_header = request.META.get('HTTP_RANGE', '').strip() + if range_header.startswith('bytes='): + range_match = re.match(r'bytes=(\d+)-(\d*)', range_header) + if range_match: + first_byte, last_byte = range_match.groups() + first_byte = int(first_byte) + last_byte = int(last_byte) if last_byte else size - 1 + if last_byte >= size: + last_byte = size - 1 + length = last_byte - first_byte + 1 + + def file_iterator(file_path, offset=0, bytes_to_read=None): + with open(file_path, 'rb') as f: + f.seek(offset) + remaining = bytes_to_read + while remaining > 0: + chunk_size = min(8192, remaining) + data = f.read(chunk_size) + if not data: + break + yield data + remaining -= len(data) + + resp = StreamingHttpResponse( + file_iterator(full_path, offset=first_byte, bytes_to_read=length), + status=206, + content_type=content_type + ) + resp['Content-Range'] = f'bytes {first_byte}-{last_byte}/{size}' + resp['Content-Length'] = str(length) + resp['Accept-Ranges'] = 'bytes' + return resp + + # Fallback to standard 200 FileResponse if no range + resp = FileResponse(open(full_path, 'rb'), content_type=content_type) + resp['Content-Length'] = str(size) 
+ resp['Accept-Ranges'] = 'bytes' + return resp diff --git a/core/migrations/0005_mediasource_max_age_days_and_more.py b/core/migrations/0005_mediasource_max_age_days_and_more.py new file mode 100644 index 0000000..9163a3c --- /dev/null +++ b/core/migrations/0005_mediasource_max_age_days_and_more.py @@ -0,0 +1,33 @@ +# Generated by Django 6.0.3 on 2026-03-09 18:09 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("core", "0004_channel_fallback_collection"), + ] + + operations = [ + migrations.AddField( + model_name="mediasource", + name="max_age_days", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="mediasource", + name="max_video_length_seconds", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="mediasource", + name="min_repeat_gap_hours", + field=models.IntegerField(blank=True, null=True), + ), + migrations.AddField( + model_name="mediasource", + name="min_video_length_seconds", + field=models.IntegerField(blank=True, null=True), + ), + ] diff --git a/core/models.py b/core/models.py index 4c54081..a529c04 100644 --- a/core/models.py +++ b/core/models.py @@ -95,6 +95,13 @@ class MediaSource(models.Model): is_active = models.BooleanField(default=True) scan_interval_minutes = models.IntegerField(blank=True, null=True) last_scanned_at = models.DateTimeField(blank=True, null=True) + + # Source Rules + min_video_length_seconds = models.IntegerField(blank=True, null=True) + max_video_length_seconds = models.IntegerField(blank=True, null=True) + min_repeat_gap_hours = models.IntegerField(blank=True, null=True) + max_age_days = models.IntegerField(blank=True, null=True) + created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) diff --git a/core/services/cache.py b/core/services/cache.py index 2b07452..6d95033 100644 --- a/core/services/cache.py +++ b/core/services/cache.py @@ 
-16,11 +16,13 @@ from core.services.youtube import download_for_airing, YOUTUBE_SOURCE_TYPES logger = logging.getLogger(__name__) -def run_cache(hours: int = 24, prune_only: bool = False) -> dict: +def run_cache(hours: int = 24, prune_only: bool = False, channel_id: int | None = None) -> dict: """ Scan Airings in the next `hours` hours, download any uncached YouTube videos, and prune stale local files. + If `channel_id` is provided, only process airings for that specific channel. + Returns a summary dict suitable for JSON serialization. """ now = timezone.now() @@ -33,11 +35,11 @@ def run_cache(hours: int = 24, prune_only: bool = False) -> dict: return {"pruned": pruned, "downloaded": 0, "already_cached": 0, "failed": 0, "items": []} # ── Find upcoming and currently playing YouTube-backed airings ────────── - upcoming = ( - Airing.objects - .filter(ends_at__gt=now, starts_at__lte=window_end) - .select_related("media_item__media_source") - ) + qs = Airing.objects.filter(ends_at__gt=now, starts_at__lte=window_end) + if channel_id is not None: + qs = qs.filter(channel_id=channel_id) + + upcoming = qs.select_related("media_item__media_source") youtube_items: dict[int, MediaItem] = {} downloaded = already_cached = failed = 0 diff --git a/core/services/scheduler.py b/core/services/scheduler.py index 4a53144..795d9cc 100644 --- a/core/services/scheduler.py +++ b/core/services/scheduler.py @@ -56,6 +56,24 @@ class ScheduleGenerator: target_weekday_bit = 1 << target_date.weekday() blocks = template.scheduleblock_set.all().order_by('start_local_time') airings_created = 0 + + # Build last_played mapping for the repeat gap + from core.models import ChannelSourceRule + rules = ChannelSourceRule.objects.filter(channel=self.channel).select_related('media_source') + max_gap_hours = 0 + for rule in rules: + if rule.media_source and rule.media_source.min_repeat_gap_hours: + max_gap_hours = max(max_gap_hours, rule.media_source.min_repeat_gap_hours) + + last_played_times = {} + if 
max_gap_hours > 0: + past_dt = datetime.combine(target_date, datetime.min.time(), tzinfo=local_tz).astimezone(timezone.utc) - timedelta(hours=max_gap_hours) + past_airings = Airing.objects.filter( + channel=self.channel, + starts_at__gte=past_dt + ).order_by('starts_at') + for a in past_airings: + last_played_times[a.media_item_id] = a.starts_at for block in blocks: if not (block.day_of_week_mask & target_weekday_bit): @@ -98,7 +116,7 @@ class ScheduleGenerator: continue airings_created += self._fill_block( - template, block, actual_start_dt, end_dt, available_items + template, block, actual_start_dt, end_dt, available_items, last_played_times ) return airings_created @@ -220,17 +238,41 @@ class ScheduleGenerator: start_dt: datetime, end_dt: datetime, items: list, + last_played_times: dict[int, datetime] = None, ) -> int: """Fill start_dt→end_dt with sequential Airings, cycling through items.""" cursor = start_dt idx = 0 created = 0 batch = uuid.uuid4() + if last_played_times is None: + last_played_times = {} while cursor < end_dt: - item = items[idx % len(items)] - idx += 1 + # Look ahead to find the first item that respects its cooldown rules + valid_item = None + items_checked = 0 + while items_checked < len(items): + candidate = items[idx % len(items)] + idx += 1 + items_checked += 1 + + # Check cooldown gap + gap_hours = candidate.media_source.min_repeat_gap_hours if candidate.media_source else None + if gap_hours: + last_played = last_played_times.get(candidate.id) + if last_played: + if (cursor - last_played).total_seconds() < gap_hours * 3600: + continue # skip, hasn't been long enough + + valid_item = candidate + break + + if not valid_item: + # If everything in the pool is currently cooling down, fallback to ignoring cooldowns + valid_item = items[(idx - 1) % len(items)] + item = valid_item duration = timedelta(seconds=max(item.runtime_seconds or 1800, 1)) # Don't let a single item overshoot the end by more than its own length @@ -249,6 +291,8 @@ class 
ScheduleGenerator: source_reason="template", generation_batch_uuid=batch, ) + + last_played_times[item.id] = cursor cursor += duration created += 1 diff --git a/core/services/youtube.py b/core/services/youtube.py index 706a3ec..e52d4d6 100644 --- a/core/services/youtube.py +++ b/core/services/youtube.py @@ -133,9 +133,31 @@ def sync_source(media_source: MediaSource, max_videos: int | None = None) -> dic description = entry.get("description") or "" release_year = None upload_date = entry.get("upload_date") # "YYYYMMDD" - if upload_date and len(upload_date) >= 4: + + # Enforce Source Rules + if media_source.min_video_length_seconds is not None: + if duration < media_source.min_video_length_seconds: + skipped += 1 + continue + if media_source.max_video_length_seconds is not None: + if duration > media_source.max_video_length_seconds: + skipped += 1 + continue + + if upload_date and len(upload_date) >= 8: try: - release_year = int(upload_date[:4]) + year = int(upload_date[0:4]) + month = int(upload_date[4:6]) + day = int(upload_date[6:8]) + from datetime import date + video_date = date(year, month, day) + release_year = year + + if media_source.max_age_days is not None: + age_days = (date.today() - video_date).days + if age_days > media_source.max_age_days: + skipped += 1 + continue except ValueError: pass diff --git a/frontend/src/api.js b/frontend/src/api.js index 23219d1..f3a7700 100644 --- a/frontend/src/api.js +++ b/frontend/src/api.js @@ -26,6 +26,12 @@ export const removeSourceFromChannel = async (channelId, ruleId) => { await apiClient.delete(`/channel/${channelId}/sources/${ruleId}`); }; +// Channel Actions +export const fetchChannelStatus = async (channelId) => + (await apiClient.get(`/channel/${channelId}/status`)).data; +export const triggerChannelDownload = async (channelId) => + (await apiClient.post(`/channel/${channelId}/download`)).data; + // ── Schedule ────────────────────────────────────────────────────────────── export const fetchTemplates = async 
() => (await apiClient.get('/schedule/template/')).data; export const createTemplate = async (payload) => @@ -48,6 +54,7 @@ export const fetchScheduleGenerations = async (channelId) => // ── Media Sources (YouTube / local) ─────────────────────────────────────── export const fetchSources = async () => (await apiClient.get('/sources/')).data; export const createSource = async (payload) => (await apiClient.post('/sources/', payload)).data; +export const updateSource = async (id, payload) => (await apiClient.put(`/sources/${id}`, payload)).data; export const syncSource = async (sourceId, maxVideos) => { const url = maxVideos ? `/sources/${sourceId}/sync?max_videos=${maxVideos}` : `/sources/${sourceId}/sync`; return (await apiClient.post(url)).data; diff --git a/frontend/src/components/ChannelTuner.jsx b/frontend/src/components/ChannelTuner.jsx index fa17b09..b8acdc1 100644 --- a/frontend/src/components/ChannelTuner.jsx +++ b/frontend/src/components/ChannelTuner.jsx @@ -144,13 +144,37 @@ export default function ChannelTuner({ onOpenGuide }) { muted={!isCurrent} loop playsInline + ref={(video) => { + if (video && video.readyState >= 1) { // HAVE_METADATA or higher + const currentAiring = nowPlaying[chan.id]; + if (currentAiring && video.dataset.airingId !== String(currentAiring.id)) { + video.dataset.airingId = currentAiring.id; + if (currentAiring.exact_playback_offset_seconds !== undefined) { + let offset = currentAiring.exact_playback_offset_seconds; + if (video.duration && video.duration > 0 && !isNaN(video.duration)) { + offset = offset % video.duration; + } + video.currentTime = offset; + } + } + } + }} onLoadedMetadata={(e) => { const video = e.target; const currentAiring = nowPlaying[chan.id]; - if (currentAiring?.starts_at) { - const offsetSeconds = (Date.now() - new Date(currentAiring.starts_at).getTime()) / 1000; - if (offsetSeconds > 0 && video.duration > 0) { - video.currentTime = offsetSeconds % video.duration; + if (currentAiring && video.dataset.airingId 
!== String(currentAiring.id)) { + video.dataset.airingId = currentAiring.id; + if (currentAiring.exact_playback_offset_seconds !== undefined) { + let offset = currentAiring.exact_playback_offset_seconds; + if (video.duration && video.duration > 0 && !isNaN(video.duration)) { + offset = offset % video.duration; + } + video.currentTime = offset; + } else if (currentAiring.starts_at) { + const offsetSeconds = (Date.now() - new Date(currentAiring.starts_at).getTime()) / 1000; + if (offsetSeconds > 0 && video.duration > 0) { + video.currentTime = offsetSeconds % video.duration; + } } } }} diff --git a/frontend/src/components/Settings.jsx b/frontend/src/components/Settings.jsx index 6dd40e8..a065168 100644 --- a/frontend/src/components/Settings.jsx +++ b/frontend/src/components/Settings.jsx @@ -5,9 +5,10 @@ import { fetchChannelSources, assignSourceToChannel, removeSourceFromChannel, fetchTemplates, createTemplate, deleteTemplate, generateScheduleToday, fetchTemplateBlocks, createTemplateBlock, deleteTemplateBlock, - fetchSources, createSource, syncSource, deleteSource, + fetchSources, createSource, syncSource, deleteSource, updateSource, fetchLibraries, fetchCollections, fetchDownloadStatus, triggerCacheUpcoming, downloadItem, fetchDownloadProgress, + fetchChannelStatus, triggerChannelDownload, } from '../api'; // ─── Constants ──────────────────────────────────────────────────────────── @@ -164,10 +165,12 @@ function ChannelsTab() { const [templateBlocks, setTemplateBlocks] = useState({}); // { templateId: [blocks] } const [expandedId, setExpandedId] = useState(null); const [channelSources, setChannelSources] = useState({}); // { channelId: [rules] } + const [channelStatuses, setChannelStatuses] = useState({}); // { channelId: statusData } const [showForm, setShowForm] = useState(false); const [form, setForm] = useState({ name: '', slug: '', channel_number: '', description: '', library_id: '', owner_user_id: '' }); const [assignForm, setAssignForm] = useState({ 
source_id: '', rule_mode: 'allow', weight: 1.0, schedule_block_label: '' }); const [syncingId, setSyncingId] = useState(null); + const [downloadingId, setDownloadingId] = useState(null); const [feedback, setFeedback, ok, err] = useFeedback(); useEffect(() => { @@ -185,14 +188,24 @@ function ChannelsTab() { .catch(() => err('Failed to load channels')); }, []); + const loadChannelStatus = async (channelId) => { + try { + const status = await fetchChannelStatus(channelId); + setChannelStatuses(prev => ({ ...prev, [channelId]: status })); + } catch { err('Failed to load channel caching status'); } + }; + const toggleExpand = async (ch) => { const next = expandedId === ch.id ? null : ch.id; setExpandedId(next); - if (next && !channelSources[next]) { - try { - const rules = await fetchChannelSources(ch.id); - setChannelSources(cs => ({ ...cs, [ch.id]: rules })); - } catch { err('Failed to load channel sources'); } + if (next) { + if (!channelSources[next]) { + try { + const rules = await fetchChannelSources(ch.id); + setChannelSources(cs => ({ ...cs, [ch.id]: rules })); + } catch { err('Failed to load channel sources'); } + } + loadChannelStatus(next); } }; @@ -250,10 +263,21 @@ function ChannelsTab() { try { const result = await generateScheduleToday(ch.id); ok(`Schedule generated for "${ch.name}": ${result.airings_created} airings created.`); + if (expandedId === ch.id) loadChannelStatus(ch.id); } catch { err('Failed to generate schedule.'); } finally { setSyncingId(null); } }; + const handleDownload = async (ch) => { + setDownloadingId(ch.id); + try { + await triggerChannelDownload(ch.id); + ok(`Download triggered for "${ch.name}".`); + if (expandedId === ch.id) loadChannelStatus(ch.id); + } catch { err('Failed to trigger download.'); } + finally { setDownloadingId(null); } + }; + const handleSetFallback = async (ch, collectionId) => { try { const updated = await updateChannel(ch.id, { fallback_collection_id: collectionId ? 
parseInt(collectionId) : null }); @@ -327,6 +351,14 @@ function ChannelsTab() { > {syncingId === ch.id ? '...' : '▶ Schedule'} + handleDelete(ch)} /> {isExpanded ? '▲' : '▼'} @@ -336,6 +368,23 @@ function ChannelsTab() { {isExpanded && (
+ {/* ─── Channel Status ──────────────────────────────────── */} + {channelStatuses[ch.id] && ( +
+
Schedule Status (Next 24 Hours)
+
+ Total Upcoming: {channelStatuses[ch.id].total_upcoming_airings} + Cached: {channelStatuses[ch.id].total_cached_airings} ({Math.round(channelStatuses[ch.id].percent_cached)}%) +
+ {channelStatuses[ch.id].missing_items?.length > 0 && ( +
+                        Missing Downloads: {channelStatuses[ch.id].missing_items.slice(0, 3).map(i => i.title).join(', ')}
+                        {channelStatuses[ch.id].missing_items.length > 3 ? ` +${channelStatuses[ch.id].missing_items.length - 3} more` : ''}
+                      
+ )} +
+ )} + {/* ─── Fallback block selector ───────────────────────── */}
+ handleEdit(src)} title="Edit Source" /> {isYT(src) && (