feat(main): commit
This commit is contained in:
140
core/services/cache.py
Normal file
140
core/services/cache.py
Normal file
@@ -0,0 +1,140 @@
|
||||
"""
|
||||
Cache service — reusable download/prune logic used by both:
|
||||
- python manage.py cache_upcoming
|
||||
- POST /api/sources/cache-upcoming
|
||||
"""
|
||||
|
||||
import logging
|
||||
import pathlib
|
||||
from datetime import timedelta
|
||||
|
||||
from django.utils import timezone
|
||||
|
||||
from core.models import Airing, MediaItem, MediaSource
|
||||
from core.services.youtube import download_for_airing, YOUTUBE_SOURCE_TYPES
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def run_cache(hours: int = 24, prune_only: bool = False) -> dict:
    """
    Scan Airings in the next `hours` hours, download any uncached YouTube
    videos, and prune stale local files.

    Args:
        hours: Size of the look-ahead window, in hours, measured from now.
        prune_only: When True, skip all downloads and only prune stale files.

    Returns:
        A summary dict suitable for JSON serialization, with keys
        ``pruned``, ``downloaded``, ``already_cached``, ``failed`` and
        ``items`` (a per-item status list).
    """
    now = timezone.now()
    window_end = now + timedelta(hours=hours)

    # ── Prune first ────────────────────────────────────────────────────────
    pruned = _prune(now)

    if prune_only:
        return {"pruned": pruned, "downloaded": 0, "already_cached": 0, "failed": 0, "items": []}

    # ── Find upcoming and currently playing YouTube-backed airings ──────────
    # ends_at > now keeps currently-playing airings in scope;
    # starts_at <= window_end bounds the look-ahead.
    upcoming = (
        Airing.objects
        .filter(ends_at__gt=now, starts_at__lte=window_end)
        .select_related("media_item__media_source")
    )

    # De-duplicate by MediaItem pk: several airings may share one item.
    youtube_items: dict[int, MediaItem] = {}
    for airing in upcoming:
        item = airing.media_item
        if item.media_source and item.media_source.source_type in YOUTUBE_SOURCE_TYPES:
            youtube_items[item.pk] = item

    downloaded = already_cached = failed = 0
    items_status: list[dict] = []

    for item in youtube_items.values():
        # Skip if already cached on disk (the DB path alone is not trusted).
        if item.cached_file_path and pathlib.Path(item.cached_file_path).exists():
            already_cached += 1
            items_status.append({
                "id": item.pk,
                "title": item.title,
                "status": "cached",
                "path": item.cached_file_path,
            })
            continue

        # Keep the try body minimal: only the call that can actually raise.
        try:
            local_path = download_for_airing(item)
        except Exception as exc:
            # One failed download must not abort the rest of the batch.
            failed += 1
            items_status.append({
                "id": item.pk,
                "title": item.title,
                "status": "failed",
                "error": str(exc),
            })
            # logger.exception records the full traceback, unlike logger.error.
            logger.exception("download_for_airing(%s) failed: %s", item.pk, exc)
        else:
            downloaded += 1
            items_status.append({
                "id": item.pk,
                "title": item.title,
                "status": "downloaded",
                "path": str(local_path),
            })

    logger.info(
        "run_cache(hours=%d): pruned=%d downloaded=%d cached=%d failed=%d",
        hours, pruned, downloaded, already_cached, failed,
    )
    return {
        "pruned": pruned,
        "downloaded": downloaded,
        "already_cached": already_cached,
        "failed": failed,
        "items": items_status,
    }
|
||||
|
||||
|
||||
def _prune(now) -> int:
    """
    Delete local cache files whose airings have all ended.

    Args:
        now: Timezone-aware cutoff; items with any airing ending at or
            after this moment are kept.

    Returns:
        Number of files actually deleted from disk.
    """
    pruned = 0
    # Items with a recorded cache path but no airing still ending in the future.
    stale = MediaItem.objects.filter(cached_file_path__isnull=False).exclude(
        airing__ends_at__gte=now
    )
    for item in stale:
        path = pathlib.Path(item.cached_file_path)
        if path.exists():
            try:
                path.unlink()
            except OSError as exc:
                # Leave the DB fields intact so a later run retries the
                # delete, instead of orphaning the file on disk with no
                # record of it (the original cleared the path regardless).
                logger.warning("Could not delete %s: %s", path, exc)
                continue
            pruned += 1
        item.cached_file_path = None
        item.cache_expires_at = None
        item.save(update_fields=["cached_file_path", "cache_expires_at"])
    return pruned
|
||||
|
||||
|
||||
def get_download_status() -> dict:
    """
    Return a snapshot of all YouTube MediaItems and their cache status,
    useful for rendering the Downloads UI.

    Returns:
        Dict with ``items`` (one entry per YouTube-backed MediaItem),
        ``total`` (item count) and ``cached`` (how many exist on disk).
    """
    queryset = (
        MediaItem.objects
        .filter(media_source__source_type__in=YOUTUBE_SOURCE_TYPES)
        .select_related("media_source")
        .order_by("media_source__name", "title")
    )

    snapshot = []
    cached_count = 0
    for media_item in queryset:
        # Trust the filesystem, not just the DB path, when reporting "cached".
        local_path = media_item.cached_file_path
        is_cached = bool(local_path and pathlib.Path(local_path).exists())
        if is_cached:
            cached_count += 1
        snapshot.append({
            "id": media_item.pk,
            "title": media_item.title,
            "source_name": media_item.media_source.name,
            "source_id": media_item.media_source.id,
            "youtube_video_id": media_item.youtube_video_id,
            "runtime_seconds": media_item.runtime_seconds,
            "cached": is_cached,
            "cached_path": local_path if is_cached else None,
        })
    return {"items": snapshot, "total": len(snapshot), "cached": cached_count}
|
||||
Reference in New Issue
Block a user