import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import List

import dropbox

from modules.dropbox.client import dbx
from modules.video.model import VideoMetadata

# Module-level logger. Setting a level here is preserved from the original;
# NOTE(review): level configuration is usually left to the application entry point.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Dropbox client (already configured in modules.dropbox.client):
# dbx = dropbox.Dropbox(app_key=APP_KEY, app_secret=APP_SECRET, oauth2_refresh_token=REFRESH_TOKEN)

# In-process cache of downloaded metadata.
# Key: scripture_name; Value: {"timestamp": datetime, "data": list[VideoMetadata]}
_video_cache: dict[str, dict] = {}
CACHE_TTL = timedelta(hours=1)  # how long a cache entry is considered fresh


async def fetch_video_urls_from_dropbox(scripture_name: str) -> list[VideoMetadata]:
    """
    Fetch video metadata JSON from Dropbox, with a TTL in-process cache.

    Best-effort contract: on any Dropbox/parse error this never raises —
    it returns stale cached data when available, otherwise an empty list.

    :param scripture_name: Name of the scripture folder in Dropbox.
    :return: List of VideoMetadata objects (possibly stale or empty on error).
    """
    loop = asyncio.get_running_loop()

    # Serve from cache while the entry is still fresh.
    cache_entry = _video_cache.get(scripture_name)
    if cache_entry:
        age = datetime.now() - cache_entry["timestamp"]
        if age < CACHE_TTL:
            # Lazy %-args: the message is only formatted if INFO is enabled.
            logger.info(
                "Using cached video metadata for '%s' (age: %s)", scripture_name, age
            )
            return cache_entry["data"]

    logger.info("Fetching video metadata for '%s' from Dropbox", scripture_name)
    try:
        # The Dropbox SDK is synchronous; run the download in the default
        # executor so the event loop is not blocked.
        metadata, res = await loop.run_in_executor(
            None,
            dbx.files_download,
            f"/{scripture_name}/video_metadata.json",
        )
        data = res.content.decode("utf-8")
        json_list = json.loads(data)

        # Convert each JSON item to a VideoMetadata object.
        video_data = [VideoMetadata(**item) for item in json_list]

        # Refresh the cache with the newly fetched data.
        _video_cache[scripture_name] = {
            "timestamp": datetime.now(),
            "data": video_data,
        }
        logger.info("Cached %d videos for '%s'", len(video_data), scripture_name)
        return video_data
    except Exception:
        # logger.exception records the active traceback automatically.
        logger.exception(
            "Error fetching video metadata for '%s' from Dropbox", scripture_name
        )
        # Fall back to stale data rather than failing the caller (deliberate
        # best-effort behavior preserved from the original).
        if cache_entry:
            logger.warning(
                "Returning stale cached data for '%s' due to Dropbox error",
                scripture_name,
            )
            return cache_entry["data"]
        logger.warning(
            "No cached data available for '%s', returning empty list", scripture_name
        )
        return []