# api/app.py
"""
HuggingFace Spaces entry point for Visualisable.ai
This runs the unified backend service for production deployment
"""
import os
import sys
import logging
from pathlib import Path
# Add backend to path
sys.path.append(str(Path(__file__).parent / "backend"))
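# Expected project layout (an assumption based on the path manipulation above
# and the import below):
#   api/
#     app.py                  <- this file
#     backend/
#       model_service.py      <- defines the FastAPI `app`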
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# HuggingFace Spaces specific configuration.
# The cache directories are redirected to /tmp (writable on Spaces) and are set
# before importing the model service so that transformers / huggingface_hub pick
# them up when they are first imported.
if os.getenv("SPACE_ID"):
    # Running on HuggingFace Spaces
    logger.info(f"Running on HuggingFace Spaces: {os.getenv('SPACE_ID')}")

    # Set cache directories for HuggingFace Spaces
    os.environ["TRANSFORMERS_CACHE"] = "/tmp/transformers_cache"
    os.environ["HF_HOME"] = "/tmp/hf_home"
    os.environ["HUGGINGFACE_HUB_CACHE"] = "/tmp/huggingface_hub"

    # Set production environment variables
    os.environ["ENVIRONMENT"] = "production"
    os.environ["MODEL_DEVICE"] = os.getenv("MODEL_DEVICE", "cpu")  # Use CPU for HF Spaces free tier

# Import the FastAPI app from model_service (after the cache configuration above)
from backend.model_service import app

if os.getenv("SPACE_ID"):
    # Enable CORS for the Space URL and the local frontend dev server
    space_host = os.getenv("SPACE_HOST", "")
    if space_host:
        from fastapi.middleware.cors import CORSMiddleware

        app.add_middleware(
            CORSMiddleware,
            allow_origins=[f"https://{space_host}", "http://localhost:3000"],
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )
# Export the app for Gradio or direct serving
if __name__ == "__main__":
    import uvicorn

    port = int(os.getenv("PORT", 7860))  # HuggingFace Spaces default port
    uvicorn.run(app, host="0.0.0.0", port=port)
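# Quick smoke test once the service is running (hypothetical URLs; FastAPI serves
# its interactive docs at /docs unless model_service disables them):
#
#   curl http://localhost:7860/docs
#   curl "https://$SPACE_HOST/docs"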