# oversai-models/src/main.py
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import gradio as gr

from config import get_settings
from models.text_classification import TextClassificationModel
from api.models import router as models_router, registry

app = FastAPI(
    title=get_settings().app_name,
    description="API for managing and running ML models",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc",
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # TODO: restrict to trusted origins in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register models
text_classifier = TextClassificationModel()
registry.register_model(
    "text-classification",
    text_classifier,
    "/gradio/text-classification",
)

# Mount the models API router
app.include_router(
    models_router,
    prefix="/api/models",
    tags=["models"],
)

# Mount Gradio interface
app = gr.mount_gradio_app(
    app,
    text_classifier.create_interface(),
    path="/gradio/text-classification",
)
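# `create_interface` is expected to return a Gradio Blocks/Interface object,
# which `gr.mount_gradio_app` serves at the given path. A minimal hypothetical
# sketch of such a method (the real TextClassificationModel may differ):
#
#     import gradio as gr
#
#     class TextClassificationModel:
#         def create_interface(self) -> gr.Interface:
#             return gr.Interface(
#                 fn=self.predict,                  # hypothetical predict(text) -> label
#                 inputs=gr.Textbox(label="Text"),
#                 outputs=gr.Label(label="Predicted class"),
#             )
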
@app.get("/")
async def root():
    """Root endpoint returning basic API information."""
    return {
        "name": get_settings().app_name,
        "version": "1.0.0",
        "status": "running",
    }

if __name__ == "__main__":
    # Initialize settings
    settings = get_settings()
    uvicorn.run(
        "main:app",
        host=settings.host,
        port=settings.port,
        reload=settings.debug,
    )
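
# ---------------------------------------------------------------------------
# Run locally with `python main.py` from src/, or `uvicorn main:app --reload`.
#
# The config module is not included in this file. A minimal sketch of what
# `config.get_settings` could look like, assuming a pydantic BaseSettings
# class exposing the fields used above (app_name, host, port, debug); the
# defaults below are hypothetical and the real implementation may differ:
#
#     from functools import lru_cache
#     from pydantic import BaseSettings  # pydantic_settings.BaseSettings on Pydantic v2
#
#     class Settings(BaseSettings):
#         app_name: str = "OversAI Models API"  # hypothetical default
#         host: str = "0.0.0.0"
#         port: int = 7860                      # hypothetical default (HF Spaces convention)
#         debug: bool = False
#
#     @lru_cache()
#     def get_settings() -> Settings:
#         # Cached so every caller shares one Settings instance.
#         return Settings()
# ---------------------------------------------------------------------------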