from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from ollama import chat
import subprocess

# Name of the Ollama model used by both the pull and the chat endpoints.
# Centralized so the two call sites cannot drift apart.
MODEL_NAME = "mohamedo/bignova"

# Initialize FastAPI app
app = FastAPI()

# Jinja2 template directory serving the chat UI.
templates = Jinja2Templates(directory="templates")


def pull_model() -> None:
    """Download the chat model via the `ollama` CLI if not already present.

    Runs `ollama pull`, which is a no-op when the model is already cached
    locally. A failed pull is reported but not raised, so the app still
    starts (chat requests would then fail at call time instead).
    """
    try:
        # argv list with shell=False (the default) — no shell-injection surface.
        subprocess.run(["ollama", "pull", MODEL_NAME], check=True)
        print("Model pulled successfully!")
    except subprocess.CalledProcessError:
        print("Error pulling model.")


# NOTE(review): this runs at import time, so importing the module blocks
# until the pull finishes. Kept as-is to match the original behavior;
# a FastAPI startup hook would avoid the import-time side effect.
pull_model()


# Root route to display the chat UI
@app.get("/", response_class=HTMLResponse)
async def home(request: Request):
    """Render the chat UI from templates/index.html."""
    return templates.TemplateResponse("index.html", {"request": request})


# Route to interact with Ollama's chatbot
@app.get("/chat/{message}")
async def chat_with_ai(message: str):
    """Send *message* to the model and return its reply.

    Accepts the user's message as a URL path parameter and returns a JSON
    object of the form {"response": <model reply text>}.
    """
    # Send a single-turn user message to the model and get the response.
    response = chat(
        model=MODEL_NAME,
        messages=[{"role": "user", "content": message}],
    )
    # Return only the assistant's text content to the client.
    return {"response": response["message"]["content"]}