# FastFlowWrapper / app.py

import os
import time
from typing import List

import requests
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

load_dotenv()

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Read the Flowise connection settings from the environment
FLOWISE_API_BASE_URL = os.getenv("FLOWISE_API_BASE_URL")
FLOWISE_CHATFLOW_ID = os.getenv("FLOWISE_CHATFLOW_ID")
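
# A typical .env for local use might look like this (the values below are
# placeholders, not real endpoints; adjust them to your Flowise instance):
#
#   FLOWISE_API_BASE_URL=http://localhost:3000/api/v1
#   FLOWISE_CHATFLOW_ID=00000000-0000-0000-0000-000000000000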


class ChatMessage(BaseModel):
    role: str
    content: str


class ChatCompletionRequest(BaseModel):
    model: str
    messages: List[ChatMessage]
    temperature: float = 0.7
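
# An example request body these models accept (illustrative only; the
# chatflow id is a placeholder):
#
#   {
#     "model": "<chatflow-id>",
#     "messages": [{"role": "user", "content": "Hello!"}],
#     "temperature": 0.7
#   }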
@app.get("/")
async def root():
return {"status": "FastFlowWrapper is running"}
@app.get("/v1/models")
async def get_models():
try:
# Запрашиваем список чатфлоу из Flowise
response = requests.get(f"{FLOWISE_API_BASE_URL}/chatflows")
response.raise_for_status()
chatflows = response.json()
# Преобразуем в формат OpenAI API
models = []
for chatflow in chatflows:
models.append({
"id": chatflow.get("id"),
"object": "model",
"created": 1677610602, # Фиксированная дата для примера
"owned_by": "flowise",
"permission": [],
"root": "flowise",
"parent": None
})
return {"object": "list", "data": models}
except requests.RequestException as e:
raise HTTPException(status_code=500, detail=str(e))
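
# For example, assuming the app listens on localhost:8000, listing models:
#
#   curl http://localhost:8000/v1/models
#
# should return an OpenAI-style payload such as
#   {"object": "list", "data": [{"id": "<chatflow-id>", "object": "model", ...}]}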
@app.post("/v1/chat/completions")
async def create_chat_completion(request: ChatCompletionRequest):
try:
# Получаем последнее сообщение из диалога
last_message = request.messages[-1]
if last_message.role != "user":
raise HTTPException(status_code=400, detail="Last message must be from user")
# Формируем запрос к Flowise
flowise_request = {
"question": last_message.content
}
# Отправляем запрос к Flowise
response = requests.post(
f"{FLOWISE_API_BASE_URL}/prediction/{FLOWISE_CHATFLOW_ID}",
json=flowise_request
)
response.raise_for_status()
# Преобразуем ответ в формат OpenAI API
flowise_response = response.json()
return {
"id": "chatcmpl-" + os.urandom(12).hex(),
"object": "chat.completion",
"created": int(response.elapsed.total_seconds()),
"model": request.model,
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": flowise_response.get("text", "")
},
"finish_reason": "stop"
}
],
"usage": {
"prompt_tokens": len(last_message.content),
"completion_tokens": len(str(flowise_response.get("text", ""))),
"total_tokens": len(last_message.content) + len(str(flowise_response.get("text", "")))
}
}
except requests.RequestException as e:
raise HTTPException(status_code=500, detail=str(e))
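

# A minimal sketch of calling this wrapper with the official OpenAI Python
# client (assumes the `openai` package is installed and the app listens on
# localhost:8000; the base_url and model id below are placeholders):
#
#   from openai import OpenAI
#
#   client = OpenAI(base_url="http://localhost:8000/v1", api_key="unused")
#   chat = client.chat.completions.create(
#       model="<chatflow-id>",
#       messages=[{"role": "user", "content": "Hello!"}],
#   )
#   print(chat.choices[0].message.content)

# Local entry point; port 7860 is assumed here since Hugging Face Spaces
# conventionally exposes it.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)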