File size: 3,592 Bytes
cfda68d
 
dc0278a
cfda68d
 
dc0278a
 
 
cfda68d
 
 
 
 
 
 
 
 
319adbb
cfda68d
 
dc0278a
 
 
 
 
 
 
 
 
 
 
 
 
cfda68d
 
dc0278a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import os
import time
from typing import Any, Dict, List

import requests
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, StreamingResponse
from pydantic import BaseModel

# Load configuration from a local .env file, if one exists.
load_dotenv()

app = FastAPI()

# Wide-open CORS: any origin, method, and header is allowed.
# NOTE(review): fine for local/dev use; tighten allow_origins before
# exposing this service publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"]
)

# Flowise connection settings, read from the environment.
# Both are None if unset — requests to Flowise will then fail at call time.
FLOWISE_API_BASE_URL = os.getenv("FLOWISE_API_BASE_URL")
FLOWISE_CHATFLOW_ID = os.getenv("FLOWISE_CHATFLOW_ID")

class ChatMessage(BaseModel):
    """A single message in an OpenAI-style chat conversation."""
    # Sender role — the completion endpoint requires "user" for the last
    # message; other accepted values are not validated here.
    role: str
    # Plain-text message body.
    content: str

class ChatCompletionRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-compatible)."""
    # Model identifier; only echoed back in the response, not used for routing.
    model: str
    # Full conversation history; only the last message is forwarded to Flowise.
    messages: List[ChatMessage]
    # Accepted for OpenAI API compatibility; not forwarded to Flowise.
    temperature: float = 0.7

@app.get("/")
async def root():
    """Health-check endpoint: confirm the wrapper is up."""
    health_payload = {"status": "FastFlowWrapper is running"}
    return health_payload

@app.get("/v1/models")
async def get_models():
    """List Flowise chatflows in OpenAI `/v1/models` response format.

    Proxies ``GET {FLOWISE_API_BASE_URL}/chatflows`` and maps each chatflow
    onto an OpenAI model object (the chatflow id becomes the model id).

    Raises:
        HTTPException: 500 when the Flowise request fails or times out.
    """
    try:
        # Fetch the chatflow list from Flowise. The explicit timeout is a
        # fix: without it a down/unreachable Flowise hangs this request
        # forever.
        response = requests.get(f"{FLOWISE_API_BASE_URL}/chatflows", timeout=30)
        response.raise_for_status()
        chatflows = response.json()

        # Map each chatflow onto the OpenAI model-object schema.
        models = [
            {
                "id": chatflow.get("id"),
                "object": "model",
                "created": 1677610602,  # fixed placeholder creation date
                "owned_by": "flowise",
                "permission": [],
                "root": "flowise",
                "parent": None,
            }
            for chatflow in chatflows
        ]

        return {"object": "list", "data": models}
    except requests.RequestException as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/v1/chat/completions")
async def create_chat_completion(request: ChatCompletionRequest):
    """OpenAI-compatible chat completion backed by a Flowise chatflow.

    Only the last message is forwarded to Flowise (its prediction API takes a
    single "question" string); the rest of the history is ignored. The last
    message must have role "user".

    Raises:
        HTTPException: 400 if ``messages`` is empty or the last message is
            not from the user; 500 when the Flowise request fails or times
            out.
    """
    # Validate input before contacting Flowise. An empty history previously
    # crashed with an unhandled IndexError; now it is a clean 400.
    if not request.messages:
        raise HTTPException(status_code=400, detail="Last message must be from user")
    last_message = request.messages[-1]
    if last_message.role != "user":
        raise HTTPException(status_code=400, detail="Last message must be from user")

    try:
        # Flowise prediction payload: a single question string.
        flowise_request = {
            "question": last_message.content
        }

        # Call Flowise. The explicit timeout is a fix: without it an
        # unresponsive Flowise hangs this request forever.
        response = requests.post(
            f"{FLOWISE_API_BASE_URL}/prediction/{FLOWISE_CHATFLOW_ID}",
            json=flowise_request,
            timeout=120,
        )
        response.raise_for_status()

        # Convert the Flowise answer into the OpenAI chat-completion schema.
        flowise_response = response.json()
        answer = str(flowise_response.get("text", ""))
        return {
            "id": "chatcmpl-" + os.urandom(12).hex(),
            "object": "chat.completion",
            # BUG FIX: "created" must be the Unix timestamp of creation; the
            # old code used the HTTP round-trip time in seconds (always ~0).
            "created": int(time.time()),
            "model": request.model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": flowise_response.get("text", "")
                    },
                    "finish_reason": "stop"
                }
            ],
            # NOTE: character counts stand in for real token counts — a
            # rough approximation, kept for schema compatibility.
            "usage": {
                "prompt_tokens": len(last_message.content),
                "completion_tokens": len(answer),
                "total_tokens": len(last_message.content) + len(answer)
            }
        }
    except requests.RequestException as e:
        raise HTTPException(status_code=500, detail=str(e))