nitrox committed on
Commit
dc0278a
·
verified ·
1 Parent(s): 319adbb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +89 -1
app.py CHANGED
@@ -1,7 +1,11 @@
1
  from fastapi import FastAPI, HTTPException
2
  from fastapi.middleware.cors import CORSMiddleware
 
3
  import os
4
  from dotenv import load_dotenv
 
 
 
5
 
6
  load_dotenv()
7
 
@@ -14,6 +18,90 @@ app.add_middleware(
14
  allow_headers=["*"]
15
  )
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  @app.get("/")
18
  async def root():
19
- return {"status": "FastFlowWrapper is running"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
import time
from typing import Any, Dict, List

import requests
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, StreamingResponse
from pydantic import BaseModel
9
 
10
  load_dotenv()
11
 
 
18
  allow_headers=["*"]
19
  )
20
 
21
# Flowise connection settings, read from the environment (populated from .env
# by load_dotenv() above). Both are None when the variable is unset —
# NOTE(review): the endpoints below do not guard against that; confirm the
# deployment always provides them.
FLOWISE_API_BASE_URL = os.getenv("FLOWISE_API_BASE_URL")
FLOWISE_CHATFLOW_ID = os.getenv("FLOWISE_CHATFLOW_ID")
24
+
25
class ChatMessage(BaseModel):
    """One message of an OpenAI-style chat conversation.

    Fields mirror the OpenAI chat API message object.
    """

    # Speaker of the message; /v1/chat/completions requires the last one
    # to be "user".
    role: str
    # The message text forwarded to Flowise as the "question".
    content: str
28
+
29
class ChatCompletionRequest(BaseModel):
    """Request body for the OpenAI-compatible ``/v1/chat/completions`` route."""

    # Model identifier echoed back in the response; not used to pick a
    # chatflow (the chatflow comes from FLOWISE_CHATFLOW_ID).
    model: str
    # Full conversation; only the trailing user message is forwarded.
    messages: List[ChatMessage]
    # Accepted for API compatibility; not passed through to Flowise.
    temperature: float = 0.7
33
+
34
@app.get("/")
async def root():
    """Liveness probe: report that the wrapper service is up."""
    status_payload = {"status": "FastFlowWrapper is running"}
    return status_payload
37
+
38
@app.get("/v1/models")
async def get_models():
    """List Flowise chatflows in OpenAI ``/v1/models`` response format.

    Returns:
        dict: ``{"object": "list", "data": [...]}`` where each entry is an
        OpenAI-style model record whose ``id`` is the Flowise chatflow id.

    Raises:
        HTTPException: 500 when the Flowise API request fails.
    """
    try:
        # Fetch the chatflow list from Flowise.
        # BUG FIX: `requests` is blocking and had no timeout — an unresponsive
        # Flowise instance would stall this coroutine (and the event loop)
        # indefinitely.
        response = requests.get(f"{FLOWISE_API_BASE_URL}/chatflows", timeout=30)
        response.raise_for_status()
        chatflows = response.json()

        # Map each chatflow onto an OpenAI-compatible model object.
        models = [
            {
                "id": chatflow.get("id"),
                "object": "model",
                "created": 1677610602,  # fixed placeholder timestamp
                "owned_by": "flowise",
                "permission": [],
                "root": "flowise",
                "parent": None,
            }
            for chatflow in chatflows
        ]
        return {"object": "list", "data": models}
    except requests.RequestException as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
62
+
63
@app.post("/v1/chat/completions")
async def create_chat_completion(request: ChatCompletionRequest):
    """Proxy an OpenAI-style chat completion to the configured Flowise chatflow.

    Only the trailing user message is forwarded to Flowise (as ``question``);
    earlier messages in the conversation are ignored by this wrapper.

    Raises:
        HTTPException: 400 when ``messages`` is empty or does not end with a
            user message; 500 when the Flowise API call fails.
    """
    # BUG FIX: an empty `messages` list previously raised an unhandled
    # IndexError (HTTP 500); reject it explicitly as a client error.
    if not request.messages:
        raise HTTPException(status_code=400, detail="messages must not be empty")

    last_message = request.messages[-1]
    if last_message.role != "user":
        raise HTTPException(status_code=400, detail="Last message must be from user")

    try:
        # Forward the question to Flowise.
        # BUG FIX: `requests` is blocking and had no timeout — a hung Flowise
        # instance would stall the event loop indefinitely.
        response = requests.post(
            f"{FLOWISE_API_BASE_URL}/prediction/{FLOWISE_CHATFLOW_ID}",
            json={"question": last_message.content},
            timeout=120,
        )
        response.raise_for_status()
        flowise_response = response.json()
    except requests.RequestException as e:
        raise HTTPException(status_code=500, detail=str(e)) from e

    answer = flowise_response.get("text", "")
    return {
        "id": "chatcmpl-" + os.urandom(12).hex(),
        "object": "chat.completion",
        # BUG FIX: "created" must be the Unix timestamp of the completion,
        # per the OpenAI response format; the original used
        # response.elapsed.total_seconds() — the HTTP round-trip duration
        # (≈0), not a point in time.
        "created": int(time.time()),
        "model": request.model,
        "choices": [
            {
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": answer,
                },
                "finish_reason": "stop",
            }
        ],
        # Flowise does not report token usage; character counts are a rough
        # stand-in, kept from the original implementation.
        "usage": {
            "prompt_tokens": len(last_message.content),
            "completion_tokens": len(str(answer)),
            "total_tokens": len(last_message.content) + len(str(answer)),
        },
    }