"""DeepClaude API service entry point (FastAPI application)."""
import os
import sys
from dotenv import load_dotenv
# 加载环境变量
load_dotenv()
from fastapi import FastAPI, Depends, Request
from fastapi.responses import StreamingResponse
from fastapi.middleware.cors import CORSMiddleware
from app.utils.logger import logger
from app.utils.auth import verify_api_key
from app.deepclaude.deepclaude import DeepClaude
app = FastAPI(title="DeepClaude API")
# Read CORS configuration, API keys, endpoint URLs and model names from env vars
ALLOW_ORIGINS = os.getenv("ALLOW_ORIGINS", "*")
CLAUDE_API_KEY = os.getenv("CLAUDE_API_KEY")
CLAUDE_MODEL = os.getenv("CLAUDE_MODEL")
CLAUDE_PROVIDER = os.getenv("CLAUDE_PROVIDER", "anthropic") # Claude model provider, defaults to "anthropic"
CLAUDE_API_URL = os.getenv("CLAUDE_API_URL", "https://api.anthropic.com/v1/messages")
DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
DEEPSEEK_API_URL = os.getenv("DEEPSEEK_API_URL")
DEEPSEEK_MODEL = os.getenv("DEEPSEEK_MODEL")
# Case-insensitive flag: only the literal string "true" enables origin reasoning
IS_ORIGIN_REASONING = os.getenv("IS_ORIGIN_REASONING", "True").lower() == "true"
# CORS settings
allow_origins_list = ALLOW_ORIGINS.split(",") if ALLOW_ORIGINS else [] # split the comma-separated env string into a list of origins
app.add_middleware(
    CORSMiddleware,
    allow_origins=allow_origins_list,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Create the shared DeepClaude instance as a module-level global.
# Both upstream API keys are mandatory; abort startup if either is missing.
if not DEEPSEEK_API_KEY or not CLAUDE_API_KEY:
    logger.critical("请设置环境变量 CLAUDE_API_KEY 和 DEEPSEEK_API_KEY")
    sys.exit(1)
deep_claude = DeepClaude(
    DEEPSEEK_API_KEY,
    CLAUDE_API_KEY,
    DEEPSEEK_API_URL,
    CLAUDE_API_URL,
    CLAUDE_PROVIDER,
    IS_ORIGIN_REASONING
)
# Startup log lines (also serve to confirm the configured log level)
logger.debug("当前日志级别为 DEBUG")
logger.info("开始请求")
@app.get("/", dependencies=[Depends(verify_api_key)])
async def root():
    """Root endpoint; answers with a welcome message when the API key is valid."""
    logger.info("访问了根路径")
    welcome = {"message": "Welcome to DeepClaude API"}
    return welcome
@app.get("/v1/models")
async def list_models():
    """
    Return the list of available models.

    The payload follows the OpenAI API model-list schema.
    """
    # Single static permission entry for the one exposed model
    permission_entry = {
        "id": "modelperm-deepclaude",
        "object": "model_permission",
        "created": 1677610602,
        "allow_create_engine": False,
        "allow_sampling": True,
        "allow_logprobs": True,
        "allow_search_indices": False,
        "allow_view": True,
        "allow_fine_tuning": False,
        "organization": "*",
        "group": None,
        "is_blocking": False
    }
    deepclaude_model = {
        "id": "deepclaude",
        "object": "model",
        "created": 1677610602,
        "owned_by": "deepclaude",
        "permission": [permission_entry],
        "root": "deepclaude",
        "parent": None
    }
    return {"object": "list", "data": [deepclaude_model]}
@app.post("/v1/chat/completions", dependencies=[Depends(verify_api_key)])
async def chat_completions(request: Request):
    """Handle a chat-completion request with streaming or non-streaming output.

    The request body mirrors the OpenAI API and may contain:
    - messages: message list (required)
    - model: model name (optional)
    - stream: whether to stream the response (optional, defaults to True)
    - temperature: sampling randomness (optional)
    - top_p: nucleus sampling parameter (optional)
    - presence_penalty: topic-freshness penalty (optional)
    - frequency_penalty: repetition penalty (optional)
    """
    try:
        # 1. Parse the JSON body and pull out the required message list.
        body = await request.json()
        messages = body.get("messages")
        if not messages:
            # Fail fast with a clear message instead of passing None/[] downstream,
            # which would surface as an obscure error inside DeepClaude.
            raise ValueError("Field 'messages' is required and must be a non-empty list")
        # 2. Extract and validate the sampling parameters.
        model_arg = get_and_validate_params(body)
        stream = model_arg[4]  # stream flag is the last tuple element
        sampling_args = model_arg[:4]  # (temperature, top_p, presence_penalty, frequency_penalty)
        # 3. Dispatch on the stream flag.
        if stream:
            return StreamingResponse(
                deep_claude.chat_completions_with_stream(
                    messages=messages,
                    model_arg=sampling_args,
                    deepseek_model=DEEPSEEK_MODEL,
                    claude_model=CLAUDE_MODEL
                ),
                media_type="text/event-stream"
            )
        # Non-streaming: await the full response before returning it.
        response = await deep_claude.chat_completions_without_stream(
            messages=messages,
            model_arg=sampling_args,
            deepseek_model=DEEPSEEK_MODEL,
            claude_model=CLAUDE_MODEL
        )
        return response
    except Exception as e:
        logger.error(f"处理请求时发生错误: {e}")
        # NOTE(review): returns HTTP 200 with an error body for backward
        # compatibility; consider raising HTTPException(status_code=500) later.
        return {"error": str(e)}
def get_and_validate_params(body):
    """Extract and validate sampling parameters from the request body.

    Args:
        body: parsed JSON request body (dict).

    Returns:
        Tuple of (temperature, top_p, presence_penalty, frequency_penalty, stream).

    Raises:
        ValueError: if a Sonnet model is requested with a temperature
            outside the closed interval [0, 1].
    """
    # TODO: allow the defaults to be customized
    temperature: float = body.get("temperature", 0.5)
    top_p: float = body.get("top_p", 0.9)
    presence_penalty: float = body.get("presence_penalty", 0.0)
    frequency_penalty: float = body.get("frequency_penalty", 0.0)
    stream: bool = body.get("stream", True)
    if "sonnet" in body.get("model", ""):  # only Sonnet restricts temperature to [0, 1]
        # Accept both int and float (JSON "temperature": 1 parses as int);
        # exclude bool explicitly since isinstance(True, int) is True.
        if (isinstance(temperature, bool)
                or not isinstance(temperature, (int, float))
                or temperature < 0.0 or temperature > 1.0):
            raise ValueError("Sonnet 设定 temperature 必须在 0 到 1 之间")
    return (temperature, top_p, presence_penalty, frequency_penalty, stream)