# Provenance: uploaded by "chipling" (Hugging Face commit af5e18d, "Upload 17 files").
import asyncio
import json
import random
import time

import httpx
class XaiAPI:
    """Proxy for the ai-sdk-starter-xai Vercel demo endpoint.

    Streams chat completions from the upstream AI-SDK demo and re-emits
    them as OpenAI-style ``chat.completion.chunk`` SSE lines.
    """

    # Browser-like headers; the upstream demo expects a normal browser origin.
    headers = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.9,ja;q=0.8',
        'content-type': 'application/json',
        'origin': 'https://ai-sdk-starter-xai.vercel.app',
        'referer': 'https://ai-sdk-starter-xai.vercel.app/',
        'sec-ch-ua': '"Google Chrome";v="135", "Not-A.Brand";v="8", "Chromium";v="135"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36'
    }

    def __init__(self):
        # Upstream chat endpoint.
        self.base_url = "https://ai-sdk-starter-xai.vercel.app/api/chat"

    def get_model_list(self):
        """Return the model identifiers accepted by the xAI starter endpoint."""
        return ["grok-3-mini", "grok-2-1212", "grok-3", "grok-3-fast", "grok-3-mini-fast"]

    @staticmethod
    def convert(messages):
        """Convert OpenAI-style messages to the Vercel AI-SDK message shape.

        Each input message becomes ``{"role", "content", "parts"}`` where
        ``content`` is the flattened text and ``parts`` is the AI-SDK part
        list; assistant messages are prefixed with a ``step-start`` part.

        Fixes: declared ``@staticmethod`` (it was defined without ``self``,
        so it only worked when called via the class object), and list-typed
        ``content`` is copied so the ``step-start`` insert below can no
        longer mutate the caller's message data.
        """
        converted = []
        for message in messages:
            role = message.get("role", "user")
            content = message.get("content", "")
            if isinstance(content, list):
                # Copy: the insert() below must not alias the caller's list.
                parts = list(content)
                text_content = "\n".join(
                    p.get("text", "") for p in content if p.get("type") == "text"
                )
            else:
                text_content = str(content)
                parts = [{"type": "text", "text": text_content}]
            if role == "assistant":
                parts.insert(0, {"type": "step-start"})
            converted.append({
                "role": role,
                "content": text_content,
                "parts": parts
            })
        return converted

    async def generate(self, json_data: dict):
        """Async generator yielding OpenAI-style SSE ``data:`` lines.

        json_data: ``{"messages": [...], "model": optional model id}``.
        Yields content chunks for upstream ``0:`` lines, a usage chunk plus
        ``[DONE]`` for the ``d:`` terminator, and a bracketed error string
        on HTTP or connection failure.
        """
        messages = XaiAPI.convert(json_data["messages"])
        model = json_data.get("model", "grok-2-1212")
        request_data = {
            "id": "".join(random.choices("0123456789abcdef", k=16)),
            "messages": messages,
            "selectedModel": model,
        }
        chunk_id = "chipling-xai-" + "".join(random.choices("0123456789abcdef", k=32))
        # Fix: OpenAI's `created` field is a Unix epoch timestamp; the old
        # asyncio.get_event_loop().time() returned monotonic loop time (and
        # that accessor is deprecated outside a running loop).
        created = int(time.time())
        total_tokens = 0
        try:
            async with httpx.AsyncClient(timeout=None) as client:
                async with client.stream(
                    "POST",
                    self.base_url,  # fix: was a duplicated hard-coded URL
                    headers=XaiAPI.headers,
                    json=request_data
                ) as request_ctx:
                    if request_ctx.status_code != 200:
                        yield f"data: [Unexpected status code: {request_ctx.status_code}]\n\n"
                        return
                    async for line in request_ctx.aiter_lines():
                        if not line:
                            continue
                        if line.startswith('0:'):
                            # `0:` lines carry a JSON-encoded string. Decode it
                            # properly: the old quote-stripping plus
                            # replace('\\', '') destroyed escaped quotes, tabs
                            # and unicode escapes in the payload.
                            raw = line[2:].strip()
                            try:
                                text = json.loads(raw)
                            except json.JSONDecodeError:
                                text = raw  # best effort for non-JSON payloads
                            response = {
                                "id": chunk_id,
                                "object": "chat.completion.chunk",
                                "created": created,
                                "model": model,
                                "choices": [{
                                    "index": 0,
                                    "text": text,
                                    "logprobs": None,
                                    "finish_reason": None
                                }],
                                "usage": None
                            }
                            yield f"data: {json.dumps(response)}\n\n"
                            total_tokens += 1
                        elif line.startswith('d:'):
                            final = {
                                "id": chunk_id,
                                "object": "chat.completion.chunk",
                                "created": created,
                                "model": model,
                                "choices": [],
                                # NOTE: counts are approximations — messages vs
                                # stream chunks, not real token counts.
                                "usage": {
                                    "prompt_tokens": len(messages),
                                    "completion_tokens": total_tokens,
                                    "total_tokens": len(messages) + total_tokens
                                }
                            }
                            yield f"data: {json.dumps(final)}\n\n"
                            yield "data: [DONE]\n\n"
                            return
        except Exception as e:
            yield f"data: [Connection error: {str(e)}]\n\n"
class GroqAPI:
    """Proxy for the ai-sdk-starter-groq Vercel demo endpoint.

    Streams chat completions from the upstream AI-SDK demo and re-emits
    them as OpenAI-style ``chat.completion.chunk`` SSE lines.
    """

    # Browser-like headers; the upstream demo expects a normal browser origin.
    headers = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.9,ja;q=0.8',
        'content-type': 'application/json',
        'origin': 'https://ai-sdk-starter-groq.vercel.app',
        'priority': 'u=1, i',
        'referer': 'https://ai-sdk-starter-groq.vercel.app/',
        'sec-ch-ua': '"Google Chrome";v="135", "Not-A.Brand";v="8", "Chromium";v="135"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
    }

    def __init__(self):
        # Upstream chat endpoint.
        self.base_url = "https://ai-sdk-starter-groq.vercel.app/api/chat"

    def get_model_list(self):
        """Return the model identifiers accepted by the Groq starter endpoint."""
        return [
            'meta-llama/llama-4-scout-17b-16e-instruct',
            'llama-3.1-8b-instant',
            'llama-3.3-70b-versatile',
            'deepseek-r1-distill-llama-70b',
        ]

    async def generate(self, json_data: dict):
        """Async generator yielding OpenAI-style SSE ``data:`` lines.

        json_data: ``{"messages": [...], "model": optional model id}``.
        Yields content chunks for upstream ``0:`` lines, a usage chunk plus
        ``[DONE]`` for the ``d:`` terminator, and a bracketed error string
        on HTTP or connection failure.
        """
        # The message wire format is identical across the starter apps, so
        # the XaiAPI converter is reused.
        messages = XaiAPI.convert(json_data["messages"])
        model = json_data.get("model", "deepseek-r1-distill-llama-70b")
        request_data = {
            "id": "".join(random.choices("0123456789abcdef", k=16)),
            "messages": messages,
            "selectedModel": model,
        }
        chunk_id = "chipling-groq-" + "".join(random.choices("0123456789abcdef", k=32))
        # Fix: OpenAI's `created` field is a Unix epoch timestamp; the old
        # asyncio.get_event_loop().time() returned monotonic loop time.
        created = int(time.time())
        total_tokens = 0
        try:
            async with httpx.AsyncClient(timeout=None) as client:
                async with client.stream(
                    "POST",
                    self.base_url,  # fix: was a duplicated hard-coded URL
                    headers=GroqAPI.headers,
                    json=request_data
                ) as request_ctx:
                    # fix: removed stray debug print of the status code
                    if request_ctx.status_code != 200:
                        yield f"data: [Unexpected status code: {request_ctx.status_code}]\n\n"
                        return
                    async for line in request_ctx.aiter_lines():
                        if not line:
                            continue
                        if line.startswith('0:'):
                            # `0:` lines carry a JSON-encoded string. Decode it
                            # properly: the old quote-stripping plus
                            # replace('\\', '') destroyed escaped quotes, tabs
                            # and unicode escapes in the payload.
                            raw = line[2:].strip()
                            try:
                                text = json.loads(raw)
                            except json.JSONDecodeError:
                                text = raw  # best effort for non-JSON payloads
                            response = {
                                "id": chunk_id,
                                "object": "chat.completion.chunk",
                                "created": created,
                                "model": model,
                                "choices": [{
                                    "index": 0,
                                    "text": text,
                                    "logprobs": None,
                                    "finish_reason": None
                                }],
                                "usage": None
                            }
                            yield f"data: {json.dumps(response)}\n\n"
                            total_tokens += 1
                        elif line.startswith('d:'):
                            final = {
                                "id": chunk_id,
                                "object": "chat.completion.chunk",
                                "created": created,
                                "model": model,
                                "choices": [],
                                # NOTE: counts are approximations — messages vs
                                # stream chunks, not real token counts.
                                "usage": {
                                    "prompt_tokens": len(messages),
                                    "completion_tokens": total_tokens,
                                    "total_tokens": len(messages) + total_tokens
                                }
                            }
                            yield f"data: {json.dumps(final)}\n\n"
                            yield "data: [DONE]\n\n"
                            return
        except Exception as e:
            yield f"data: [Connection error: {str(e)}]\n\n"
class DeepinfraAPI:
    """Proxy for the ai-sdk-starter-deepinfra Vercel demo endpoint.

    Streams chat completions from the upstream AI-SDK demo and re-emits
    them as OpenAI-style ``chat.completion.chunk`` SSE lines.
    """

    # Browser-like headers; the upstream demo expects a normal browser origin.
    headers = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.9,ja;q=0.8',
        'content-type': 'application/json',
        'origin': 'https://ai-sdk-starter-deepinfra.vercel.app',
        'priority': 'u=1, i',
        'referer': 'https://ai-sdk-starter-deepinfra.vercel.app/',
        'sec-ch-ua': '"Google Chrome";v="135", "Not-A.Brand";v="8", "Chromium";v="135"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36'
    }

    def __init__(self):
        # Upstream chat endpoint.
        self.base_url = "https://ai-sdk-starter-deepinfra.vercel.app/api/chat"

    def get_model_list(self):
        """Return the model identifiers accepted by the DeepInfra starter endpoint."""
        return [
            "deepseek-ai/DeepSeek-R1",
            "meta-llama/Llama-3.3-70B-Instruct-Turbo",
            "Qwen/Qwen2.5-72B-Instruct",
        ]

    async def generate(self, json_data: dict):
        """Async generator yielding OpenAI-style SSE ``data:`` lines.

        json_data: ``{"messages": [...], "model": optional model id}``.
        Yields content chunks for upstream ``0:`` lines, a usage chunk plus
        ``[DONE]`` for the ``d:`` terminator, and a bracketed error string
        on HTTP or connection failure.
        """
        # The message wire format is identical across the starter apps, so
        # the XaiAPI converter is reused.
        messages = XaiAPI.convert(json_data["messages"])
        # Fix: previously `selectedModel` had no default and was sent as None
        # when the caller omitted "model", while the emitted chunks defaulted
        # to DeepSeek-R1; use one consistent default for both.
        model = json_data.get("model", "deepseek-ai/DeepSeek-R1")
        request_data = {
            "id": "".join(random.choices("0123456789abcdef", k=16)),
            "messages": messages,
            "selectedModel": model,
        }
        chunk_id = "chipling-deepinfra-" + "".join(random.choices("0123456789abcdef", k=32))
        # Fix: OpenAI's `created` field is a Unix epoch timestamp; the old
        # asyncio.get_event_loop().time() returned monotonic loop time.
        created = int(time.time())
        total_tokens = 0
        try:
            async with httpx.AsyncClient(timeout=None) as client:
                async with client.stream(
                    "POST",
                    self.base_url,
                    headers=DeepinfraAPI.headers,
                    json=request_data
                ) as request_ctx:
                    if request_ctx.status_code != 200:
                        yield f"data: [Unexpected status code: {request_ctx.status_code}]\n\n"
                        return
                    async for line in request_ctx.aiter_lines():
                        if not line:
                            continue
                        if line.startswith('0:'):
                            # `0:` lines carry a JSON-encoded string. Decode it
                            # properly: the old quote-stripping plus
                            # replace('\\', '') destroyed escaped quotes, tabs
                            # and unicode escapes in the payload.
                            raw = line[2:].strip()
                            try:
                                text = json.loads(raw)
                            except json.JSONDecodeError:
                                text = raw  # best effort for non-JSON payloads
                            response = {
                                "id": chunk_id,
                                "object": "chat.completion.chunk",
                                "created": created,
                                "model": model,
                                "choices": [{
                                    "index": 0,
                                    "text": text,
                                    "logprobs": None,
                                    "finish_reason": None
                                }],
                                "usage": None
                            }
                            yield f"data: {json.dumps(response)}\n\n"
                            total_tokens += 1
                        elif line.startswith('d:'):
                            final = {
                                "id": chunk_id,
                                "object": "chat.completion.chunk",
                                "created": created,
                                "model": model,
                                "choices": [],
                                # NOTE: counts are approximations — messages vs
                                # stream chunks, not real token counts.
                                "usage": {
                                    "prompt_tokens": len(messages),
                                    "completion_tokens": total_tokens,
                                    "total_tokens": len(messages) + total_tokens
                                }
                            }
                            yield f"data: {json.dumps(final)}\n\n"
                            yield "data: [DONE]\n\n"
                            return
        except Exception as e:
            yield f"data: [Connection error: {str(e)}]\n\n"