import os
import requests
import json
from openai import OpenAI
from anthropic import Anthropic
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv(override=True)
# Retrieve API keys from environment
openai_api_key = os.getenv("OPENAI_API_KEY")
anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")
# Warn if any API key is missing
if not openai_api_key:
    print("❌ OpenAI API Key is missing!")
if not anthropic_api_key:
    print("❌ Anthropic API Key is missing!")
# Model names
OPENAI_MODEL = "mistralai/Mistral-7B-Instruct-v0.3"
CLAUDE_MODEL = "mistralai/Mistral-7B-Instruct-v0.3"

# Create the API clients used below
openai = OpenAI(api_key=openai_api_key)
claude = Anthropic(api_key=anthropic_api_key)
# Call the OpenAI chat completions endpoint with prompt and system message
def get_gpt_completion(prompt, system_message):
    try:
        response = openai.chat.completions.create(
            model=OPENAI_MODEL,
            messages=[
                {"role": "system", "content": system_message},
                {"role": "user", "content": prompt}
            ],
            stream=False,
        )
        return response.choices[0].message.content
    except Exception as e:
        print(f"GPT error: {e}")
        raise
# Call Anthropic's Claude model with prompt and system message
def get_claude_completion(prompt, system_message):
    try:
        result = claude.messages.create(
            model=CLAUDE_MODEL,
            max_tokens=2000,
            system=system_message,
            messages=[{"role": "user", "content": prompt}]
        )
        return result.content[0].text
    except Exception as e:
        print(f"Claude error: {e}")
        raise
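
# Minimal usage sketch (an assumption, not part of the original script):
# it presumes valid keys are present in .env and that the model names above
# point at endpoints each client can actually reach. The demo prompt and
# system message below are placeholders for illustration only.
if __name__ == "__main__":
    demo_system = "You are a concise assistant."
    demo_prompt = "Explain in one sentence what an API key is."
    print(get_gpt_completion(demo_prompt, demo_system))
    print(get_claude_completion(demo_prompt, demo_system))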