import logging
import os

import openai

logger = logging.getLogger("llm")
logging.basicConfig(
    format="%(asctime)s %(levelname)-8s %(message)s",
    level=logging.INFO,
    datefmt="%Y-%m-%d %H:%M:%S",
)
MISTRAL_KEY = os.getenv("MISTRAL_API_KEY")
MISTRAL_URL = "https://api.mistral.ai/v1"
OPENROUTER_KEY = os.getenv("OPENROUTER_API_KEY")
OPENROUTER_URL = "https://openrouter.ai/api/v1"

# Provider endpoint, API key, and provider-specific model identifier, keyed by a short alias.
model_creds = {
    "qwen2.5-vl-72b-instruct": {
        "url": OPENROUTER_URL,
        "key": OPENROUTER_KEY,
        "model": "qwen/qwen2.5-vl-72b-instruct:free",
    },
    "deepseek_v3": {
        "url": OPENROUTER_URL,
        "key": OPENROUTER_KEY,
        "model": "deepseek/deepseek-chat:free",
    },
    "llama-3.3-70b": {
        "url": OPENROUTER_URL,
        "key": OPENROUTER_KEY,
        "model": "meta-llama/llama-3.3-70b-instruct:free",
    },
    "mistral": {
        "url": MISTRAL_URL,
        "key": MISTRAL_KEY,
        "model": "mistral-small-latest",
    },
}
class LLM:
    """Thin wrapper around an OpenAI-compatible chat completions endpoint."""

    def __init__(self, model):
        creds = model_creds[model]
        self.url = creds["url"]
        self.key = creds["key"]
        self.client = openai.OpenAI(api_key=self.key, base_url=self.url)
        self.model = creds["model"]

    def chat(self, messages, temperature=0):
        """Send a list of chat messages and return the raw completion response."""
        logger.info("LLM call")
        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model,
            temperature=temperature,
        )
        logger.info("LLM call completed")
        return response
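

if __name__ == "__main__":
    # Minimal usage sketch: assumes the MISTRAL_API_KEY environment variable is
    # set and that the provider returns an OpenAI-style completion object. The
    # message format follows the standard OpenAI chat-completions schema.
    llm = LLM("mistral")
    result = llm.chat(
        [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Say hello in one sentence."},
        ]
    )
    print(result.choices[0].message.content)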