File size: 1,624 Bytes
9de1f87
 
790e088
9de1f87
 
 
 
 
 
 
 
790e088
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9de1f87
 
790e088
 
 
9de1f87
 
790e088
9de1f87
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import openai
import logging
import os

# Module logger plus one-time root logging configuration.
# NOTE(review): logger is named "llm" rather than __name__ — presumably so other
# modules can fetch the same logger by name; confirm before renaming.
logger = logging.getLogger("llm")
logging.basicConfig(
    format="%(asctime)s %(levelname)-8s %(message)s",
    level=logging.INFO,
    datefmt="%Y-%m-%d %H:%M:%S",
)

# Provider credentials pulled from the environment. Either may be None if the
# variable is unset; openai.OpenAI will then fail at request time, not here.
MISTRAL_KEY=os.getenv('MISTRAL_API_KEY')
MISTRAL_URL="https://api.mistral.ai/v1"

OPENROUTER_KEY = os.getenv('OPENROUTER_API_KEY')
OPENROUTER_URL = "https://openrouter.ai/api/v1"

# Registry of supported models, keyed by the short name passed to LLM(model).
# Each entry maps to the base URL, API key, and provider-specific model id
# used to build the OpenAI-compatible client.
model_creds = {
    'qwen2.5-vl-72b-instruct': {
        "url": OPENROUTER_URL,
        "key": OPENROUTER_KEY,
        "model": "qwen/qwen2.5-vl-72b-instruct:free"
    },
    'deepseek_v3': {
        "url": OPENROUTER_URL,
        "key": OPENROUTER_KEY,
        "model": "deepseek/deepseek-chat:free"
    },
    'llama-3.3-70b': {
        "url": OPENROUTER_URL,
        "key": OPENROUTER_KEY,
        "model": "meta-llama/llama-3.3-70b-instruct:free"
    },
    'mistral': {
        "url": MISTRAL_URL,
        "key": MISTRAL_KEY,
        "model": "mistral-small-latest"
    }
}



class LLM:
    """Thin wrapper around an OpenAI-compatible chat-completions endpoint.

    The short model name passed to the constructor is resolved through the
    module-level ``model_creds`` registry into a base URL, API key, and
    provider-specific model identifier.
    """

    def __init__(self, model):
        """Build the API client for *model*.

        Raises KeyError if *model* is not a key in ``model_creds``.
        """
        # Resolve the registry entry once instead of per-field lookups.
        creds = model_creds[model]
        self.url = creds["url"]
        self.key = creds["key"]
        self.model = creds["model"]
        self.client = openai.OpenAI(api_key=self.key, base_url=self.url)

    def chat(self, messages, temperature=0):
        """Send *messages* to the chat-completions API and return the raw response.

        *messages* follows the OpenAI chat format (list of role/content dicts);
        *temperature* defaults to 0 for deterministic-leaning output.
        """
        logger.info("LLM call")

        response = self.client.chat.completions.create(
            messages=messages,
            model=self.model,
            temperature=temperature,
        )

        logger.info("LLM call completed")
        return response