from openai import OpenAI
from app.config import DEEPSEEK_API_KEY
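
# DEEPSEEK_API_KEY is expected to be defined in app/config.py, which is not part
# of this file. As an assumption about that module (not confirmed here), a typical
# setup loads the key from an environment variable, for example:
#   DEEPSEEK_API_KEY = os.environ.get("DEEPSEEK_API_KEY", "")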

def get_chat_completion(
    user_prompt: str,
    system_prompt: str = "You are a helpful AI assistant.",
    model: str = "deepseek/deepseek-chat-v3-0324:free",
    api_key: str = DEEPSEEK_API_KEY,
) -> str:
    """
    Get a chat completion from a DeepSeek model via the OpenRouter API.
    
    Args:
        user_prompt: The user's input prompt
        system_prompt: The system role prompt (default is generic assistant)
        model: The model to use (defaults to the free DeepSeek V3 0324 route on OpenRouter)
        api_key: API key for OpenRouter (default from config)
    
    Returns:
        The generated response from the model
    """
    # The OpenAI SDK is reused here; only the base_url is changed so requests go
    # to OpenRouter's OpenAI-compatible endpoint instead of api.openai.com.
    client = OpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key=api_key,
    )
    
    # Optional OpenRouter attribution headers; they identify the calling app for
    # OpenRouter's rankings and are not required for the request to succeed.
    headers = {
        "HTTP-Referer": "https://huggingface.co./spaces/Nekoko/NekoAI-Lab",
        "X-Title": "Meme-Search",
    }

    completion = client.chat.completions.create(
        extra_headers=headers,
        model=model,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
    )
    
    # message.content may be None in the SDK's typing; fall back to an empty string.
    content = completion.choices[0].message.content or ""
    print(">>>> LLM Response:", content)
    return content


# Example usage (note: running this module directly makes a live API call):
if __name__ == "__main__":
    response = get_chat_completion(
        user_prompt="What's the capital of France?",
        system_prompt="You are a knowledgeable geography assistant."
    )
    print(response)