File size: 3,460 Bytes
c85e9e7
 
 
 
 
ecbd6bb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c85e9e7
ecbd6bb
c85e9e7
 
 
ecbd6bb
 
 
 
 
c85e9e7
ecbd6bb
c85e9e7
 
ecbd6bb
 
c85e9e7
 
ecbd6bb
c85e9e7
 
 
 
 
 
 
 
 
ecbd6bb
c85e9e7
 
 
ecbd6bb
 
 
 
 
c85e9e7
 
 
 
 
 
 
 
 
ecbd6bb
 
 
 
 
 
 
 
 
c85e9e7
ecbd6bb
 
 
 
 
c85e9e7
 
 
 
 
ecbd6bb
c85e9e7
 
ecbd6bb
c85e9e7
 
ecbd6bb
 
c85e9e7
 
ecbd6bb
c85e9e7
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
import openai
import gradio as gr
from functools import partial

### Utility Functions
def validate_api_key(api_key):
    """
    Validate an OpenAI API key without consuming completion tokens.

    Parameters:
    - api_key (str | None): The key to check; falsy values are rejected
      immediately without any network traffic.

    Returns:
    - bool: True if the key authenticates against the API, False otherwise.
    """
    if not api_key:
        return False

    try:
        client = openai.OpenAI(api_key=api_key)
        # models.list() is a free, lightweight authenticated endpoint --
        # unlike a chat completion it spends no tokens just to probe the key.
        client.models.list()
        return True
    except Exception:
        # Any failure (bad key, network error, SDK missing) is treated as
        # "not valid" -- callers only need a boolean gate.
        return False

def call_openai_api(user_message, api_key, model, temperature=0.1, max_tokens=1024):
    """
    Send a single chat-completion request and return the reply text.

    Parameters:
    - user_message (str): The input message to process.
    - api_key (str): OpenAI API key.
    - model (str): Model name (e.g., 'gpt-3.5-turbo', 'gpt-4').
    - temperature (float): Sampling temperature for the API.
    - max_tokens (int): Maximum number of tokens to generate.

    Returns:
    - str: The model's reply, or an "Error: ..." string if the call failed.
    """
    # The user turn is wrapped in #### delimiters to mark its boundaries
    # in the prompt.
    conversation = [
        {'role': 'system', 'content': "You are a helpful assistant."},
        {'role': 'user', 'content': f"#### {user_message} ####"},
    ]
    try:
        client = openai.OpenAI(api_key=api_key)
        completion = client.chat.completions.create(
            model=model,
            messages=conversation,
            temperature=temperature,
            max_tokens=max_tokens,
        )
    except Exception as exc:
        # Surface the failure as text so the UI can display it inline.
        return f"Error: {str(exc)}"
    return completion.choices[0].message.content

def inference(user_message, api_key):
    """
    Run the same prompt through GPT-3.5-Turbo and GPT-4 for side-by-side
    comparison.

    Returns a 2-tuple of response strings (one per model); both slots carry
    an error message when the API key does not validate.
    """
    rejection = "Invalid API Key. Please enter a valid OpenAI API key."
    if not validate_api_key(api_key):
        return rejection, rejection

    return (
        call_openai_api(user_message, api_key, model='gpt-3.5-turbo', temperature=0.1),
        call_openai_api(user_message, api_key, model='gpt-4', temperature=0.1),
    )

### Gradio Interface
def launch_demo():
    """
    Build and launch the Gradio UI for comparing model outputs.
    """
    with gr.Blocks() as demo:
        gr.Markdown('<center><h1>OpenAI LLM Explorer</h1></center>')
        gr.Markdown("Enter your OpenAI API key and prompt to compare results from multiple OpenAI models.")

        # Credentials -- masked so the key never shows on screen.
        key_box = gr.Textbox(
            label='OpenAI API Key',
            placeholder="Enter your OpenAI API key...",
            type="password",
        )

        # The prompt shared by both models.
        prompt_box = gr.Textbox(
            label='Prompt',
            lines=5,
            placeholder="Enter your prompt here...",
        )

        # One output pane per model, side by side.
        with gr.Row():
            out_gpt35 = gr.Textbox(label='GPT-3.5-Turbo Output', lines=6)
            out_gpt4 = gr.Textbox(label='GPT-4 Output', lines=6)

        run_button = gr.Button("Generate", variant='primary')

        # Clicking the button fans the prompt out to both models.
        run_button.click(
            inference,
            inputs=[prompt_box, key_box],
            outputs=[out_gpt35, out_gpt4],
        )

    # share=True exposes a public Gradio link in addition to localhost.
    demo.launch(share=True)

# Entry point: start the Gradio demo only when run as a script,
# not when imported as a module.
if __name__ == "__main__":
    launch_demo()