File size: 7,365 Bytes
811bed9
5e1d6fb
811bed9
5e1d6fb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22c1c37
5e1d6fb
 
 
 
 
 
 
811bed9
 
 
 
 
5e1d6fb
 
811bed9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5e1d6fb
 
811bed9
5e1d6fb
 
 
 
 
 
811bed9
 
5e1d6fb
 
 
811bed9
 
5e1d6fb
 
 
 
811bed9
5e1d6fb
 
 
811bed9
5e1d6fb
 
811bed9
5e1d6fb
811bed9
5e1d6fb
 
 
 
 
ee73624
811bed9
5e1d6fb
811bed9
5e1d6fb
811bed9
5e1d6fb
 
 
22c1c37
5e1d6fb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
811bed9
5e1d6fb
 
 
 
 
811bed9
5e1d6fb
 
811bed9
 
 
 
5e1d6fb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
811bed9
 
 
 
 
 
 
 
5e1d6fb
811bed9
5e1d6fb
 
811bed9
5e1d6fb
 
22c1c37
5e1d6fb
811bed9
 
 
 
 
 
 
 
 
 
 
5e1d6fb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
811bed9
 
5e1d6fb
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
import gradio as gr
import time  # NOTE(review): appears unused in this file — confirm before removing

# Try importing OpenAI with error handling.
# NOTE(review): installing a package at import time is a best-effort fallback;
# it mutates the running environment and assumes network access + pip are
# available — consider pinning `openai` in requirements instead.
try:
    from openai import OpenAI
except ImportError:
    print("OpenAI package not installed. Installing now...")
    import subprocess
    import sys
    subprocess.check_call([sys.executable, "-m", "pip", "install", "openai"])
    from openai import OpenAI

def test_api_connection(api_key):
    """Test the API connection and return a status message"""
    if not api_key or api_key.strip() == "":
        return False, "API key is required"
    
    try:
        client = OpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=api_key.strip(),
        )
        # Simple request to test connection
        client.models.list()
        return True, "API connection successful"
    except Exception as e:
        return False, f"API connection failed: {str(e)}"

def generate_solution(api_key, problem_statement, progress=gr.Progress()):
    """Validate inputs, verify the API key, then ask the model for a solution.

    Args:
        api_key: OpenRouter API key entered by the user.
        problem_statement: The competitive-programming problem text.
        progress: Gradio progress tracker (injected by the framework).

    Returns:
        Markdown-formatted solution text, or an "Error: ..." string on any
        validation or API failure (errors are returned, never raised, so the
        UI always gets displayable text).
    """
    progress(0, desc="Starting...")

    # Guard clauses: reject blank inputs before doing any network work.
    if not api_key or api_key.strip() == "":
        return "Error: OpenRouter API key is required"
    if not problem_statement or problem_statement.strip() == "":
        return "Error: Please provide a problem statement"

    progress(0.1, desc="Validating API key...")
    ok, message = test_api_connection(api_key)
    if not ok:
        return f"Error: {message}"

    progress(0.3, desc="Sending request to AI model...")
    try:
        client = OpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=api_key.strip(),
        )

        progress(0.5, desc="Generating solution...")
        chat_messages = [
            {
                "role": "system",
                "content": "You are a competitive programming expert. Provide a correct solution with clear reasoning. First output the code, then explain the approach."
            },
            {
                "role": "user",
                "content": f"Solve this problem:\n{problem_statement}"
            },
        ]
        completion = client.chat.completions.create(
            model="open-r1/olympiccoder-7b:free",
            messages=chat_messages,
            temperature=0.3,
            max_tokens=2048,
        )

        progress(0.8, desc="Processing response...")
        raw_answer = completion.choices[0].message.content

        progress(0.9, desc="Formatting output...")
        formatted = format_response(raw_answer)

        progress(1.0, desc="Done!")
        return formatted

    except Exception as exc:
        error_message = str(exc)
        print(f"Error occurred: {error_message}")
        return f"Error: {error_message}"

def format_response(response):
    """Normalize fenced code blocks in the AI response to tagged markdown.

    Fixes over the previous implementation:
      * A language written on the fence line itself (```` ```python ````) was
        swallowed by the fence check and lost; it is now preserved.
      * Block replacement relied on reconstructing the original text and
        calling ``str.replace``, which rarely matched and silently did
        nothing; the response is now rebuilt line by line instead.
      * An unclosed trailing fence was detected but never terminated; it is
        now closed so downstream markdown rendering stays valid.

    Args:
        response: Raw text returned by the model (may be None/empty).

    Returns:
        The response with every code fence normalized to ```<lang>, where
        <lang> comes from the fence line, from a bare language word on the
        following line, or defaults to "python"; or an error string when the
        response is empty.
    """
    if not response:
        return "Error: Received empty response from AI model"

    # Languages the model is expected to answer in; a bare word matching one
    # of these on the line after an untagged fence is treated as the tag.
    known_langs = {"python", "java", "c++", "cpp", "javascript"}

    lines = response.split('\n')
    out = []
    in_code = False
    i = 0
    while i < len(lines):
        stripped = lines[i].strip()
        if stripped.startswith('```'):
            if in_code:
                # Closing fence — drop any stray trailing text after ```.
                out.append('```')
            else:
                # Opening fence — take the language from the fence line,
                # else from a bare language word on the next line, else
                # default to python.
                lang = stripped[3:].strip().lower()
                if not lang:
                    nxt = lines[i + 1].strip().lower() if i + 1 < len(lines) else ""
                    if nxt in known_langs:
                        lang = nxt
                        i += 1  # consume the language line
                    else:
                        lang = "python"
                out.append(f"```{lang}")
            in_code = not in_code
        else:
            out.append(lines[i])
        i += 1

    # Terminate an unclosed block so the markdown stays well-formed.
    if in_code:
        out.append('```')

    return '\n'.join(out)

# Define the Gradio interface
with gr.Blocks(title="Competitive Programming Assistant", theme=gr.themes.Soft()) as app:
    gr.Markdown("# 🏆 Competitive Programming Assistant")
    gr.Markdown("Powered by OlympicCoder-7B via OpenRouter AI")
    
    with gr.Row():
        with gr.Column():
            api_key = gr.Textbox(
                label="OpenRouter API Key",
                type="password",
                placeholder="Enter your API key here...",
                value=""
            )
            
            test_btn = gr.Button("Test API Connection", variant="secondary")
            api_status = gr.Textbox(label="API Status", interactive=False)
            
            test_btn.click(
                fn=test_api_connection,
                inputs=[api_key],
                outputs=[gr.Checkbox(visible=False), api_status]
            )
    
    with gr.Row():
        problem_input = gr.Textbox(
            label="Problem Statement",
            lines=5,
            placeholder="Paste your programming problem here..."
        )
    
    submit_btn = gr.Button("Generate Solution", variant="primary", size="lg")
    
    # Add a status indicator
    status = gr.Markdown("Ready to generate solutions")
    
    # Output area
    solution_output = gr.Markdown(label="Solution")
    
    # Example problems
    gr.Examples(
        examples=[
            [
                "Given an array of integers, find two numbers such that they add up to a specific target number."
            ],
            [
                "Implement a function to calculate the minimum number of operations required to transform one string into another using only insertion, deletion, and substitution."
            ]
        ],
        inputs=[problem_input]
    )
    
    # Set up event handlers with additional status updates
    def on_submit_click(api_key, problem):
        return "Generating solution... Please wait."
    
    submit_btn.click(
        fn=on_submit_click,
        inputs=[api_key, problem_input],
        outputs=[status],
        queue=False
    ).then(
        fn=generate_solution,
        inputs=[api_key, problem_input],
        outputs=[solution_output],
        queue=True
    ).success(
        fn=lambda: "Solution generated successfully!",
        inputs=None,
        outputs=[status]
    ).error(
        fn=lambda: "An error occurred. Please check your inputs and try again.",
        inputs=None,
        outputs=[status]
    )

# Script entry point: launch the Gradio server, reporting (not raising)
# any startup failure.
if __name__ == "__main__":
    try:
        print("Starting Competitive Programming Assistant...")
        launch_options = {"server_port": 7860, "share": True, "debug": True}
        app.launch(**launch_options)
    except Exception as e:
        print(f"Error launching app: {str(e)}")