File size: 7,411 Bytes
a47572b
16565e1
 
7fe04ec
16565e1
 
 
 
 
fd1f204
 
 
 
16565e1
fd1f204
16565e1
9109896
 
16565e1
9109896
 
16565e1
 
fd1f204
9109896
fd1f204
a47572b
9109896
fd1f204
 
 
 
 
 
16565e1
a47572b
16565e1
a47572b
fd1f204
 
 
 
 
 
 
 
 
 
 
 
 
 
a47572b
fd1f204
16565e1
a47572b
fd1f204
16565e1
9109896
fd1f204
16565e1
a47572b
fd1f204
16565e1
 
fd1f204
 
 
a47572b
fd1f204
16565e1
 
fd1f204
16565e1
 
 
fd1f204
 
 
 
 
16565e1
fd1f204
16565e1
 
fd1f204
 
 
 
 
 
 
16565e1
fd1f204
 
16565e1
 
 
 
 
 
 
a47572b
fd1f204
9109896
16565e1
fd1f204
 
 
16565e1
fd1f204
9109896
16565e1
 
fd1f204
 
 
16565e1
a47572b
fd1f204
 
 
 
16565e1
 
fd1f204
9109896
fd1f204
9109896
 
fd1f204
 
9109896
16565e1
a47572b
 
fd1f204
 
9109896
 
a47572b
9109896
a47572b
fd1f204
599d7c0
 
fd1f204
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
# --- Helper Functions ---

# !!! WARNING: This function is adapted to the requested format and LOSES features !!!
def get_openai_response_simplified(prompt, model="GPT-4.1", system_prompt="", chat_history=None):
    """
    Fetch one complete (non-streaming) reply via client.responses.create.

    The system prompt, prior turns, and the new prompt are flattened into a
    single plain-text transcript, because this call shape takes one `input`
    string. Advanced sampling parameters (temperature, top_p, ...) cannot be
    passed through it. Returns the model's text, or a human-readable
    "Error: ..." string on any failure.
    """
    print("--- Entering get_openai_response_simplified ---") # DEBUG
    print(f"Received prompt: {prompt}") # DEBUG
    print(f"Received model: {model}") # DEBUG
    print(f"Received history (first few): {chat_history[:2] if chat_history else 'None'}") # DEBUG

    # Build the transcript piecewise, then join once.
    stamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    sys_text = f"Today's date is: {stamp}. {system_prompt}".strip()
    parts = []
    if sys_text:
        parts.append(f"System: {sys_text}\n\n")
    for turn in (chat_history or []):
        # Replay only complete (user, assistant) pairs.
        if len(turn) == 2 and turn[0] is not None and turn[1] is not None:
            parts.append(f"User: {turn[0]}\nAssistant: {turn[1]}\n")
    parts.append(f"User: {prompt}\nAssistant:")
    formatted_input = "".join(parts)

    print(f"Formatted Input for API: \n{formatted_input}\n---") # DEBUG

    try:
        print("Attempting client.responses.create call...") # DEBUG
        # Guard: bail out early if the installed client lacks this call shape.
        if not (hasattr(client, 'responses') and hasattr(client.responses, 'create')):
            print("ERROR: client.responses.create method NOT FOUND!") # DEBUG
            return "Error: The API call structure 'client.responses.create' is not available in the OpenAI client."

        response = client.responses.create(model=model, input=formatted_input)
        print(f"API call supposedly succeeded. Response object: {response}") # DEBUG

        # Guard: verify the response shape before touching the attribute.
        if not hasattr(response, 'output_text'):
            print("ERROR: Response object does NOT have 'output_text' attribute.") # DEBUG
            print("--- Exiting get_openai_response_simplified (Error) ---") # DEBUG
            return "Error: API response format is unexpected (missing output_text)."

        output = response.output_text
        print(f"Extracted output_text: {output}") # DEBUG
        print("--- Exiting get_openai_response_simplified (Success) ---") # DEBUG
        return output

    # Specific failures first, most general last; each maps to a UI-friendly string.
    except openai.APIConnectionError as e:
        print(f"ERROR caught in get_openai_response_simplified: APIConnectionError: {e}") # DEBUG
        return f"Error: Could not connect to OpenAI API. {e}"
    except openai.RateLimitError as e:
        print(f"ERROR caught in get_openai_response_simplified: RateLimitError: {e}") # DEBUG
        return f"Error: Rate limit exceeded. Please try again later. {e}"
    except openai.AuthenticationError as e:
        print(f"ERROR caught in get_openai_response_simplified: AuthenticationError: {e}") # DEBUG
        return f"Error: Authentication failed. Check your API key. {e}"
    except openai.APIStatusError as e:
        print(f"ERROR caught in get_openai_response_simplified: APIStatusError: {e}") # DEBUG
        return f"Error: OpenAI API returned an error (Status: {e.status_code}). {e}"
    except AttributeError as e:
        # Covers an unexpected client/response structure that slipped past the guards.
        print(f"ERROR caught in get_openai_response_simplified: AttributeError: {e}") # DEBUG
        return f"Error: Problem with API call structure or response format. {e}"
    except Exception as e:
        print(f"ERROR caught in get_openai_response_simplified: Unexpected Exception: {e}") # DEBUG
        return f"An unexpected error occurred: {e}"


# !!! WARNING: This update function is now NON-STREAMING !!!
def update_ui_simplified(message, chat_history, model, system_prompt, history_length):
    """Update the Gradio chat UI with a complete (non-streaming) bot reply.

    Args:
        message: New user message; empty/None input is a no-op.
        chat_history: List of (user, assistant) tuples, or None on first call.
        model: Model identifier forwarded to the API helper.
        system_prompt: System prompt forwarded to the API helper.
        history_length: Max number of past turns to send to the API and to
            display; coerced to int, falling back to MAX_HISTORY_LENGTH.

    Returns:
        ("", visible_history): clears the input box and shows the trimmed history.
    """
    print("\n--- Entering update_ui_simplified ---") # DEBUG
    # Normalize None up-front: the original checked only just before append,
    # so len()/slicing below crashed with TypeError on a None history.
    if chat_history is None:
        chat_history = []
    print(f"Received message: {message}") # DEBUG
    print(f"Current chat_history length: {len(chat_history)}") # DEBUG
    print(f"Requested history_length: {history_length}") # DEBUG

    if not message:
        print("Empty message received, returning.") # DEBUG
        return "", chat_history # Return original history if message is empty

    # Ensure history_length is an int for slicing.
    # int(None) raises TypeError, not ValueError, so catch both.
    try:
        hist_len_int = int(history_length)
    except (TypeError, ValueError):
        print(f"Warning: Invalid history_length '{history_length}', defaulting to {MAX_HISTORY_LENGTH}")
        hist_len_int = MAX_HISTORY_LENGTH

    # Keep only the specified length of history for the *next* call.
    # hist_len_int <= 0 means "send no history" (a plain [-0:] would send all).
    history_for_api = chat_history[-hist_len_int:] if hist_len_int > 0 else []
    print(f"History slice for API length: {len(history_for_api)}") # DEBUG

    # Call the simplified, non-streaming helper; it returns the full reply
    # (or an "Error: ..." string) in one shot.
    bot_response = get_openai_response_simplified(
        prompt=message,
        model=model,
        system_prompt=system_prompt,
        chat_history=history_for_api # Pass the potentially trimmed history
    )
    print(f"Received from API helper: {bot_response}") # DEBUG

    # Append the user message and the *complete* bot response.
    chat_history.append((message, bot_response))
    print(f"Appended to chat_history. New length: {len(chat_history)}") # DEBUG

    # Display history trimmed to the slider length so the view matches
    # what the API will see on the next turn.
    visible_history = chat_history[-hist_len_int:] if hist_len_int > 0 else []
    print(f"Visible history length: {len(visible_history)}") # DEBUG
    print("--- Exiting update_ui_simplified ---") # DEBUG
    return "", visible_history # Clear input, return updated history

# --- Gradio Interface (Ensure Simplified Handlers are Used) ---
# ... (Keep the Gradio Blocks definition as in the previous example) ...

# --- Event Handlers (Check these carefully) ---
    # Shared wiring: both send paths read the message box, current chat state,
    # model choice, system prompt, and the history-length slider — the order
    # here must match update_ui_simplified's parameter order.
    inputs_simplified = [
        msg, chatbot, model_select, system_prompt_textbox,
        history_length_slider # Make sure history_length_slider is included
    ]
    # Both handlers clear the textbox and refresh the chatbot display.
    outputs = [msg, chatbot]

    # Send button click triggers one full (non-streaming) exchange.
    send.click(
        update_ui_simplified, # Ensure this is the correct function
        inputs=inputs_simplified, # Ensure this list matches the function args
        outputs=outputs,
        queue=True
    )
    # Pressing Enter in the textbox behaves exactly like the Send button.
    msg.submit(
        update_ui_simplified, # Ensure this is the correct function
        inputs=inputs_simplified, # Ensure this list matches the function args
        outputs=outputs,
        queue=True
    )
    # Clear button: wipe both the input box and the conversation display;
    # queue=False so the reset is immediate and never waits behind a reply.
    clear.click(lambda: (None, []), None, outputs=[msg, chatbot], queue=False)

# ... (Rest of the Gradio code and launch) ...

if __name__ == "__main__":
    # Launch only when credentials are present; everything downstream needs them.
    if API_KEY:
        print("OpenAI API Key found.")
        demo.queue()
        demo.launch()
    else:
        print("FATAL ERROR: OPENAI_API_KEY is not set. The application cannot function.")
        # A friendlier option would be surfacing this error inside the Gradio
        # UI and disabling the input components instead of skipping launch.