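# NOTE: The setup below is assumed to exist above this point in app.py; it is
# reconstructed here as a minimal sketch. The imports follow from how the names
# are used further down, while the MAX_HISTORY_LENGTH value is a guess.
import os
from datetime import datetime

import gradio as gr
import openai
from openai import OpenAI

API_KEY = os.getenv("OPENAI_API_KEY")  # assumed env var, matching the check in __main__
client = OpenAI(api_key=API_KEY)       # client whose client.responses.create is called below
MAX_HISTORY_LENGTH = 5                 # assumed default used when history_length is invalid
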
# --- Helper Functions ---
# !!! WARNING: This function is adapted to the requested format and LOSES features !!!
def get_openai_response_simplified(prompt, model="gpt-4.1", system_prompt="", chat_history=None):
    """
    Gets a response using the client.responses.create format.
    NOTE: This is NON-STREAMING and handles the history/system prompt crudely.
    Advanced parameters (temperature, top_p, etc.) are NOT supported by this structure.
    """
    print("--- Entering get_openai_response_simplified ---")  # DEBUG
    print(f"Received prompt: {prompt}")  # DEBUG
    print(f"Received model: {model}")  # DEBUG
    print(f"Received history (first few): {chat_history[:2] if chat_history else 'None'}")  # DEBUG

    today_day = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    formatted_input = ""
    effective_system_prompt = f"Today's date is: {today_day}. {system_prompt}".strip()
    if effective_system_prompt:
        formatted_input += f"System: {effective_system_prompt}\n\n"
    if chat_history:
        for turn in chat_history:
            if len(turn) == 2 and turn[0] is not None and turn[1] is not None:
                formatted_input += f"User: {turn[0]}\nAssistant: {turn[1]}\n"
    formatted_input += f"User: {prompt}\nAssistant:"
    print(f"Formatted Input for API: \n{formatted_input}\n---")  # DEBUG
    try:
        print("Attempting client.responses.create call...")  # DEBUG
        # Check if method seems to exist before calling (basic check)
        if not (hasattr(client, 'responses') and hasattr(client.responses, 'create')):
            print("ERROR: client.responses.create method NOT FOUND!")  # DEBUG
            return "Error: The API call structure 'client.responses.create' is not available in the OpenAI client."

        response = client.responses.create(
            model=model,
            input=formatted_input
        )
        print(f"API call supposedly succeeded. Response object: {response}")  # DEBUG

        # Check if response has the expected attribute BEFORE accessing it
        if hasattr(response, 'output_text'):
            output = response.output_text
            print(f"Extracted output_text: {output}")  # DEBUG
            print("--- Exiting get_openai_response_simplified (Success) ---")  # DEBUG
            return output
        else:
            print("ERROR: Response object does NOT have 'output_text' attribute.")  # DEBUG
            print("--- Exiting get_openai_response_simplified (Error) ---")  # DEBUG
            return "Error: API response format is unexpected (missing output_text)."

    # Keep specific error handling
    except openai.APIConnectionError as e:
        print(f"ERROR caught in get_openai_response_simplified: APIConnectionError: {e}")  # DEBUG
        return f"Error: Could not connect to OpenAI API. {e}"
    except openai.RateLimitError as e:
        print(f"ERROR caught in get_openai_response_simplified: RateLimitError: {e}")  # DEBUG
        return f"Error: Rate limit exceeded. Please try again later. {e}"
    except openai.AuthenticationError as e:
        print(f"ERROR caught in get_openai_response_simplified: AuthenticationError: {e}")  # DEBUG
        return f"Error: Authentication failed. Check your API key. {e}"
    except openai.APIStatusError as e:
        print(f"ERROR caught in get_openai_response_simplified: APIStatusError: {e}")  # DEBUG
        return f"Error: OpenAI API returned an error (Status: {e.status_code}). {e}"
    except AttributeError as e:
        # This might catch the error if the initial check fails or response has wrong structure
        print(f"ERROR caught in get_openai_response_simplified: AttributeError: {e}")  # DEBUG
        return f"Error: Problem with API call structure or response format. {e}"
    except Exception as e:
        print(f"ERROR caught in get_openai_response_simplified: Unexpected Exception: {e}")  # DEBUG
        return f"An unexpected error occurred: {e}"

# !!! WARNING: This update function is now NON-STREAMING !!!
def update_ui_simplified(message, chat_history, model, system_prompt, history_length):
    """Updates the Gradio UI WITHOUT streaming."""
    print("\n--- Entering update_ui_simplified ---")  # DEBUG
    print(f"Received message: {message}")  # DEBUG

    # Make sure chat_history is treated as a list before measuring or slicing it
    if chat_history is None:
        chat_history = []
    print(f"Current chat_history length: {len(chat_history)}")  # DEBUG
    print(f"Requested history_length: {history_length}")  # DEBUG

    if not message:
        print("Empty message received, returning.")  # DEBUG
        return "", chat_history  # Return original history if message is empty

    # Ensure history_length is an int for slicing
    try:
        hist_len_int = int(history_length)
    except (TypeError, ValueError):
        print(f"Warning: Invalid history_length '{history_length}', defaulting to {MAX_HISTORY_LENGTH}")
        hist_len_int = MAX_HISTORY_LENGTH

    # Keep only the specified length of history for the *next* call
    history_for_api = chat_history[-hist_len_int:] if hist_len_int > 0 else []
    print(f"History slice for API length: {len(history_for_api)}")  # DEBUG

    # Call the simplified, non-streaming function
    bot_response = get_openai_response_simplified(
        prompt=message,
        model=model,
        system_prompt=system_prompt,
        chat_history=history_for_api  # Pass the potentially trimmed history
    )
    print(f"Received from API helper: {bot_response}")  # DEBUG

    # Append the user message and the *complete* bot response
    chat_history.append((message, bot_response))
    print(f"Appended to chat_history. New length: {len(chat_history)}")  # DEBUG

    # Update UI only once with the full response
    # Always display history based on the slider length for visibility
    visible_history = chat_history[-hist_len_int:] if hist_len_int > 0 else []
    print(f"Visible history length: {len(visible_history)}")  # DEBUG
    print("--- Exiting update_ui_simplified ---")  # DEBUG
    return "", visible_history  # Clear input, return updated history

# --- Gradio Interface (Ensure Simplified Handlers are Used) ---
# ... (Keep the Gradio Blocks definition as in the previous example) ...
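# For orientation, a rough sketch of the kind of Blocks layout these handlers
# expect (component names come from the wiring below; labels, choices, and
# defaults are assumptions, not the original definition):
#
#   with gr.Blocks() as demo:
#       chatbot = gr.Chatbot(label="Chat")
#       msg = gr.Textbox(label="Message", placeholder="Type a message...")
#       model_select = gr.Dropdown(choices=["gpt-4.1", "gpt-4o"], value="gpt-4.1", label="Model")
#       system_prompt_textbox = gr.Textbox(label="System prompt")
#       history_length_slider = gr.Slider(1, 20, value=MAX_HISTORY_LENGTH, step=1, label="History length")
#       with gr.Row():
#           send = gr.Button("Send")
#           clear = gr.Button("Clear")
#       # In the full app, the event wiring below is attached inside this `with` block.
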
# --- Event Handlers (Check these carefully) ---
inputs_simplified = [
    msg, chatbot, model_select, system_prompt_textbox,
    history_length_slider  # Make sure history_length_slider is included
]
outputs = [msg, chatbot]

send.click(
    update_ui_simplified,      # Ensure this is the correct function
    inputs=inputs_simplified,  # Ensure this list matches the function args
    outputs=outputs,
    queue=True
)
msg.submit(
    update_ui_simplified,      # Ensure this is the correct function
    inputs=inputs_simplified,  # Ensure this list matches the function args
    outputs=outputs,
    queue=True
)
clear.click(lambda: (None, []), inputs=None, outputs=[msg, chatbot], queue=False)

# ... (Rest of the Gradio code and launch) ...
if __name__ == "__main__":
    # Make sure API key is loaded (add check if needed)
    if not API_KEY:
        print("FATAL ERROR: OPENAI_API_KEY is not set. The application cannot function.")
        # Optionally, you could display this error in the Gradio UI as well
        # and prevent the launch or disable input components.
    else:
        print("OpenAI API Key found.")
        demo.queue()
        demo.launch()