CultriX committed on
Commit 69c7141 · verified · 1 Parent(s): 3c1c644

Update app.py

Files changed (1)
  1. app.py +9 -10
app.py CHANGED
@@ -311,6 +311,7 @@ def process_conversation_generator(task_message: str, api_key: str,
     Runs the multi-agent conversation in a background thread and yields conversation history updates
     as a tuple: (chat update, log state update).
     """
+    last_log_text = ""
     def run_conversation():
         asyncio.run(multi_agent_conversation(task_message, log_queue, api_key, human_event, human_input_queue))
 
@@ -320,20 +321,18 @@ def process_conversation_generator(task_message: str, api_key: str,
     while conversation_thread.is_alive() or not log_queue.empty():
         try:
             msg = log_queue.get(timeout=0.1)
-            if isinstance(msg, tuple):
-                if msg[0] in ("update", "result"):
-                    chat_update = gr.update(value=convert_history(msg[1]), visible=True)
-                    log_text = conversation_to_text(msg[1])
-                    state_update = gr.update(value=log_text)
-                    yield (chat_update, state_update)
-                else:
-                    # Optionally handle log messages.
-                    pass
+            if isinstance(msg, tuple) and msg[0] in ("update", "result"):
+                chat_update = gr.update(value=convert_history(msg[1]), visible=True)
+                last_log_text = conversation_to_text(msg[1])
+                state_update = gr.update(value=last_log_text)
+                yield (chat_update, state_update)
+            else:
+                pass
         except queue.Empty:
             pass
         time.sleep(0.1)
 
-    yield (gr.update(visible=True), gr.update())
+    yield (gr.update(visible=True), gr.update(value=last_log_text))
 
 # -------------------- Multi-Agent Chat Function --------------------
 def multi_agent_chat(message: str, openai_api_key: str = None) -> Generator[Any, None, None]:
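For context, the changed generator follows a producer/consumer pattern: a background thread pushes ("update", history) / ("result", history) tuples onto a queue.Queue while the generator drains the queue and yields UI updates, and the new last_log_text variable keeps the final yield from clearing the accumulated log. Below is a minimal, self-contained sketch of that pattern only; fake_conversation and drain_updates are hypothetical stand-ins for multi_agent_conversation and the Gradio-specific wiring, which are not reproduced here.

import queue
import threading
import time

def fake_conversation(log_queue: queue.Queue) -> None:
    # Stand-in producer: pushes growing history snapshots onto the queue.
    history = []
    for i in range(3):
        history.append(f"agent message {i}")
        log_queue.put(("update", list(history)))
        time.sleep(0.2)
    log_queue.put(("result", list(history)))

def drain_updates(log_queue: queue.Queue, worker: threading.Thread):
    # Consumer generator: yields (chat_history, log_text) until the worker
    # has finished and the queue is empty.
    last_log_text = ""  # retained so the final yield repeats the log instead of clearing it
    while worker.is_alive() or not log_queue.empty():
        try:
            msg = log_queue.get(timeout=0.1)
        except queue.Empty:
            continue
        if isinstance(msg, tuple) and msg[0] in ("update", "result"):
            last_log_text = "\n".join(msg[1])
            yield msg[1], last_log_text
        time.sleep(0.1)
    yield None, last_log_text  # final yield keeps the accumulated log text

if __name__ == "__main__":
    q = queue.Queue()
    t = threading.Thread(target=fake_conversation, args=(q,), daemon=True)
    t.start()
    for chat, log_text in drain_updates(q, t):
        print(chat, "|", repr(log_text))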