MilanM committed on
Commit 54cbccc · verified · 1 Parent(s): 39f3acd

Update app_chat.py

Files changed (1)
  1. app_chat.py +8 -1
app_chat.py CHANGED
@@ -56,6 +56,7 @@ if 'current_page' not in st.session_state:
 def initialize_session_state():
     if 'chat_history' not in st.session_state:
         st.session_state.chat_history = []
+        st.session_state.chat_history.append({"role": "system", "content": genparam.SYSTEM_PROMPT})
 
 def setup_client():
     credentials = Credentials(
@@ -100,6 +101,11 @@ def generate_response(watsonx_llm, prompt_data, params):
     for chunk in generated_response:
         yield chunk
 
+def generate_chat_response(watsonx_llm, prompt_data, params):
+    generated_response = watsonx_llm.chat_stream(messages=chat_history, params=params)
+    for chunk in generated_response:
+        yield chunk
+
 def chat_interface():
     st.subheader("Jimmy")
 
@@ -140,7 +146,8 @@ def chat_interface():
 
         # Generate and stream response
         with st.chat_message("Jimmy", avatar="😒"):
-            stream = generate_response(watsonx_llm, prompt_data, params)
+            # stream = generate_response(watsonx_llm, prompt_data, params)
+            stream = generate_chat_response(watsonx_llm, chat_history, params)
            response = st.write_stream(stream)
 
         # Add AI response to chat history
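
In short, the commit seeds the session's chat_history with genparam.SYSTEM_PROMPT and switches generation from the prompt-string based generate_response to a new generate_chat_response that calls watsonx_llm.chat_stream over the running message history. Note that, as committed, generate_chat_response names its second parameter prompt_data while its body reads chat_history, so the call in chat_interface only works if chat_history is visible at that scope. The sketch below shows how a chat turn could be wired around the new generator, assuming chat_history comes from st.session_state.chat_history and that watsonx_llm and params are produced elsewhere in app_chat.py; the helper name chat_turn and the chat_input label are illustrative, not part of the commit.

import streamlit as st  # already imported in app_chat.py

def chat_turn(watsonx_llm, params):
    # The history already begins with the system message added in initialize_session_state().
    chat_history = st.session_state.chat_history

    if prompt := st.chat_input("Message Jimmy"):
        # Record the user turn so chat_stream() sees the full conversation.
        chat_history.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)

        # Generate and stream the reply, mirroring the updated chat_interface().
        with st.chat_message("Jimmy", avatar="😒"):
            stream = generate_chat_response(watsonx_llm, chat_history, params)
            response = st.write_stream(stream)

        # Persist the assistant turn for the next call.
        chat_history.append({"role": "assistant", "content": response})

Because chat_stream receives the whole messages list on every turn, earlier turns (including the system prompt) shape each new response without the app having to rebuild a prompt string by hand.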