fix
app.py CHANGED
@@ -125,7 +125,7 @@ def chat_with_model(messages):
         output_text += token_str
 
         # Early stopping if user reappears
-        if "\nUser
+        if "\nUser" in output_text:
             output_text = output_text.split("\nUser")[0].rstrip()
             messages[-1]["content"] = output_text
             break
@@ -145,7 +145,7 @@ def chat_with_model(messages):
     messages[-1]["content"] = output_text
 
     # Wait for thread to finish
-    current_model.to("cpu")
+    # current_model.to("cpu")
     torch.cuda.empty_cache()
 
     messages[-1]["content"] = output_text
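
For context, a minimal sketch of the generation loop these two changes touch, assuming the app streams tokens from a Hugging Face transformers model via TextIteratorStreamer; the model name, prompt format, streamer, and generation arguments below are illustrative assumptions, not taken from app.py. The loop accumulates streamed tokens into output_text, stops early as soon as the "\nUser" marker reappears, and releases cached GPU memory afterwards without moving the model back to the CPU.

# Sketch only: the model name, prompt format, streamer, and generation
# arguments are assumptions for illustration, not taken from the commit.
from threading import Thread

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

model_name = "gpt2"  # placeholder model for the sketch
tokenizer = AutoTokenizer.from_pretrained(model_name)
current_model = AutoModelForCausalLM.from_pretrained(model_name)
if torch.cuda.is_available():
    current_model = current_model.to("cuda")


def chat_with_model(messages):
    # Flatten the chat history into a plain "Role: text" prompt (assumed format).
    prompt = "".join(f"\n{m['role'].capitalize()}: {m['content']}" for m in messages)
    prompt += "\nAssistant:"
    inputs = tokenizer(prompt, return_tensors="pt").to(current_model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

    # Generate in a background thread so tokens can be consumed as they arrive.
    thread = Thread(
        target=current_model.generate,
        kwargs=dict(**inputs, streamer=streamer, max_new_tokens=256),
    )
    thread.start()

    messages.append({"role": "assistant", "content": ""})
    output_text = ""
    for token_str in streamer:
        output_text += token_str

        # Early stopping if user reappears (first change in the diff).
        if "\nUser" in output_text:
            output_text = output_text.split("\nUser")[0].rstrip()
            messages[-1]["content"] = output_text
            break

        messages[-1]["content"] = output_text

    # Wait for thread to finish, then release cached GPU memory.
    # The model stays on the GPU: the commit comments out current_model.to("cpu").
    thread.join()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

    messages[-1]["content"] = output_text
    return messages

Leaving current_model.to("cpu") commented out keeps the weights resident on the GPU between requests, so later calls avoid a host-to-device transfer; torch.cuda.empty_cache() only returns unused blocks from PyTorch's caching allocator to the driver and does not free memory still held by the model.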