Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -45,7 +45,9 @@ if user_input:
|
|
45 |
|
46 |
# Add formatted examples to the final prompt
|
47 |
formatted_prompt = final_prompt.format_messages(dialogue=user_input)
|
48 |
-
|
|
|
|
|
49 |
|
50 |
# Get response from model with correct explicit parameters
|
51 |
llm = HuggingFaceEndpoint(
|
@@ -54,7 +56,9 @@ if user_input:
|
|
54 |
temperature=0.3, # Explicitly passing temperature here
|
55 |
max_new_tokens=128 # Explicitly passing max_new_tokens here
|
56 |
)
|
57 |
-
|
|
|
|
|
58 |
|
59 |
# Output the summary
|
60 |
st.subheader("📝 Summary:")
|
|
|
45 |
|
46 |
# Add formatted examples to the final prompt
|
47 |
formatted_prompt = final_prompt.format_messages(dialogue=user_input)
|
48 |
+
|
49 |
+
# Convert the list of messages into a single string
|
50 |
+
prompt_string = "\n".join([msg["content"] for msg in formatted_prompt])
|
51 |
|
52 |
# Get response from model with correct explicit parameters
|
53 |
llm = HuggingFaceEndpoint(
|
|
|
56 |
temperature=0.3, # Explicitly passing temperature here
|
57 |
max_new_tokens=128 # Explicitly passing max_new_tokens here
|
58 |
)
|
59 |
+
|
60 |
+
# Run the LLM with the prompt string
|
61 |
+
response = llm(prompt_string)
|
62 |
|
63 |
# Output the summary
|
64 |
st.subheader("📝 Summary:")
|