zoya23 commited on
Commit
302df45
·
verified ·
1 Parent(s): a8c4c26

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -15
app.py CHANGED
@@ -27,18 +27,10 @@ few_shot_prompt = FewShotChatMessagePromptTemplate(
27
  # Now add intro system message + user input separately
28
  final_prompt = ChatPromptTemplate.from_messages([
29
  SystemMessage(content="The following are examples of dialogues and their summaries."),
30
- *few_shot_prompt.get_messages(), # Use get_messages() instead of .messages
31
  ("human", "Summarize the following dialog:\n\n{dialogue}")
32
  ])
33
 
34
- # Load Pegasus model from HF inference API
35
- llm = HuggingFaceEndpoint(
36
- repo_id="google/pegasus-xsum",
37
- task="text2text-generation",
38
- model_kwargs={"temperature": 0.3, "max_new_tokens": 128}
39
- )
40
-
41
- # Streamlit UI
42
  st.set_page_config(page_title="DialogSum Few-Shot Summarizer", page_icon="🧠")
43
  st.title("🧠 Few-Shot Dialog Summarizer")
44
  st.markdown("Uses real examples from `dialogsum` to guide the summary output.")
@@ -46,12 +38,21 @@ st.markdown("Uses real examples from `dialogsum` to guide the summary output.")
46
  user_input = st.text_area("✍️ Paste your dialogue here:", height=200)
47
 
48
  if user_input:
49
- # Format messages
50
- messages = final_prompt.format_messages(dialogue=user_input)
51
-
52
- # Get response
53
- response = llm(messages)
54
 
55
- # Output
 
 
 
 
 
 
 
 
 
 
 
 
56
  st.subheader("📌 Summary:")
57
  st.write(response)
 
27
  # Now add intro system message + user input separately
28
  final_prompt = ChatPromptTemplate.from_messages([
29
  SystemMessage(content="The following are examples of dialogues and their summaries."),
 
30
  ("human", "Summarize the following dialog:\n\n{dialogue}")
31
  ])
32
 
33
+ # Streamlit UI setup
 
 
 
 
 
 
 
34
  st.set_page_config(page_title="DialogSum Few-Shot Summarizer", page_icon="🧠")
35
  st.title("🧠 Few-Shot Dialog Summarizer")
36
  st.markdown("Uses real examples from `dialogsum` to guide the summary output.")
 
38
  user_input = st.text_area("✍️ Paste your dialogue here:", height=200)
39
 
40
  if user_input:
41
+ # Prepare messages for the final prompt (include few-shot examples here directly)
42
+ formatted_prompt = few_shot_prompt.format_messages(dialogue=user_input)
 
 
 
43
 
44
+ # Add formatted examples to the final prompt
45
+ formatted_prompt = final_prompt.format_messages(dialogue=user_input)
46
+ formatted_prompt = formatted_prompt + formatted_prompt
47
+
48
+ # Get response from model
49
+ llm = HuggingFaceEndpoint(
50
+ repo_id="google/pegasus-xsum",
51
+ task="text2text-generation",
52
+ model_kwargs={"temperature": 0.3, "max_new_tokens": 128}
53
+ )
54
+ response = llm(formatted_prompt)
55
+
56
+ # Output the summary
57
  st.subheader("📌 Summary:")
58
  st.write(response)