zoya23 commited on
Commit
e65d3fb
·
verified ·
1 Parent(s): 8e16424

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -9
app.py CHANGED
@@ -2,8 +2,9 @@ import streamlit as st
2
  from datasets import load_dataset
3
  from langchain.llms import HuggingFaceEndpoint
4
  from langchain.prompts import FewShotChatMessagePromptTemplate, ChatPromptTemplate
 
5
 
6
- # Load dataset from HuggingFace
7
  @st.cache_data
8
  def load_examples(n=3):
9
  dataset = load_dataset("knkarthick/dialogsum", split="train[:20]")
@@ -11,22 +12,26 @@ def load_examples(n=3):
11
 
12
  examples = load_examples()
13
 
14
- # Format examples
15
  example_prompt = ChatPromptTemplate.from_messages([
16
  ("human", "Summarize the following dialog:\n\n{dialogue}"),
17
  ("ai", "{summary}")
18
  ])
19
 
20
- # Few-shot setup
21
  few_shot_prompt = FewShotChatMessagePromptTemplate(
22
- examples=examples,
23
  example_prompt=example_prompt,
24
- suffix="Summarize the following dialog:\n\n{dialogue}",
25
- input_variables=["dialogue"],
26
- prefix="The following are examples of dialogues and their summaries."
27
  )
28
 
29
- # Load HF summarizer model (Pegasus)
 
 
 
 
 
 
 
30
  llm = HuggingFaceEndpoint(
31
  repo_id="google/pegasus-xsum",
32
  task="text2text-generation",
@@ -41,7 +46,12 @@ st.markdown("Uses real examples from `dialogsum` to guide the summary output.")
41
  user_input = st.text_area("✍️ Paste your dialogue here:", height=200)
42
 
43
  if user_input:
44
- messages = few_shot_prompt.format_messages(dialogue=user_input)
 
 
 
45
  response = llm(messages)
 
 
46
  st.subheader("📌 Summary:")
47
  st.write(response)
 
2
  from datasets import load_dataset
3
  from langchain.llms import HuggingFaceEndpoint
4
  from langchain.prompts import FewShotChatMessagePromptTemplate, ChatPromptTemplate
5
+ from langchain.schema.messages import SystemMessage
6
 
7
+ # Load few-shot examples from dialogsum
8
  @st.cache_data
9
  def load_examples(n=3):
10
  dataset = load_dataset("knkarthick/dialogsum", split="train[:20]")
 
12
 
13
  examples = load_examples()
14
 
15
+ # Template for each example
16
  example_prompt = ChatPromptTemplate.from_messages([
17
  ("human", "Summarize the following dialog:\n\n{dialogue}"),
18
  ("ai", "{summary}")
19
  ])
20
 
21
+ # Few-shot prompt template (no prefix/suffix here)
22
  few_shot_prompt = FewShotChatMessagePromptTemplate(
 
23
  example_prompt=example_prompt,
24
+ examples=examples
 
 
25
  )
26
 
27
+ # Now add intro system message + user input separately
28
+ final_prompt = ChatPromptTemplate.from_messages([
29
+ SystemMessage(content="The following are examples of dialogues and their summaries."),
30
+ *few_shot_prompt.messages,
31
+ ("human", "Summarize the following dialog:\n\n{dialogue}")
32
+ ])
33
+
34
+ # Load Pegasus model from HF inference API
35
  llm = HuggingFaceEndpoint(
36
  repo_id="google/pegasus-xsum",
37
  task="text2text-generation",
 
46
  user_input = st.text_area("✍️ Paste your dialogue here:", height=200)
47
 
48
  if user_input:
49
+ # Format messages
50
+ messages = final_prompt.format_messages(dialogue=user_input)
51
+
52
+ # Get response
53
  response = llm(messages)
54
+
55
+ # Output
56
  st.subheader("📌 Summary:")
57
  st.write(response)