zoya23 commited on
Commit
8e16424
·
verified ·
1 Parent(s): 2a5dd60

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -39
app.py CHANGED
@@ -1,48 +1,47 @@
1
  import streamlit as st
2
- from langchain.prompts import FewShotChatMessagePromptTemplate
3
- from langchain.llms import HuggingFaceHub
4
  from datasets import load_dataset
 
 
5
 
6
- # Load the dataset
7
- dataset = load_dataset("knkarthick/dialogsum", split="train[:1%]") # Load a small subset of the dataset for testing
 
 
 
8
 
9
- # Extract the input (dialogue) and output (summary) from the dataset
10
- examples = [
11
- {
12
- "input": dialogue['dialogue'], # Assuming 'dialogue' field contains the conversation text
13
- "output": dialogue['summary'] # Assuming 'summary' field contains the summary
14
- }
15
- for dialogue in dataset
16
- ]
17
 
18
- # Create FewShotChatMessagePromptTemplate without prefix and suffix
19
- example_prompt = FewShotChatMessagePromptTemplate(
 
 
 
 
 
 
20
  examples=examples,
21
- input_variables=["input"] # only the required fields
 
 
 
 
 
 
 
 
 
 
22
  )
23
 
24
  # Streamlit UI
25
- st.title("πŸ“ Text Summarizer using Few-Shot Prompt")
26
-
27
- input_text = st.text_area("Enter the text you want to summarize:")
28
-
29
- if st.button("Summarize"):
30
- if input_text.strip():
31
- # Format the prompt
32
- formatted_message = example_prompt.format(input=input_text)
33
-
34
- with st.expander("πŸ” Prompt Preview"):
35
- st.markdown(f"**Formatted Prompt:** {formatted_message}")
36
-
37
- # Load the model from Hugging Face (replace with your choice of model)
38
- model = HuggingFaceHub(
39
- repo_id="google/pegasus-xsum", # You can replace with any model available in Hugging Face
40
- model_kwargs={"temperature": 0.7}
41
- )
42
-
43
- # Generate the summary
44
- summary = model(formatted_message)
45
- st.success("βœ… Summary:")
46
- st.write(summary)
47
- else:
48
- st.warning("Please enter some text!")
 
1
  import streamlit as st
 
 
2
  from datasets import load_dataset
3
+ from langchain.llms import HuggingFaceEndpoint
4
+ from langchain.prompts import FewShotChatMessagePromptTemplate, ChatPromptTemplate
5
 
6
# Load dataset from HuggingFace
@st.cache_data
def load_examples(n=3):
    """Return *n* few-shot examples from the DialogSum training split.

    Each example is a dict with keys 'dialogue' (the conversation text)
    and 'summary' (its reference summary). Wrapped in st.cache_data so
    the dataset download and slicing happen once per session, not on
    every Streamlit rerun.
    """
    # Only the first 20 rows are fetched; clamp n so select() can never
    # index past the end of the (small) slice.
    dataset = load_dataset("knkarthick/dialogsum", split="train[:20]")
    n = min(n, len(dataset))
    return [
        {"dialogue": row["dialogue"], "summary": row["summary"]}
        for row in dataset.select(range(n))
    ]
11
 
12
# Materialize the few-shot examples once at import time (cached above).
examples = load_examples()
 
 
 
 
 
 
 
13
 
14
# Template that renders a single worked example as a human/AI turn pair:
# the human turn carries the dialogue, the AI turn carries its summary.
human_turn = ("human", "Summarize the following dialog:\n\n{dialogue}")
ai_turn = ("ai", "{summary}")
example_prompt = ChatPromptTemplate.from_messages([human_turn, ai_turn])
19
+
20
# Few-shot setup.
#
# BUG FIX: FewShotChatMessagePromptTemplate accepts only the examples and
# the per-example template. The `prefix`/`suffix`/`input_variables` kwargs
# belong to the string-based FewShotPromptTemplate; passing them here makes
# the constructor raise a validation error, and the user's dialogue was
# never appended to the rendered messages anyway. Express the instruction
# and the final user turn with an enclosing ChatPromptTemplate instead, so
# `few_shot_prompt.format(...)` / `.format_messages(dialogue=...)` renders
# instruction + examples + query exactly as intended.
_examples_prompt = FewShotChatMessagePromptTemplate(
    examples=examples,
    example_prompt=example_prompt,
)
few_shot_prompt = ChatPromptTemplate.from_messages([
    ("system", "The following are examples of dialogues and their summaries."),
    _examples_prompt,
    ("human", "Summarize the following dialog:\n\n{dialogue}"),
])
28
+
29
# Hugging Face Inference endpoint serving Pegasus-XSum for summarization.
# Generation settings are gathered in one place so they are easy to tweak.
# NOTE(review): HuggingFaceEndpoint reads the API token from the
# environment — presumably HUGGINGFACEHUB_API_TOKEN; confirm in deployment.
_generation_kwargs = {"temperature": 0.3, "max_new_tokens": 128}
llm = HuggingFaceEndpoint(
    repo_id="google/pegasus-xsum",
    task="text2text-generation",
    model_kwargs=_generation_kwargs,
)
35
 
36
# Streamlit UI
# NOTE(review): st.set_page_config should be the first Streamlit call in
# the script; the cached load_examples() above runs earlier — confirm this
# does not trigger the "set_page_config must be first" error at runtime.
st.set_page_config(page_title="DialogSum Few-Shot Summarizer", page_icon="🧠")
st.title("🧠 Few-Shot Dialog Summarizer")
st.markdown("Uses real examples from `dialogsum` to guide the summary output.")

user_input = st.text_area("✍️ Paste your dialogue here:", height=200)

# Guard against empty / whitespace-only input instead of firing a request.
if user_input and user_input.strip():
    # BUG FIX: HuggingFaceEndpoint is a plain LLM — it takes a single
    # prompt *string*, not a list of chat messages, so render the
    # few-shot prompt to text before calling the model.
    prompt = few_shot_prompt.format(dialogue=user_input)
    summary = llm(prompt)
    st.subheader("📌 Summary:")
    st.write(summary)