zoya23 committed on
Commit
793bade
·
verified ·
1 Parent(s): 52d4ec9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -27
app.py CHANGED
@@ -1,41 +1,52 @@
1
  import streamlit as st
 
 
2
  from langchain.prompts.few_shot import FewShotChatMessagePromptTemplate
3
  from langchain.prompts.example_selector import LengthBasedExampleSelector
4
 
5
- # Example dialogues + summaries
6
- examples = [
7
- {
8
- "input": "Doctor: What symptoms are you experiencing?\nPatient: I have a sore throat and runny nose.",
9
- "output": "The patient reports sore throat and runny nose."
10
- },
11
- {
12
- "input": "Patient: I've been feeling dizzy since morning.\nDoctor: Did you eat anything unusual?\nPatient: No, just normal food.",
13
- "output": "The patient has dizziness but no unusual food intake."
14
- }
15
- ]
16
-
17
- # Create Few-Shot Prompt Template
18
- prompt = FewShotChatMessagePromptTemplate.from_examples(
 
 
 
 
 
19
  examples=examples,
20
- example_selector=LengthBasedExampleSelector(examples=examples, max_length=100),
21
  input_variables=["input"],
22
- prefix="You are a medical assistant that summarizes doctor-patient conversations. Examples:",
23
- suffix="Now summarize this conversation:\n{input}"
24
  )
25
 
26
  # Streamlit UI
27
- st.title("πŸ’¬ Few-Shot Summarizer (No Model)")
28
-
29
- input_text = st.text_area("Paste your medical conversation here:")
30
 
31
- if st.button("Generate Prompt"):
32
  if input_text.strip():
33
- messages = prompt.format_messages(input=input_text)
 
34
 
35
- st.subheader("πŸ“‹ Prompt Generated:")
36
- for msg in messages:
37
- st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
38
 
39
- st.info("⚠️ This is just a prompt, not an actual summary.")
 
 
 
 
40
  else:
41
- st.warning("Please enter some conversation text.")
 
1
  import streamlit as st
2
+ from datasets import load_dataset
3
+ from transformers import pipeline
4
  from langchain.prompts.few_shot import FewShotChatMessagePromptTemplate
5
  from langchain.prompts.example_selector import LengthBasedExampleSelector
6
 
7
# Load dataset (small subset)
@st.cache_data
def load_examples():
    """Fetch the first 5 DialogSum records shaped as few-shot pairs.

    Returns a list of ``{"input": dialogue, "output": summary}`` dicts
    suitable for a LangChain few-shot prompt template. Cached by
    Streamlit so the dataset is only downloaded once per session.
    """
    subset = load_dataset("knkarthick/dialogsum", split="train[:5]")  # only 5 for speed
    return [
        {"input": record["dialogue"], "output": record["summary"]}
        for record in subset
    ]
18
+
19
# Materialize the few-shot examples once at module import.
examples = load_examples()

# Set up the summarization model
# NOTE(review): pipeline() downloads/loads t5-small at module import on every
# cold start; presumably @st.cache_resource would avoid reloading — confirm.
summarizer = pipeline("summarization", model="t5-small")
23
+
24
# Few-shot prompt template
# NOTE(review): LangChain few-shot templates generally accept either
# `examples` OR `example_selector`, not both — passing both may raise a
# ValueError at construction time; verify against the installed version.
# NOTE(review): `prefix`/`suffix` are parameters of the string-based
# FewShotPromptTemplate; confirm FewShotChatMessagePromptTemplate.from_examples
# accepts them — the chat variant normally takes an `example_prompt` instead.
example_prompt = FewShotChatMessagePromptTemplate.from_examples(
    examples=examples,
    example_selector=LengthBasedExampleSelector(examples=examples, max_length=1000),
    input_variables=["input"],
    prefix="You are a helpful assistant that summarizes dialogues. Examples:",
    suffix="Now summarize this:\n{input}"
)
32
 
33
# Streamlit UI
st.title("πŸ’¬ Dialogue Summarizer using Few-Shot Prompt + T5")
input_text = st.text_area("πŸ“ Paste your conversation:")

if st.button("Generate Summary"):
    if input_text.strip():
        # Build the few-shot prompt purely for display/inspection below;
        # the T5 pipeline is fed only the raw conversation, since t5-small
        # is not tuned for chat-style few-shot prompts.
        messages = example_prompt.format_messages(input=input_text)

        with st.expander("πŸ“‹ Generated Prompt"):
            for msg in messages:
                st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")

        # Generate the summary with the local T5 pipeline. "summarize: " is
        # the task prefix T5 expects; do_sample=False keeps output
        # deterministic. (Removed dead `full_prompt` variable that was built
        # from the chat messages but never passed to the model.)
        summary = summarizer(
            "summarize: " + input_text,
            max_length=80,
            min_length=15,
            do_sample=False,
        )[0]["summary_text"]
        st.success("βœ… Summary:")
        st.write(summary)
    else:
        st.warning("Please enter some text.")