zoya23 committed on
Commit
34cb346
·
verified ·
1 Parent(s): b40112c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -8
app.py CHANGED
@@ -1,6 +1,8 @@
1
  import streamlit as st
2
  from datasets import load_dataset
3
- from transformers import pipeline
 
 
4
  from langchain.prompts.few_shot import FewShotChatMessagePromptTemplate
5
  from langchain.prompts.example_selector import LengthBasedExampleSelector
6
 
@@ -18,8 +20,8 @@ def load_examples():
18
 
19
  examples = load_examples()
20
 
21
- # Set up the summarization model
22
- summarizer = pipeline("summarization", model="google/pegasus-xsum")
23
 
24
  # Few-shot prompt template
25
  example_prompt = FewShotChatMessagePromptTemplate.from_examples(
@@ -31,21 +33,24 @@ example_prompt = FewShotChatMessagePromptTemplate.from_examples(
31
  )
32
 
33
  # Streamlit UI
34
- st.title("πŸ’¬ Dialogue Summarizer using Few-Shot Prompt + T5")
35
  input_text = st.text_area("πŸ“ Paste your conversation:")
36
 
37
  if st.button("Generate Summary"):
38
  if input_text.strip():
39
- # Create prompt
40
  messages = example_prompt.format_messages(input=input_text)
41
 
42
  with st.expander("πŸ“‹ Generated Prompt"):
43
  for msg in messages:
44
  st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
45
 
46
- # Generate summary using model
47
- full_prompt = "\n".join([m.content for m in messages if m.type == "human"])
48
- summary = summarizer("summarize: " + input_text, max_length=80, min_length=15, do_sample=False)[0]['summary_text']
 
 
 
49
  st.success("βœ… Summary:")
50
  st.write(summary)
51
  else:
 
1
  import streamlit as st
2
  from datasets import load_dataset
3
+ from langchain.chains import LLMChain
4
+ from langchain.llms import HuggingFaceHub
5
+ from langchain.prompts import PromptTemplate
6
  from langchain.prompts.few_shot import FewShotChatMessagePromptTemplate
7
  from langchain.prompts.example_selector import LengthBasedExampleSelector
8
 
 
20
 
21
  examples = load_examples()
22
 
23
+ # Set up the HuggingFaceHub model (google/pegasus-xsum)
24
+ llm = HuggingFaceHub(repo_id="google/pegasus-xsum", model_kwargs={"temperature": 0.7})
25
 
26
  # Few-shot prompt template
27
  example_prompt = FewShotChatMessagePromptTemplate.from_examples(
 
33
  )
34
 
35
  # Streamlit UI
36
+ st.title("πŸ’¬ Dialogue Summarizer using Few-Shot Prompt + T5 (via Langchain)")
37
  input_text = st.text_area("πŸ“ Paste your conversation:")
38
 
39
  if st.button("Generate Summary"):
40
  if input_text.strip():
41
+ # Create prompt using FewShotChatMessagePromptTemplate
42
  messages = example_prompt.format_messages(input=input_text)
43
 
44
  with st.expander("πŸ“‹ Generated Prompt"):
45
  for msg in messages:
46
  st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
47
 
48
+ # Create the prompt chain
49
+ prompt_template = PromptTemplate(input_variables=["input"], template="{input}")
50
+ chain = LLMChain(llm=llm, prompt=prompt_template)
51
+
52
+ # Get the summary from the model
53
+ summary = chain.run(input_text)
54
  st.success("βœ… Summary:")
55
  st.write(summary)
56
  else: