zoya23 commited on
Commit
43d6abb
·
verified ·
1 Parent(s): dda3762

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -20
app.py CHANGED
@@ -1,16 +1,14 @@
1
  import streamlit as st
2
  from langchain.prompts import FewShotChatMessagePromptTemplate
3
  from langchain.prompts.example_selector import LengthBasedExampleSelector
4
- from langchain_huggingface import HuggingFaceEndpoint, HuggingFacePipeline
5
- from langchain.chains import LLMChain
6
- from langchain.prompts import PromptTemplate
7
  from datasets import load_dataset
8
- from transformers import pipeline
9
 
10
- # Load dataset (using knkarthick/dialogsum as an example)
11
  @st.cache_data
12
  def load_examples():
13
- dataset = load_dataset("knkarthick/dialogsum", split="train[:5]") # Take only 5 for speed
 
14
  examples = []
15
  for example in dataset:
16
  examples.append({
@@ -19,13 +17,9 @@ def load_examples():
19
  })
20
  return examples
21
 
 
22
  examples = load_examples()
23
 
24
- # Load the Hugging Face model
25
- hf_endpoint = HuggingFaceEndpoint(
26
- endpoint_url="https://api-inference.huggingface.co/models/google/pegasus-xsum" # or any model you like
27
- )
28
-
29
  # Create FewShotChatMessagePromptTemplate
30
  example_prompt = FewShotChatMessagePromptTemplate.from_examples(
31
  examples=examples,
@@ -35,10 +29,13 @@ example_prompt = FewShotChatMessagePromptTemplate.from_examples(
35
  suffix="Now summarize this:\n{input}"
36
  )
37
 
38
- # Streamlit UI
39
  st.title("💬 Dialogue Summarizer using Few-Shot Prompt + T5 (via Langchain)")
40
 
41
- input_text = st.text_area("📝 Paste your conversation:")
 
 
 
42
 
43
  if st.button("Generate Summary"):
44
  if input_text.strip():
@@ -49,12 +46,9 @@ if st.button("Generate Summary"):
49
  for msg in messages:
50
  st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
51
 
52
- # Set up HuggingFacePipeline with the model endpoint
53
- hf_pipeline = HuggingFacePipeline(pipeline="summarization", model=hf_endpoint)
54
-
55
- # Generate summary
56
- summary = hf_pipeline(messages[0].content)
57
  st.success("✅ Summary:")
58
- st.write(summary[0]['summary_text'])
59
  else:
60
  st.warning("Please enter some text.")
 
1
  import streamlit as st
2
  from langchain.prompts import FewShotChatMessagePromptTemplate
3
  from langchain.prompts.example_selector import LengthBasedExampleSelector
4
+ from langchain.llms import HuggingFaceHub
 
 
5
  from datasets import load_dataset
 
6
 
7
+ # Load dataset (you can use any summarization dataset here)
8
  @st.cache_data
9
  def load_examples():
10
+ # Using 'knkarthick/dialogsum' as an example dataset
11
+ dataset = load_dataset("knkarthick/dialogsum", split="train[:5]") # Load a subset for testing
12
  examples = []
13
  for example in dataset:
14
  examples.append({
 
17
  })
18
  return examples
19
 
20
+ # Load few-shot examples from the dataset
21
  examples = load_examples()
22
 
 
 
 
 
 
23
  # Create FewShotChatMessagePromptTemplate
24
  example_prompt = FewShotChatMessagePromptTemplate.from_examples(
25
  examples=examples,
 
29
  suffix="Now summarize this:\n{input}"
30
  )
31
 
32
+ # Set up Hugging Face model (you can replace it with any other available model)
33
+ llm = HuggingFaceHub(repo_id="t5-small", task="summarization")
34
 
35
+ # Streamlit UI setup
36
+ st.title("📝 Dialogue Summarizer using Few-Shot Prompt + T5")
37
+
38
+ input_text = st.text_area("📝 Paste your conversation here:")
39
 
40
  if st.button("Generate Summary"):
41
  if input_text.strip():
 
46
  for msg in messages:
47
  st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
48
 
49
+ # Get the summary using the Hugging Face model
50
+ response = llm(messages[0].content)
 
 
 
51
  st.success("✅ Summary:")
52
+ st.write(response['text'])
53
  else:
54
  st.warning("Please enter some text.")