Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,6 +1,8 @@
|
|
1 |
import streamlit as st
|
2 |
from datasets import load_dataset
|
3 |
-
from
|
|
|
|
|
4 |
from langchain.prompts.few_shot import FewShotChatMessagePromptTemplate
|
5 |
from langchain.prompts.example_selector import LengthBasedExampleSelector
|
6 |
|
@@ -18,8 +20,8 @@ def load_examples():
|
|
18 |
|
19 |
examples = load_examples()
|
20 |
|
21 |
-
# Set up the
|
22 |
-
|
23 |
|
24 |
# Few-shot prompt template
|
25 |
example_prompt = FewShotChatMessagePromptTemplate.from_examples(
|
@@ -31,21 +33,24 @@ example_prompt = FewShotChatMessagePromptTemplate.from_examples(
|
|
31 |
)
|
32 |
|
33 |
# Streamlit UI
|
34 |
-
st.title("💬 Dialogue Summarizer using Few-Shot Prompt + T5")
|
35 |
input_text = st.text_area("📋 Paste your conversation:")
|
36 |
|
37 |
if st.button("Generate Summary"):
|
38 |
if input_text.strip():
|
39 |
-
# Create prompt
|
40 |
messages = example_prompt.format_messages(input=input_text)
|
41 |
|
42 |
with st.expander("📝 Generated Prompt"):
|
43 |
for msg in messages:
|
44 |
st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
|
45 |
|
46 |
-
#
|
47 |
-
|
48 |
-
|
|
|
|
|
|
|
49 |
st.success("✅ Summary:")
|
50 |
st.write(summary)
|
51 |
else:
|
|
|
1 |
import streamlit as st
|
2 |
from datasets import load_dataset
|
3 |
+
from langchain.chains import LLMChain
|
4 |
+
from langchain.llms import HuggingFaceHub
|
5 |
+
from langchain.prompts import PromptTemplate
|
6 |
from langchain.prompts.few_shot import FewShotChatMessagePromptTemplate
|
7 |
from langchain.prompts.example_selector import LengthBasedExampleSelector
|
8 |
|
|
|
20 |
|
21 |
examples = load_examples()
|
22 |
|
23 |
+
# Set up the HuggingFaceHub model (T5)
|
24 |
+
llm = HuggingFaceHub(repo_id="google/pegasus-xsum", model_kwargs={"temperature": 0.7})
|
25 |
|
26 |
# Few-shot prompt template
|
27 |
example_prompt = FewShotChatMessagePromptTemplate.from_examples(
|
|
|
33 |
)
|
34 |
|
35 |
# Streamlit UI
|
36 |
+
st.title("💬 Dialogue Summarizer using Few-Shot Prompt + T5 (via Langchain)")
|
37 |
input_text = st.text_area("📋 Paste your conversation:")
|
38 |
|
39 |
if st.button("Generate Summary"):
|
40 |
if input_text.strip():
|
41 |
+
# Create prompt using FewShotChatMessagePromptTemplate
|
42 |
messages = example_prompt.format_messages(input=input_text)
|
43 |
|
44 |
with st.expander("📝 Generated Prompt"):
|
45 |
for msg in messages:
|
46 |
st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
|
47 |
|
48 |
+
# Create the prompt chain
|
49 |
+
prompt_template = PromptTemplate(input_variables=["input"], template="{input}")
|
50 |
+
chain = LLMChain(llm=llm, prompt=prompt_template)
|
51 |
+
|
52 |
+
# Get the summary from the model
|
53 |
+
summary = chain.run(input_text)
|
54 |
st.success("✅ Summary:")
|
55 |
st.write(summary)
|
56 |
else:
|