# TinyChat — app.py
# (Hugging Face Space; commit 13dde62, "remove chunking")
from transformers import pipeline
import gradio
# Conversation history shared across requests: list of (user_text, assistant_text)
# pairs, appended to by predict(). NOTE(review): module-level mutable state means
# all users of the app share one conversation — confirm this is intended.
history = []


def get_history_messages():
    """Flatten `history` into chat-format messages.

    Returns:
        list[dict]: one ``{"role": ..., "content": ...}`` dict per turn, in
        order — a "user" message followed by an "assistant" message for each
        (user, assistant) pair in ``history``.
    """
    # Comprehension over each pair: emit the user turn then the assistant turn.
    return [
        message
        for user, assist in history
        for message in (
            {"role": "user", "content": user},
            {"role": "assistant", "content": assist},
        )
    ]
# Lazily-created singleton pipeline. The original code rebuilt the pipeline
# (reloading the full model from disk) on EVERY message — the dominant cost of
# each request. Build it once on first use and reuse it afterwards.
_pipe = None


def _get_pipe():
    """Return the shared conversational pipeline, creating it on first call."""
    global _pipe
    if _pipe is None:
        _pipe = pipeline(
            "conversational",
            model="cognitivecomputations/TinyDolphin-2.8-1.1b",
        )
    return _pipe


def predict(prompt):
    """Run one chat turn: send `prompt` plus prior history to the model.

    Appends the (prompt, reply) pair to the module-level ``history`` and
    returns ``("", history)`` so Gradio clears the textbox and refreshes
    the chatbot display.

    Args:
        prompt: the user's new message text.

    Returns:
        tuple[str, list]: empty string (textbox reset) and the updated history.
    """
    print("Predict:", prompt)
    response = _get_pipe()(
        [
            *get_history_messages(),
            {"role": "user", "content": prompt},
        ],
    )
    # NOTE(review): assumes the pipeline returns a Conversation-like object
    # whose last message is the assistant reply — matches the original code.
    history.append((prompt, response.messages[-1]["content"]))
    print("Predict done")
    return "", history
# Build the chat UI: a chatbot panel above a compact row holding the prompt
# textbox and a send button. Both pressing Enter in the textbox and clicking
# the button invoke predict(), which clears the textbox and updates the chat.
with gradio.Blocks(fill_height=True) as demo:
    chat = gradio.Chatbot(scale=1)
    with gradio.Row(variant="compact"):
        prompt = gradio.Textbox(show_label=False, scale=6, autofocus=True)
        button = gradio.Button(scale=1)
    # Same callback wired to both triggers (submit via Enter, click on button).
    button.click(predict, inputs=[prompt], outputs=[prompt, chat])
    prompt.submit(predict, inputs=[prompt], outputs=[prompt, chat])

if __name__ == '__main__':
    demo.launch()