import gradio as gr
import torch
from transformers import pipeline

model_id = "deepseek-ai/DeepSeek-R1"

# Load the model once at startup; device_map="auto" places it on the available GPU(s).
pipe = pipeline(
    "text-generation",
    model=model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
    trust_remote_code=True,
)

system_prompt = "You are a pirate chatbot who always responds in pirate speak!"

def model(message):
    # Build the chat from the fixed system prompt and the user's textbox input.
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": message},
    ]
    outputs = pipe(
        messages,
        max_new_tokens=256,
    )
    # The pipeline returns the whole conversation; the last entry is the
    # assistant's reply, and its "content" field holds the generated text.
    output = outputs[0]["generated_text"][-1]["content"]
    print(output)
    return output

app = gr.Interface(fn=model, inputs="textbox", outputs="textbox")
app.launch()