GenAICoder commited on
Commit
b6627c8
·
verified ·
1 Parent(s): b4bed9a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -13
app.py CHANGED
@@ -1,18 +1,16 @@
1
- # Use a pipeline as a high-level helper
2
- from transformers import pipeline
3
  import streamlit as st
 
4
 
5
- st.title("AI Chatbot")
 
 
6
 
7
- st.write("Ask the AI a question and get an answer!")
8
 
9
- user_question = st.chat_input("Enter your question:")
10
 
11
- messages = [
12
- {"role": "user", "content": "Who are you?"},
13
- ]
14
- pipe = pipeline("text-generation", model="deepseek-ai/DeepSeek-R1-Zero", trust_remote_code=True)
15
- pipe(messages)
16
-
17
-
18
- st.write(pipe(messages))
 
 
 
1
  import streamlit as st
2
+ from transformers import AutoModelForCausalLM, AutoTokenizer
3
 
4
+ model_name = "deepseek-ai/DeepSeek-V3-Base"
5
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
6
+ model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True, quantization_config=None)
7
 
8
+ st.title("DeepSeek Chatbot")
9
 
10
+ prompt = st.text_input("Enter your message:")
11
 
12
+ if prompt:
13
+ inputs = tokenizer.encode(prompt, return_tensors="pt")
14
+ outputs = model.generate(inputs)
15
+ response = tokenizer.decode(outputs[0], skip_special_tokens=True)
16
+ st.write(response)