Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,8 +1,10 @@
|
|
1 |
import requests
|
|
|
|
|
2 |
import os
|
3 |
|
4 |
print(os.getcwd())
|
5 |
-
|
6 |
url = "https://huggingface.co/ngxson/DeepSeek-R1-Distill-Qwen-7B-abliterated-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf?download=true"
|
7 |
file_name = "DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf"
|
8 |
|
@@ -16,14 +18,47 @@ if response.status_code == 200:
|
|
16 |
else:
|
17 |
print("Failed to download. Status code:", response.status_code)
|
18 |
|
19 |
-
|
20 |
-
#
|
21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
22 |
|
23 |
-
#
|
24 |
-
|
|
|
25 |
|
26 |
-
#
|
27 |
-
|
28 |
-
|
|
|
29 |
|
|
|
# Standard-library imports first, then third-party (PEP 8 grouping).
import os

import requests
import streamlit as st
from llama_cpp import Llama

# Debug aid: show the working directory the model file is saved to / loaded from.
print(os.getcwd())

# =================== Download Model ==============================
# Direct-download URL of the quantized GGUF model hosted on Hugging Face.
url = "https://huggingface.co/ngxson/DeepSeek-R1-Distill-Qwen-7B-abliterated-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf?download=true"
# Local filename the model is saved under (also used as the load path below).
file_name = "DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf"

|
|
18 |
else:
|
19 |
print("Failed to download. Status code:", response.status_code)
|
20 |
|
# ================================== Streamlit App ====================================
# Set Streamlit page config
st.set_page_config(page_title="Llama Chatbot", page_icon="🤖")

# Model path: the download section above saves the GGUF file into the
# current working directory under this name.
MODEL_PATH = file_name

# If the model file is missing, warn and halt the app. NOTE: nothing is
# downloaded here — the download happens in the section above; this is
# only a guard. (The original comment claimed it would download.)
if not os.path.exists(MODEL_PATH):
    st.warning("Model not found! Please download it first.")
    st.stop()

# Load the Llama model ONCE and cache it in session state.
# BUG FIX: the original assigned st.session_state["llm"] = Llama(...)
# unconditionally, so every Streamlit rerun (i.e. every user interaction)
# reloaded the multi-GB GGUF model from disk.
if "llm" not in st.session_state:
    st.session_state["llm"] = Llama(model_path=MODEL_PATH)

# Streamlit UI
st.title("🤖 Llama GGUF Chatbot")

# Chat transcript lives in session state so it survives reruns.
if "messages" not in st.session_state:
    st.session_state["messages"] = []

# Display chat history
for message in st.session_state["messages"]:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# User input
user_input = st.chat_input("Type your message here...")

if user_input:
    # Record and display the user's message.
    st.session_state["messages"].append({"role": "user", "content": user_input})
    with st.chat_message("user"):
        st.markdown(user_input)

    # Generate response.
    with st.spinner("Thinking..."):
        # BUG FIX: llama-cpp-python's Llama.__call__ defaults to
        # max_tokens=16, which truncated every answer after a few words.
        response = st.session_state["llm"](user_input, max_tokens=512)["choices"][0]["text"]

    # Record and display the assistant's reply.
    st.session_state["messages"].append({"role": "assistant", "content": response})
    with st.chat_message("assistant"):
        st.markdown(response)