Update app.py
app.py CHANGED

@@ -1,8 +1,8 @@
 import os
 
-# Set writable cache directory
-os.environ['SENTENCE_TRANSFORMERS_HOME'] = '/
-os.environ['TRANSFORMERS_CACHE'] = '/
+# Set writable cache directory inside the container
+os.environ['SENTENCE_TRANSFORMERS_HOME'] = '/app/hf_home'
+os.environ['TRANSFORMERS_CACHE'] = '/app/hf_home'
 
 from fastapi import FastAPI
 from transformers import AutoModelForCausalLM, AutoTokenizer
@@ -10,6 +10,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 # Ensure the directory exists
 os.makedirs(os.environ['TRANSFORMERS_CACHE'], exist_ok=True)
 
+# Load model
 model_name = "mynuddin/chatbot"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")
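For context, the point of the change is that the cache environment variables must point to a writable path inside the Space's container and must be set before transformers is imported, so model downloads land in that directory. The sketch below shows one way the rest of app.py might serve the loaded model; the /generate endpoint, the Prompt request model, and the generation parameters are assumptions for illustration and are not part of this commit.

# Minimal sketch, assuming the file continues with a FastAPI endpoint (not shown in the diff)
import os

# Set writable cache directory inside the container, before importing transformers
os.environ['SENTENCE_TRANSFORMERS_HOME'] = '/app/hf_home'
os.environ['TRANSFORMERS_CACHE'] = '/app/hf_home'

from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Ensure the directory exists
os.makedirs(os.environ['TRANSFORMERS_CACHE'], exist_ok=True)

# Load model
model_name = "mynuddin/chatbot"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")

app = FastAPI()

class Prompt(BaseModel):
    text: str

@app.post("/generate")  # hypothetical endpoint name, not confirmed by the diff
def generate(prompt: Prompt):
    # Tokenize the incoming text, generate on CPU, and return the decoded output
    inputs = tokenizer(prompt.text, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=64)
    return {"response": tokenizer.decode(outputs[0], skip_special_tokens=True)}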