add to requirements
- model.py +3 -0
- requirements.txt +3 -1
model.py CHANGED

@@ -3,6 +3,9 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import warnings
 
 
+
+
+
 torch_dtype = torch.bfloat16
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 model_name = "mosaicml/mpt-7b"
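For context, the settings visible in this hunk are the ones MPT-7B needs at load time. Below is a minimal sketch of how model.py could use them; the from_pretrained call with trust_remote_code=True and the tokenizer choice are assumptions, since the rest of the file is not part of this diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

torch_dtype = torch.bfloat16
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model_name = "mosaicml/mpt-7b"

# MPT-7B ships its modeling code on the Hub, so trust_remote_code=True is
# required; that remote code imports einops, which is why einops appears in
# requirements.txt below.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch_dtype,
    trust_remote_code=True,
).to(device)
model.eval()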
requirements.txt CHANGED

@@ -4,4 +4,6 @@ sentencepiece==0.1.*
 torch==2.*
 transformers==4.*
 uvicorn[standard]==0.17.*
-streamlit-ace
+streamlit-ace
+einops
+langchain
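einops is a hard dependency of MPT-7B's remote modeling code, so loading the model fails without it; streamlit-ace is removed and re-added unchanged, most likely because the old last line had no trailing newline. How the Space uses langchain is not visible in this commit; the snippet below is only a hypothetical example of the prompt templating it could provide.

# Hypothetical use of langchain for prompt formatting; the names and template
# here are illustrative, not taken from the Space's code.
from langchain.prompts import PromptTemplate

prompt = PromptTemplate(
    input_variables=["question"],
    template="You are a helpful assistant.\nQuestion: {question}\nAnswer:",
)
print(prompt.format(question="Why does MPT-7B need einops?"))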