ramwar committed
Commit 1b41f61 · Parent: 139cd9c

clear transformers cache first

Files changed (1): app.py (+3, -1)
app.py CHANGED
@@ -2,6 +2,7 @@ import os
 import requests
 import io
 import time
+import transformers
 from getpass import getpass
 from langchain import HuggingFaceHub
 from langchain.embeddings import HuggingFaceEmbeddings
@@ -27,9 +28,10 @@ question_examples = [
 ]
 
 os.environ["HUGGINGFACEHUB_API_TOKEN"] = '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'
-
 REPO_ID = "declare-lab/flan-alpaca-large"
 
+transformers.utils.move_cache()
+
 llm = HuggingFaceHub(
     repo_id=REPO_ID,
     model_kwargs={"temperature":0, "max_length":512}
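
The added transformers.utils.move_cache() call performs the one-time migration of files from the legacy transformers cache layout into the shared Hugging Face hub cache before any model is loaded. A minimal sketch of that startup step is below; the try/except guard is an assumption for illustration, not part of this commit.

# Sketch: run the cache migration defensively at app startup.
# transformers.utils.move_cache() relocates files from the legacy
# transformers cache to the shared Hugging Face hub cache layout.
# The try/except (an assumption, not in the commit) keeps an
# already-migrated or failed migration from blocking startup.
import transformers

try:
    transformers.utils.move_cache()
except Exception as err:
    print(f"Cache migration skipped: {err}")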