xnetba committed on
Commit
969c331
·
1 Parent(s): ae4523f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -6
app.py CHANGED
@@ -1,11 +1,11 @@
1
  from transformers import AutoModelForCausalLM, AutoTokenizer
2
  import torch
3
- #set up the model (large version of DialoGPT)
4
- tokenizer = AutoTokenizer.from_pretrained("timdettmers/guanaco-65b-merged")
5
- model = AutoModelForCausalLM.from_pretrained("timdettmers/guanaco-65b-merged")
6
 
7
- #Defining a predict function
 
 
8
 
 
9
  def predict(input, history=[]):
10
  # tokenize the new input sentence
11
  new_user_input_ids = tokenizer.encode(input + tokenizer.eos_token, return_tensors='pt')
@@ -22,11 +22,10 @@ def predict(input, history=[]):
22
  return response, history
23
 
24
  #creating a gradio interface
25
-
26
  import gradio as gr
27
 
28
  demo = gr.Interface(fn=predict,
29
- examples=["How many"],
30
  inputs=["text", "state"],
31
  outputs=["chatbot", "state"])
32
 
 
1
  from transformers import AutoModelForCausalLM, AutoTokenizer
2
  import torch
 
 
 
3
 
4
+ #set up the model
5
+ tokenizer = AutoTokenizer.from_pretrained("OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5")
6
+ model = AutoModelForCausalLM.from_pretrained("OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5")
7
 
8
+ #Defining a predict function
9
  def predict(input, history=[]):
10
  # tokenize the new input sentence
11
  new_user_input_ids = tokenizer.encode(input + tokenizer.eos_token, return_tensors='pt')
 
22
  return response, history
23
 
24
  #creating a gradio interface
 
25
  import gradio as gr
26
 
27
  demo = gr.Interface(fn=predict,
28
+ examples=["How many birds exist on Earth"],
29
  inputs=["text", "state"],
30
  outputs=["chatbot", "state"])
31