theotherdylan committed on
Commit
13dde62
·
1 Parent(s): 0083c1d

remove chunking

Browse files
Files changed (1) hide show
  1. app.py +4 -6
app.py CHANGED
@@ -13,6 +13,7 @@ def get_history_messages():
13
 
14
 
15
  def predict(prompt):
 
16
  pipe = pipeline("conversational", model="cognitivecomputations/TinyDolphin-2.8-1.1b")
17
  response = pipe(
18
  [
@@ -20,12 +21,9 @@ def predict(prompt):
20
  {"role": "user", "content": prompt}
21
  ],
22
  )
23
- history.append((prompt, ""))
24
- message = ""
25
- for chunk in response:
26
- message += chunk.messages[-1]["content"]
27
- history[-1] = (prompt, message)
28
- yield "", history
29
 
30
  with gradio.Blocks(fill_height=True) as demo:
31
  chat = gradio.Chatbot(scale=1)
 
13
 
14
 
15
  def predict(prompt):
16
+ print("Predict:", prompt)
17
  pipe = pipeline("conversational", model="cognitivecomputations/TinyDolphin-2.8-1.1b")
18
  response = pipe(
19
  [
 
21
  {"role": "user", "content": prompt}
22
  ],
23
  )
24
+ history.append((prompt, response.messages[-1]["content"]))
25
+ print("Predict done")
26
+ return "", history
 
 
 
27
 
28
  with gradio.Blocks(fill_height=True) as demo:
29
  chat = gradio.Chatbot(scale=1)