srbmihaicode committed on
Commit 5175e14 · verified · 1 Parent(s): 0408a46

Removed history and parameters requirements

Files changed (1):
  app.py  +15 -22
app.py CHANGED
@@ -4,24 +4,21 @@ from huggingface_hub import InferenceClient
 app = Flask(__name__)
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
-def generate_response(message, history, system_message, max_tokens, temperature, top_p):
-    messages = [{"role": "system", "content": system_message}]
-
-    for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
+DEFAULT_MAX_TOKENS = 512
+DEFAULT_TEMPERATURE = 0.7
+DEFAULT_TOP_P = 0.95
 
+def generate_response(message, system_message):
+    messages = [{"role": "system", "content": system_message}]
     messages.append({"role": "user", "content": message})
+
     response = ""
-
     for message in client.chat_completion(
         messages,
-        max_tokens=max_tokens,
+        max_tokens=DEFAULT_MAX_TOKENS,
         stream=True,
-        temperature=temperature,
-        top_p=top_p,
+        temperature=DEFAULT_TEMPERATURE,
+        top_p=DEFAULT_TOP_P,
     ):
         token = message.choices[0].delta.content
         response += token
@@ -39,15 +36,15 @@ def generate_journal_suggestion(current_page):
         [{"role": "user", "content": suggestion_prompt}],
         max_tokens=150,
         stream=True,
-        temperature=0.7,
-        top_p=0.9,
+        temperature=DEFAULT_TEMPERATURE,
+        top_p=DEFAULT_TOP_P,
     ):
         token = message.choices[0].delta.content
         suggestion_response += token
 
     return suggestion_response
 
-@app.route("/", methods=["POST","GET"])
+@app.route("/", methods=["POST", "GET"])
 def home():
     return "Hi!"
 
@@ -56,17 +53,13 @@ def chat():
     try:
         data = request.json
         message = data.get("message", "")
-        history = data.get("history", [])
         system_message = data.get("system_message", "You are a friendly chatbot.")
-        max_tokens = data.get("max_tokens", 512)
-        temperature = data.get("temperature", 0.7)
-        top_p = data.get("top_p", 0.95)
         journal_page = data.get("journal_page", "")
 
-        if not isinstance(history, list) or not all(isinstance(pair, list) for pair in history):
-            return jsonify({"error": "Invalid history format. It should be a list of [message, response] pairs."}), 400
+        if not message:
+            return jsonify({"error": "Message is required."}), 400
 
-        response = generate_response(message, history, system_message, max_tokens, temperature, top_p)
+        response = generate_response(message, system_message)
 
         suggestion = ""
         if journal_page:
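
For context, a minimal client-side sketch of how the simplified chat handler can be called after this commit. The /chat URL path, the port, and the shape of the JSON response are assumptions (the route decorator for chat() and its final jsonify call fall outside the hunks shown above); only the message, system_message, and journal_page request keys come from the diff.

import requests

# Hypothetical local URL; the route path and port for chat() are not visible in this diff.
CHAT_URL = "http://localhost:5000/chat"

payload = {
    "message": "How was my day?",                     # required: empty/missing now returns 400 "Message is required."
    "system_message": "You are a friendly chatbot.",  # optional; this is the handler's default
    "journal_page": "Went for a long walk today.",    # optional; non-empty triggers a journal suggestion
    # "history", "max_tokens", "temperature", and "top_p" are no longer read by the handler
}

resp = requests.post(CHAT_URL, json=payload)
print(resp.status_code, resp.json())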