srbmihaicode committed
Commit 1fa889b · verified · 1 Parent(s): 121b0b3

Updated from print to log

Files changed (1)
  1. app.py +10 -10
app.py CHANGED
@@ -1,8 +1,9 @@
 from flask import Flask, request, jsonify
 from huggingface_hub import InferenceClient
+from gunicorn.glogging import Logger
 
 app = Flask(__name__)
-app.debug = True
+logger = Logger()
 client = InferenceClient("meta-llama/Llama-3.1-8B-Instruct")
 DEFAULT_MAX_TOKENS = 512
 DEFAULT_TEMPERATURE = 0.7
@@ -11,11 +12,11 @@ DEFAULT_TOP_P = 0.95
 def generate_journal_suggestion(current_page):
     try:
         suggestion_prompt = (
-            f"""Pe baza înregistrării din jurnal: '{current_page}', generează o singură întrebare pe care utilizatorul ar putea să și-o pună în jurnalul său.
+            f"""Pe baza înregistrării din jurnal: '{current_page}', generează o singură întrebare pe care utilizatorul ar putea să și-o pună într-un jurnal.
             Întrebarea ar trebui să încurajeze reflecția personală mai profundă, explorarea sentimentelor sau clarificarea obiectivelor."""
         )
-        print("Generated suggestion prompt:", suggestion_prompt)
-
+        logger.info("Generated suggestion prompt: %s", suggestion_prompt)
+
         suggestion_response = ""
         response_stream = client.chat_completion(
             [
@@ -26,17 +27,17 @@ def generate_journal_suggestion(current_page):
             temperature=DEFAULT_TEMPERATURE,
             top_p=DEFAULT_TOP_P,
         )
-        print("Response stream received.")
-
+        logger.info("Response stream received.")
+
         for message in response_stream:
-            print("Message received:", message)
+            logger.info("Message received: %s", message)
             token = message.choices[0].delta.content
             suggestion_response += token
 
         return suggestion_response
 
     except Exception as e:
-        print("An error occurred:", e)
+        logger.error("An error occurred: %s", e)
         return jsonify({"error": str(e)}), 500
 
 @app.route("/", methods=["POST", "GET"])
@@ -51,14 +52,13 @@ def chat():
         system_message = data.get("system_message", "You are a friendly chatbot.")
         journal_page = data.get("journal_page", "")
 
-
         suggestion = ""
         if journal_page:
             suggestion = generate_journal_suggestion(journal_page)
 
         return jsonify({"journal_suggestion": suggestion})
     except Exception as e:
-        print(e)
+        logger.error("Error in chat endpoint: %s", e)
         return jsonify({"error": str(e)}), 500
 
 if __name__ == "__main__":
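
Note: the commit routes logging through gunicorn's glogging.Logger, which suits a gunicorn-served deployment. For running the file directly (python app.py), a minimal sketch of the same print-to-log switch using the standard library logging module; the logger name and format below are illustrative assumptions, not part of the commit.

import logging

# Illustrative stdlib setup; the logger name and format are assumptions.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)
logger = logging.getLogger("app")

# Same call pattern as the diff's logger.info / logger.error lines,
# using lazy %-style arguments instead of string concatenation.
suggestion_prompt = "example prompt"
logger.info("Generated suggestion prompt: %s", suggestion_prompt)
try:
    raise ValueError("example failure")
except Exception as e:
    logger.error("An error occurred: %s", e)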
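
For reference, the route touched by this change reads system_message and journal_page from the request body and returns journal_suggestion. A hedged sketch of exercising it with requests; the host, port, and sample payload are assumptions (7860 is only the usual Spaces default), not values taken from the commit.

import requests

# Host and port are assumptions; point this at wherever the Flask app runs.
resp = requests.post(
    "http://localhost:7860/",
    json={
        "system_message": "You are a friendly chatbot.",
        "journal_page": "Today felt rushed and I did not get to the things that matter.",
    },
)
print(resp.json())  # expected shape: {"journal_suggestion": "..."}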