Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
@@ -1,22 +1,6 @@
|
|
1 |
from flask import Flask, request, jsonify
|
2 |
from huggingface_hub import InferenceClient
|
3 |
-
from gunicorn.glogging import Logger
|
4 |
-
class Config:
|
5 |
-
loglevel = "info"
|
6 |
-
# errorlog = "error.log" # Log file for errors
|
7 |
-
# accesslog = "access.log" # Log file for access logs
|
8 |
-
syslog = False
|
9 |
-
syslog_facility = "daemon"
|
10 |
-
syslog_prefix = "gunicorn"
|
11 |
-
capture_output = True
|
12 |
-
logconfig = None
|
13 |
-
logconfig_json = None
|
14 |
-
logconfig_dict = None
|
15 |
-
disable_redirect_access_to_syslog = False
|
16 |
-
|
17 |
-
cfg = Config()
|
18 |
app = Flask(__name__)
|
19 |
-
logger = Logger(cfg)
|
20 |
client = InferenceClient("meta-llama/Llama-3.1-8B-Instruct")
|
21 |
DEFAULT_MAX_TOKENS = 512
|
22 |
DEFAULT_TEMPERATURE = 0.7
|
@@ -28,7 +12,7 @@ def generate_journal_suggestion(current_page):
|
|
28 |
f"""Pe baza înregistrării din jurnal: '{current_page}', generează o singură întrebare pe care utilizatorul ar putea să și-o pună într-un jurnal.
|
29 |
Întrebarea ar trebui să încurajeze reflecția personală mai profundă, explorarea sentimentelor sau clarificarea obiectivelor."""
|
30 |
)
|
31 |
-
logger.info("Generated suggestion prompt: %s", suggestion_prompt)
|
32 |
|
33 |
suggestion_response = ""
|
34 |
response_stream = client.chat_completion(
|
@@ -40,7 +24,7 @@ def generate_journal_suggestion(current_page):
|
|
40 |
temperature=DEFAULT_TEMPERATURE,
|
41 |
top_p=DEFAULT_TOP_P,
|
42 |
)
|
43 |
-
logger.info("Response stream received.")
|
44 |
|
45 |
for message in response_stream:
|
46 |
logger.info("Message received: %s", message)
|
@@ -50,8 +34,7 @@ def generate_journal_suggestion(current_page):
|
|
50 |
return suggestion_response
|
51 |
|
52 |
except Exception as e:
|
53 |
-
|
54 |
-
return jsonify({"error": str(e)}), 500
|
55 |
|
56 |
@app.route("/", methods=["POST", "GET"])
|
57 |
def home():
|
@@ -59,20 +42,16 @@ def home():
|
|
59 |
|
60 |
@app.route("/chat", methods=["POST"])
|
61 |
def chat():
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
journal_page = data.get("journal_page", "")
|
67 |
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
|
72 |
-
|
73 |
-
except Exception as e:
|
74 |
-
logger.error("Error in chat endpoint: %s", e)
|
75 |
-
return jsonify({"error": str(e)}), 500
|
76 |
|
77 |
if __name__ == "__main__":
|
78 |
app.run(debug=True)
|
|
|
1 |
import logging

from flask import Flask, request, jsonify
from huggingface_hub import InferenceClient

# The gunicorn Logger (and its Config) was removed in this change, but
# logger.info(...) calls remain further down in this file — without a
# module-level `logger` those calls raise NameError at request time.
# Restore a standard-library logger under the same name.
logger = logging.getLogger(__name__)

app = Flask(__name__)

# Hosted Inference API client used for chat completions.
client = InferenceClient("meta-llama/Llama-3.1-8B-Instruct")

# Generation defaults passed to chat_completion().
DEFAULT_MAX_TOKENS = 512
DEFAULT_TEMPERATURE = 0.7
|
|
|
12 |
f"""Pe baza înregistrării din jurnal: '{current_page}', generează o singură întrebare pe care utilizatorul ar putea să și-o pună într-un jurnal.
|
13 |
Întrebarea ar trebui să încurajeze reflecția personală mai profundă, explorarea sentimentelor sau clarificarea obiectivelor."""
|
14 |
)
|
15 |
+
# logger.info("Generated suggestion prompt: %s", suggestion_prompt)
|
16 |
|
17 |
suggestion_response = ""
|
18 |
response_stream = client.chat_completion(
|
|
|
24 |
temperature=DEFAULT_TEMPERATURE,
|
25 |
top_p=DEFAULT_TOP_P,
|
26 |
)
|
27 |
+
# logger.info("Response stream received.")
|
28 |
|
29 |
for message in response_stream:
|
30 |
logger.info("Message received: %s", message)
|
|
|
34 |
return suggestion_response
|
35 |
|
36 |
except Exception as e:
|
37 |
+
return jsonify({"error": str(e)})
|
|
|
38 |
|
39 |
@app.route("/", methods=["POST", "GET"])
|
40 |
def home():
|
|
|
42 |
|
43 |
@app.route("/chat", methods=["POST"])
def chat():
    """Generate a reflective journal question from the posted journal page.

    Expects a JSON body. Reads "journal_page" (and tolerates the optional
    "message" / "system_message" fields, which are currently unused by this
    endpoint). Returns ``{"journal_suggestion": <str>}`` on success, or a
    JSON error envelope with HTTP 500 on failure instead of an unhandled
    exception (the previous revision returned a JSON 500 here; this change
    had dropped that handling).
    """
    try:
        # silent=True yields None (instead of raising) on a non-JSON body;
        # fall back to an empty dict so .get() below never AttributeErrors.
        data = request.get_json(silent=True) or {}
        message = data.get("message", "")
        system_message = data.get("system_message", "You are a friendly chatbot.")
        journal_page = data.get("journal_page", "")

        suggestion = ""
        if journal_page:
            suggestion = generate_journal_suggestion(journal_page)

        return jsonify({"journal_suggestion": suggestion})
    except Exception as e:
        # app.logger is always available; the module-level gunicorn logger
        # was removed from this file.
        app.logger.exception("Error in chat endpoint: %s", e)
        return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
|
55 |
|
56 |
if __name__ == "__main__":
    # Local development entry point only; debug=True enables the Werkzeug
    # reloader/debugger and must not be used in production serving.
    app.run(debug=True)
|