Spaces:
Paused
Paused
Commit
·
0408a46
1
Parent(s):
db97814
Added journal page functionality
Browse files
app.py
CHANGED
@@ -1,11 +1,9 @@
|
|
1 |
from flask import Flask, request, jsonify
|
2 |
from huggingface_hub import InferenceClient
|
3 |
|
4 |
-
# Initialize Flask app and Hugging Face client
|
5 |
app = Flask(__name__)
|
6 |
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
7 |
|
8 |
-
# Helper function to generate a response from the AI model
|
9 |
def generate_response(message, history, system_message, max_tokens, temperature, top_p):
|
10 |
messages = [{"role": "system", "content": system_message}]
|
11 |
|
@@ -18,7 +16,6 @@ def generate_response(message, history, system_message, max_tokens, temperature,
|
|
18 |
messages.append({"role": "user", "content": message})
|
19 |
response = ""
|
20 |
|
21 |
-
# Streaming response from the Hugging Face model
|
22 |
for message in client.chat_completion(
|
23 |
messages,
|
24 |
max_tokens=max_tokens,
|
@@ -31,10 +28,29 @@ def generate_response(message, history, system_message, max_tokens, temperature,
|
|
31 |
|
32 |
return response
|
33 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
34 |
@app.route("/", methods=["POST","GET"])
|
35 |
def home():
|
36 |
return "Hi!"
|
37 |
-
|
38 |
@app.route("/chat", methods=["POST"])
|
39 |
def chat():
|
40 |
try:
|
@@ -45,15 +61,18 @@ def chat():
|
|
45 |
max_tokens = data.get("max_tokens", 512)
|
46 |
temperature = data.get("temperature", 0.7)
|
47 |
top_p = data.get("top_p", 0.95)
|
|
|
48 |
|
49 |
-
# Validate inputs
|
50 |
if not isinstance(history, list) or not all(isinstance(pair, list) for pair in history):
|
51 |
return jsonify({"error": "Invalid history format. It should be a list of [message, response] pairs."}), 400
|
52 |
|
53 |
-
# Generate AI response
|
54 |
response = generate_response(message, history, system_message, max_tokens, temperature, top_p)
|
55 |
|
56 |
-
|
|
|
|
|
|
|
|
|
57 |
except Exception as e:
|
58 |
return jsonify({"error": str(e)}), 500
|
59 |
|
|
|
1 |
from flask import Flask, request, jsonify
|
2 |
from huggingface_hub import InferenceClient
|
3 |
|
|
|
4 |
app = Flask(__name__)
|
5 |
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
6 |
|
|
|
7 |
def generate_response(message, history, system_message, max_tokens, temperature, top_p):
|
8 |
messages = [{"role": "system", "content": system_message}]
|
9 |
|
|
|
16 |
messages.append({"role": "user", "content": message})
|
17 |
response = ""
|
18 |
|
|
|
19 |
for message in client.chat_completion(
|
20 |
messages,
|
21 |
max_tokens=max_tokens,
|
|
|
28 |
|
29 |
return response
|
30 |
|
31 |
+
def generate_journal_suggestion(current_page):
    """Generate a follow-up journaling suggestion for a given journal entry.

    Streams a short completion (max 150 tokens) from the module-level
    Hugging Face `client` and accumulates the streamed tokens into a
    single string. The prompt is in Romanian by design (user-facing text).

    Args:
        current_page: The text of the user's current journal entry.

    Returns:
        The accumulated suggestion text (empty string if the stream
        yields no content).
    """
    suggestion_prompt = (
        f"Pe baza înregistrării din jurnal: '{current_page}', sugerează ce ar putea scrie utilizatorul în continuare în jurnalul său. "
        "Concentrează-te pe încurajarea reflecției mai profunde, stabilirea obiectivelor sau explorarea ideilor conexe."
    )
    suggestion_response = ""

    for message in client.chat_completion(
        [{"role": "user", "content": suggestion_prompt}],
        max_tokens=150,
        stream=True,
        temperature=0.7,
        top_p=0.9,
    ):
        token = message.choices[0].delta.content
        # BUG FIX: streamed chunks can carry delta.content == None
        # (e.g. the role-announcing first chunk or the final chunk);
        # concatenating None raised TypeError in the original.
        if token:
            suggestion_response += token

    return suggestion_response
|
49 |
+
|
50 |
@app.route("/", methods=["POST","GET"])
def home():
    """Liveness endpoint: replies with a fixed greeting on GET or POST."""
    return "Hi!"
|
53 |
+
|
54 |
@app.route("/chat", methods=["POST"])
|
55 |
def chat():
|
56 |
try:
|
|
|
61 |
max_tokens = data.get("max_tokens", 512)
|
62 |
temperature = data.get("temperature", 0.7)
|
63 |
top_p = data.get("top_p", 0.95)
|
64 |
+
journal_page = data.get("journal_page", "")
|
65 |
|
|
|
66 |
if not isinstance(history, list) or not all(isinstance(pair, list) for pair in history):
|
67 |
return jsonify({"error": "Invalid history format. It should be a list of [message, response] pairs."}), 400
|
68 |
|
|
|
69 |
response = generate_response(message, history, system_message, max_tokens, temperature, top_p)
|
70 |
|
71 |
+
suggestion = ""
|
72 |
+
if journal_page:
|
73 |
+
suggestion = generate_journal_suggestion(journal_page)
|
74 |
+
|
75 |
+
return jsonify({"response": response, "journal_suggestion": suggestion})
|
76 |
except Exception as e:
|
77 |
return jsonify({"error": str(e)}), 500
|
78 |
|