# journal/app.py
from flask import Flask, request, jsonify
from huggingface_hub import InferenceClient
app = Flask(__name__)
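# Note (assumption): meta-llama/Llama-3.1-8B-Instruct is a gated model on the Hub,
# so the InferenceClient will typically need a Hugging Face access token
# (for example via the HF_TOKEN environment variable) to be authorized for inference.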
client = InferenceClient("meta-llama/Llama-3.1-8B-Instruct")
DEFAULT_MAX_TOKENS = 512
DEFAULT_TEMPERATURE = 0.7
DEFAULT_TOP_P = 0.95
def generate_journal_suggestion(current_page):
    try:
        # Prompt (in Romanian): based on the journal entry, generate a single
        # question the user could ask themselves in their journal; it should
        # encourage deeper reflection, exploring feelings, or clarifying goals.
        suggestion_prompt = (
            f"""Pe baza înregistrării din jurnal: '{current_page}', generează o singură întrebare pe care utilizatorul ar putea să și-o pună în jurnalul său.
Întrebarea ar trebui să încurajeze reflecția personală mai profundă, explorarea sentimentelor sau clarificarea obiectivelor."""
        )
        print("Generated suggestion prompt:", suggestion_prompt)

        suggestion_response = ""
        response_stream = client.chat_completion(
            [{"role": "user", "content": suggestion_prompt}],
            max_tokens=150,  # note: DEFAULT_MAX_TOKENS (512) is defined above but not used here
            stream=True,
            temperature=DEFAULT_TEMPERATURE,
            top_p=DEFAULT_TOP_P,
        )
        print("Response stream received.")

        for message in response_stream:
            print("Message received:", message)
            token = message.choices[0].delta.content
            if token:  # the final streamed chunk may carry no content
                suggestion_response += token
        return suggestion_response
    except Exception as e:
        # Re-raise so the calling route can return the JSON error; returning
        # jsonify() here would hand a (Response, 500) tuple back to the caller.
        print("An error occurred:", e)
        raise
@app.route("/", methods=["POST", "GET"])
def home():
return "Hi!"
@app.route("/chat", methods=["POST"])
def chat():
try:
data = request.json
message = data.get("message", "")
system_message = data.get("system_message", "You are a friendly chatbot.")
journal_page = data.get("journal_page", "")
suggestion = ""
if journal_page:
suggestion = journal_page #generate_journal_suggestion(journal_page)
return jsonify({"journal_suggestion": suggestion})
except Exception as e:
print(e)
return jsonify({"error": str(e)}), 500

if __name__ == "__main__":
    app.run(debug=True)
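
# Example request (a sketch, assuming the server runs locally on Flask's default
# port 5000; the payload keys match the /chat handler above):
#
#   curl -X POST http://127.0.0.1:5000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"journal_page": "Today I went for a walk in the park."}'
#
# Expected response shape: {"journal_suggestion": "..."}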