import streamlit as st
import os
import json
import base64
import time
import pickle
import requests
import numpy as np
from PIL import Image

# Set the page layout (set_page_config must be the first Streamlit call)
st.set_page_config(layout="wide")
# Load model once
with open("best_clf.pkl", "rb") as file:
best_clf = pickle.load(file)
# Try loading environment variables locally
try:
from dotenv import load_dotenv
load_dotenv()
except ImportError:
    # python-dotenv is optional (e.g., when the host injects env vars directly)
    pass
# Get the token from environment variables
HF_TOKEN = os.environ.get("HF_TOKEN")
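# Helper: query the Hugging Face Inference API. Text-generation requests go
# through the OpenAI-compatible chat endpoint (messages + model); other text
# tasks use the classic {"inputs": ...} payload; images are sent as raw bytes.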
def query_huggingface_model(selected_model: dict, input_data, input_type="text", max_tokens=512, task="text-classification", temperature=0.7, top_p=0.9):
API_URL = selected_model.get("url")
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
try:
if input_type == "text":
if task == "text-generation":
payload = {
    "messages": [
        {"role": "user", "content": input_data}
    ],
    "model": selected_model.get("model"),
    # forward the generation settings (OpenAI-compatible chat parameters)
    "max_tokens": max_tokens,
    "temperature": temperature,
    "top_p": top_p,
}
else:
payload = {"inputs": input_data}
response = requests.post(API_URL, headers=headers, json=payload)
elif input_type == "image":
with open(input_data, "rb") as f:
data = f.read()
response = requests.post(API_URL, headers=headers, data=data)
else:
return {"error": f"Unsupported input_type: {input_type}"}
response.raise_for_status()
return response.json()
except requests.exceptions.RequestException as e:
return {"error": str(e)}
def extract_response_content(response):
print(f"Response is: {response}")
# For text generation or image captioning
if isinstance(response, list):
if response and isinstance(response[0], dict) and "generated_text" in response[0]:
return response[0]["generated_text"]
elif response and isinstance(response[0], list) and response[0] and "label" in response[0][0]:
# For text classification
return [(item["label"], round(item["score"], 3)) for item in response[0]]
# For OpenAI-style chat responses
elif isinstance(response, dict):
if "choices" in response and isinstance(response["choices"], list):
try:
return response["choices"][0]["message"]["content"]
except (KeyError, IndexError, TypeError):
return "Error: Could not extract message from choices"
elif "error" in response:
return f"Error: {response['error']}"
return "Unknown response format"
# Initialize session-state defaults for every field used across the
# multi-step form (inputs, prediction result, and wizard navigation flags).
defaults = {
    # --- Step 1 ---
    "name": "Paul",
    "gender": "Male",
    "age": 25,
    "currentSmoker": "Yes",
    "cigsPerDay": 0,
    "BPMeds": False,
    "diabetes": False,
    # --- Step 2 ---
    "totChol": 180,    # mg/dL
    "sysBP": 120,      # mmHg
    "diaBP": 80,       # mmHg
    # --- Step 3 ---
    "BMI": 22.0,
    "heartRate": 70,   # bpm
    "glucose": 90,     # mg/dL
    # Optional: prediction result
    "Risk": 1,
    "proba": 80,
    "framework": "gen",
    # Wizard navigation: "back" = step not yet completed, "next" = completed
    "form1": "back",
    "form2": "back",
    "form3": "back",
    "form4": "back",
    "form5": "back",
    "form6": "back",
}
for key, value in defaults.items():
    if key not in st.session_state:
        st.session_state[key] = value
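# The start*/back*/background*/fill* values below hold colours and progress
# values for the step indicator; they are meant to be interpolated into the
# CSS/HTML blocks injected with st.markdown further down.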
if st.session_state.form1 == "next":
start1 = 46
back1 = "#6dc9e4"
background = "#f3f3f4"
fill = '#6dc9e4'
back2 = "#4794ff"
back3 = "#6dc9e4"
back4 = "#6dc9e4"
elif st.session_state.form1 == "back":
start1 = 0
back1 = "#4794ff"
back2 = "#6dc9e4"
back3 = "#6dc9e4"
back4 = "#6dc9e4"
background =" white"
fill = "#f7f7f7"
##################################
if st.session_state.form2 == "next":
start2 = 46
background2 = "#f3f3f4"
fill2 = '#6dc9e4'
back3 = "#4794ff"
back2 = "#6dc9e4"
back1 = "#6dc9e4"
back4 = "#6dc9e4"
elif st.session_state.form2 == "back":
start2 = 0
background2 =" white"
fill2 = "#f7f7f7"
####################################
if st.session_state.form3 == "next":
start3 = 46
background3 = "#f3f3f4"
back4 = "#4794ff"
back2 = "#6dc9e4"
back1 = "#6dc9e4"
back3 = "#6dc9e4"
fill3 = '#6dc9e4'
border = "grey"
elif st.session_state.form3 == "back":
start3 = 0
background3 =" white"
fill3 = "#f7f7f7"
####################################
if st.session_state.form4 == "next":
start4 = 46
background4 = "#f3f3f4"
fill4 = '#6dc9e4'
back4 = "#6dc9e4"
elif st.session_state.form4 == "back":
start4 = 0
background4 =" white"
fill4 = "#f7f7f7"
if st.session_state.framework == "gen":
encoded_logo = "hugging.png"
main_bg_ext = "png"
main_bg = "image.gif"
st.markdown(
f"""
""",
unsafe_allow_html=True,
)
# Overlay container
st.markdown(
f"""
""",
unsafe_allow_html=True,
)
st.markdown("""
""", unsafe_allow_html=True)
st.markdown(
f"""
HeartCheck AI
""",
unsafe_allow_html=True,
)
st.markdown(
f"""
HeartCheck uses intelligent risk analysis to predict your likelihood of
heart disease, empowering you with personalized insights, early warnings,
and lifestyle tips to keep your heart healthy and strong.
""",
unsafe_allow_html=True,
)
with st.container(key = "main"):
col1,col2 = st.columns([3,3])
with col1:
with st.container(key= "side"):
if st.button("1",key="step_1"):
st.session_state.form1 = "back"
st.session_state.form2 = "back"
st.session_state.form3 = "back"
st.session_state.form4 = "back"
st.session_state.form5 = "back"
st.rerun()
if st.button("2",key="step_2"):
st.session_state.form2 = "back"
st.session_state.form3 = "back"
st.session_state.form4 = "back"
st.session_state.form5 = "back"
st.rerun()
if st.button("3",key="step_3"):
st.session_state.form3 = "back"
st.session_state.form4 = "back"
st.session_state.form5 = "back"
st.rerun()
if st.button("4",key="step_4"):
st.session_state.form4 = "back"
st.session_state.form5 = "back"
st.rerun()
with col2:
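# Right column: render whichever step of the wizard is currently active.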
if st.session_state.form1 == "back":
with st.container(key="form1"):
st.write("π§ Step 1: Personal Info")
with st.container(key="form-head"):
st.image("icon.png")
with st.form( key="first"):
with st.container(key="form-content"):
# Input fields
st.session_state.name = st.text_input("Name", value=st.session_state.name)
st.session_state.age = st.number_input("Age", min_value=0, max_value=120, step=1, value=st.session_state.age)
st.session_state.gender = st.radio("Sex:", ["Male", "Female"], horizontal=True, index=0 if st.session_state.gender == "Male" else 1)
# Navigation buttons
col1, col2 = st.columns([4, 1])
next = col2.form_submit_button("Next ")
if next:
st.session_state.form1 = "next"
st.rerun()
elif st.session_state.form1 == "next" and st.session_state.form2 == "back":
with st.container(key="form2"):
st.write("π¬ Step 2: Clinical History")
st.radio("Do you currently smoke?", ["Yes", "No"], horizontal=True, key="currentSmoker")
print(st.session_state.currentSmoker)
with st.form("form_step_2"):
with st.container(key="form-content1"):
# Show 'cigsPerDay' only if the person currently smokes
if st.session_state.currentSmoker == "Yes":
    # the widget's minimum is 1, so bump a stored 0 up to 1
    st.session_state.cigsPerDay = max(1, st.session_state.cigsPerDay)
    st.session_state.cigsPerDay = st.number_input("How many cigarettes per day?", min_value=1, max_value=60, step=1, value=st.session_state.cigsPerDay)
else:
st.session_state.cigsPerDay = 0 # default to 0 if non-smoker
r1, r2 = st.columns([6, 3])
with r1:
    st.session_state.BPMeds = st.radio(
        "Do you take blood pressure medication?", ["Yes", "No"],
        horizontal=True, index=0 if st.session_state.BPMeds == "Yes" else 1)
with r2:
    st.session_state.diabetes = st.radio(
        "Do you have diabetes?", ["Yes", "No"],
        horizontal=True, index=0 if st.session_state.diabetes == "Yes" else 1)
col1, col2 = st.columns([4,1])
back = col1.form_submit_button("Back")
if back:
st.session_state.form1 = "back"
st.rerun()
next = col2.form_submit_button("Next")
if next:
st.session_state.form2 = "next"
st.rerun()
elif st.session_state.form2 == "next" and st.session_state.form3 == "back":
with st.container(key="form2"):
st.write("π Step 3: Vital Signs & Cholesterol")
with st.form("form_step_2"):
with st.container(key="form-content2"):
# Step 3 inputs
st.session_state.totChol = st.number_input("Total Cholesterol (mg/dL)", min_value=100, max_value=400, step=1,value= st.session_state.totChol)
st.session_state.sysBP = st.number_input("Systolic Blood Pressure (mmHg)", min_value=80, max_value=250, step=1,value = st.session_state.sysBP)
st.session_state.diaBP = st.number_input("Diastolic Blood Pressure (mmHg)", min_value=50, max_value=150, step=1,value= st.session_state.diaBP)
col1, col2 = st.columns([4,1])
back = col1.form_submit_button("Back")
if back:
st.session_state.form2 = "back"
st.rerun()
next = col2.form_submit_button("Next")
if next:
st.session_state.form3 = "next"
st.rerun()
elif st.session_state.form3 == "next" and st.session_state.form4 == "back":
with st.container(key="form3"):
st.write("π§ͺ Step 4: Body Metrics & Glucose")
with st.form("form_step_3"):
with st.container(key="form-content3"):
# Step 4 inputs
st.session_state.BMI = st.number_input("Body Mass Index (BMI)", min_value=10.0, max_value=60.0, step=0.1,value=st.session_state.BMI)
st.session_state.heartRate = st.number_input("Heart Rate (bpm)", min_value=40, max_value=200, step=1,value= st.session_state.heartRate)
st.session_state.glucose = st.number_input("Glucose Level (mg/dL)", min_value=50, max_value=300, step=1,value= st.session_state.glucose)
col1, col2 = st.columns([4,1])
back = col1.form_submit_button("Back")
if back:
st.session_state.form3 = "back"
st.rerun()
next = col2.form_submit_button("Predict")
if next:
st.session_state.form4 = "next"
st.rerun()
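# Prediction step: assemble the feature vector from the collected answers,
# run the saved classifier, and display the resulting risk percentage.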
elif st.session_state.form4 == "next" and st.session_state.form5 == "back":
# Construct input array from collected values
new_data = np.array([[
1 if st.session_state.gender == "Male" else 0, # gender
st.session_state.age,
1 if st.session_state.currentSmoker == "Yes" else 0,
float(st.session_state.cigsPerDay),
1.0 if st.session_state.BPMeds == "Yes" else 0.0,
1 if st.session_state.diabetes == "Yes" else 0,
st.session_state.totChol,
st.session_state.sysBP,
st.session_state.diaBP,
st.session_state.BMI,
st.session_state.heartRate,
st.session_state.glucose
]])
loading_placeholder = st.empty()
with loading_placeholder.container():
# Show a short loading animation while the result is prepared
with st.spinner("Analyzing your heart health..."):
    st.image('load.gif', use_container_width=True)
    time.sleep(3)  # brief pause so the loading animation is visible
# Remove the loading image
loading_placeholder.empty()
# Make the prediction with the saved classifier
prediction = best_clf.predict(new_data)
prediction_proba = best_clf.predict_proba(new_data)
st.session_state.Risk = int(prediction[0])
risk_percent = prediction_proba[0][1]*100
risk_label = "At Risk of having a heart failure"
st.session_state.proba = risk_percent
name = st.session_state.name  # display name collected in step 1
with st.container(key = "result"):
# Display result
st.markdown(f"""
Hi {name}, you are
{risk_percent:.2f}%
{risk_label}
""", unsafe_allow_html=True)
if st.button("explain the result", key = "explain"):
st.session_state.form5 = "next"
st.rerun()
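# Explanation step: send the inputs and prediction to an LLM through the
# Hugging Face router and stream its personalised advice back to the user.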
elif st.session_state.form5 == "next":
def generate_stream_response(text):
# Yield the string one character at a time (for streaming)
for char in text:
yield char
time.sleep(0.02)
selected_model = {
"url": "https://router.huggingface.co/nebius/v1/chat/completions", # Replace with the Hugging Face API URL for your model
"model": "deepseek-ai/DeepSeek-V3" # Replace with the model name
}
task = "text-generation"
prompt = f"""
Hi! A person named {st.session_state.name} has just been assessed for heart disease risk.
**Prediction**: {"High Risk" if st.session_state.Risk == 1 else "Low Risk"}
**Risk Percentage**: {st.session_state.proba:.2f}%
**Input Parameters**:
- Sex: {st.session_state.gender}
- Age: {st.session_state.age}
- Current Smoker: {st.session_state.currentSmoker}
- Cigarettes per Day: {st.session_state.cigsPerDay}
- On Blood Pressure Meds: {"Yes" if st.session_state.BPMeds else "No"}
- Has Diabetes: {"Yes" if st.session_state.diabetes else "No"}
- Total Cholesterol: {st.session_state.totChol} mg/dL
- Systolic BP: {st.session_state.sysBP} mmHg
- Diastolic BP: {st.session_state.diaBP} mmHg
- BMI: {st.session_state.BMI}
- Heart Rate: {st.session_state.heartRate} bpm
- Glucose: {st.session_state.glucose} mg/dL
Please give a personalized, kind, and easy-to-understand explanation of this result. Include practical lifestyle advice and possible early warning signs to watch for. Use an encouraging, empathetic tone, and sign off with {selected_model['model']}.
"""
with st.container(key = "expert"):
with st.spinner("Model is Analysing your Results..."):
result = query_huggingface_model(selected_model, prompt, input_type="text", task=task)
response = extract_response_content(result)
st.markdown(f"""

Personalized Heart Health Advice
""", unsafe_allow_html=True)
st.write_stream(generate_stream_response(response)) # This will stream the text one character at a time