nroy8 committed
Commit cf6d982 · verified · 1 Parent(s): 37edcf6

Create app.py

Files changed (1)
  1. app.py +39 -0
app.py ADDED
@@ -0,0 +1,39 @@
+ import gradio as gr
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # ✅ Choose a public model that is available on Hugging Face
+ MODEL_NAME = "mistralai/Mistral-7B-Instruct"  # Alternative: "microsoft/BioGPT-Large"
+
+ # ✅ Load the tokenizer and model
+ try:
+     tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+     model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
+ except Exception as e:
+     print(f"Error loading model: {e}")
+     model = None  # Prevents crashing if model doesn't load
+
+ def diagnose(symptoms):
+     if model is None:
+         return "⚠️ Error: AI model failed to load. Try again later."
+
+     prompt = f"I have the following symptoms: {symptoms}. What could it be?"
+     inputs = tokenizer(prompt, return_tensors="pt")
+
+     # ✅ Generate AI response
+     output = model.generate(**inputs, max_length=200)
+     response = tokenizer.decode(output[0], skip_special_tokens=True)
+
+     return response
+
+ # ✅ Create a simple web UI
+ interface = gr.Interface(
+     fn=diagnose,
+     inputs="text",
+     outputs="text",
+     title="AI Symptom Checker",
+     description="Enter your symptoms, and the AI will suggest possible conditions."
+ )
+
+ # ✅ Launch the web app
+ if __name__ == "__main__":
+     interface.launch()
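
Note on the model ID and prompt format: the unversioned repo ID "mistralai/Mistral-7B-Instruct" may not resolve on the Hub (the published instruct checkpoints are versioned, e.g. "mistralai/Mistral-7B-Instruct-v0.2"), in which case the try/except above leaves model as None and every request returns the error string. Separately, instruct-tuned checkpoints generally respond better when the prompt is wrapped with the tokenizer's chat template instead of a raw f-string. Below is a minimal sketch of that variant, not part of this commit; it assumes the same tokenizer and model objects loaded in app.py, and the helper name diagnose_with_chat_template is illustrative.

    # Sketch only: chat-template prompting.
    # Assumes `tokenizer` and `model` from app.py loaded successfully.
    def diagnose_with_chat_template(symptoms):
        messages = [
            {"role": "user",
             "content": f"I have the following symptoms: {symptoms}. What could it be?"},
        ]
        # apply_chat_template inserts the model-specific instruction markers
        # (e.g. [INST] ... [/INST] for Mistral instruct checkpoints)
        input_ids = tokenizer.apply_chat_template(
            messages, add_generation_prompt=True, return_tensors="pt"
        )
        output = model.generate(input_ids, max_new_tokens=200)
        return tokenizer.decode(output[0], skip_special_tokens=True)

The sketch also uses max_new_tokens rather than max_length, so the response budget does not shrink as the symptom prompt gets longer.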