File size: 2,959 Bytes
022342b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 |
from flask import Flask, request, jsonify
import numpy as np
import tensorflow as tf
from tensorflow.lite.python.interpreter import Interpreter
import os
import google.generativeai as genai
app = Flask(__name__)

# Load the TensorFlow Lite model and allocate its tensors once at startup,
# so every request reuses the same interpreter.
interpreter = Interpreter(model_path="model.tflite")
interpreter.allocate_tensors()

# Cached tensor metadata used by /predict on every request.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Class labels — order must match the model's output vector.
data_cat = ['disposable cups', 'paper', 'plastic bottle']
img_height, img_width = 224, 224

# Configure Gemini API.
# SECURITY: the previous code shipped a real API key as the os.getenv()
# fallback, leaking the credential into source control. Never hard-code
# secrets; require the environment variable and fail fast if it is absent.
GEMINI_API_KEY = os.getenv('GEMINI_API_KEY')
if not GEMINI_API_KEY:
    raise RuntimeError("GEMINI_API_KEY environment variable is not set")
genai.configure(api_key=GEMINI_API_KEY)

# Initialize Gemini model
gemini_model = genai.GenerativeModel('gemini-pro')
@app.route('/predict', methods=['POST'])
def predict():
    """Classify an uploaded waste image and return eco-friendly reuse tips.

    Expects a multipart/form-data POST with the file under the key 'image'.

    Returns JSON:
        class      -- predicted label from ``data_cat``
        confidence -- top score as a percentage (plain float)
        insights   -- Gemini-generated repurposing suggestions, or an
                      error string if the Gemini call failed

    Responds 400 when no image is attached and 500 on any decode or
    inference failure.
    """
    if 'image' not in request.files:
        return jsonify({"error": "No image uploaded"}), 400
    file = request.files['image']
    try:
        # Decode and resize to the model's expected input resolution.
        img = tf.image.decode_image(file.read(), channels=3)
        img = tf.image.resize(img, [img_height, img_width])
        img_bat = np.expand_dims(img, 0).astype(np.float32)

        # Run inference through the TFLite interpreter.
        interpreter.set_tensor(input_details[0]['index'], img_bat)
        interpreter.invoke()
        output_data = interpreter.get_tensor(output_details[0]['index'])

        # BUG FIX: np.argmax/np.max return numpy scalars (np.int64,
        # np.float32); Flask's jsonify raises "Object of type float32 is
        # not JSON serializable" on them. Cast to builtin int/float.
        predicted_class = data_cat[int(np.argmax(output_data))]
        confidence = float(np.max(output_data)) * 100

        # Ask Gemini for repurposing suggestions for the detected class.
        prompt = f"""
You are a sustainability-focused AI. Analyze the {predicted_class} (solid dry waste)
and generate the top three innovative, eco-friendly recommendations for repurposing it.
Each recommendation should:
- Provide a title
- Be practical and easy to implement
- Be environmentally beneficial
- Include a one or two-sentence explanation
Format each recommendation with a clear title followed by the explanation on a new line.
"""
        try:
            # Best-effort: classification still succeeds even if the
            # insights call fails; surface the error in the payload.
            response = gemini_model.generate_content(prompt)
            insights = response.text.strip()
        except Exception as e:
            insights = f"Error generating insights: {str(e)}"
            print(f"Gemini API error: {str(e)}")  # For debugging

        return jsonify({
            "class": predicted_class,
            "confidence": confidence,
            "insights": insights,
        })
    except Exception as e:
        # Top-level request boundary: report decode/inference failures
        # to the client as a 500 rather than crashing the worker.
        return jsonify({"error": str(e)}), 500
if __name__ == "__main__":
    # NOTE(review): debug=True enables the Werkzeug reloader and the
    # interactive debugger — fine for local development only; it must be
    # disabled (and a WSGI server used) in any production deployment.
    app.run(debug=True)
|