|
|
|
|
|
import os |
|
import tensorflow as tf |
|
from pymongo import MongoClient |
|
from flask import Flask, request, jsonify, render_template |
|
from huggingface_hub import login |
|
from services.disease_detection import PoultryFarmBot |
|
from services.llama_service import llama2_response |
|
|
|
|
|
# Flask application instance; HTML templates and static assets are served
# from the local "templates" and "static" directories.
app = Flask(__name__, template_folder="templates", static_folder="static")




# NOTE(review): these imports are placed after `app` is created —
# presumably to avoid a circular import with modules that import `app`;
# confirm before moving them to the top of the file.
from auth.auth_routes import auth_bp

from routes.health_routes import health_bp

from routes.inventory_routes import inventory_bp

from routes.usage_routes import usage_bp



# Mount each feature blueprint under its URL prefix.
app.register_blueprint(usage_bp, url_prefix='/api/usage')

app.register_blueprint(inventory_bp, url_prefix='/api/inventory')

app.register_blueprint(health_bp, url_prefix='/api/health')

app.register_blueprint(auth_bp, url_prefix='/auth')
|
|
|
|
|
# Authenticate against the Hugging Face Hub so model downloads work.
# NOTE(review): the env var is read as 'HF_Token' (mixed case) — confirm this
# matches the deployment environment; HF_TOKEN is the conventional name.
tok = os.getenv('HF_Token')

if tok:

    # add_to_git_credential=True also stores the token in the git credential
    # helper, which requires git to be available on the host.
    login(token=tok, add_to_git_credential=True)

else:

    print("Warning: Hugging Face token not found in environment variables.")
|
|
|
|
|
# MongoDB connection; MONGO_URI must be set in the environment.
# NOTE(review): MongoClient(None) silently falls back to localhost:27017 —
# confirm that is acceptable when MONGO_URI is unset.
MONGO_URI = os.getenv("MONGO_URI")

client = MongoClient(MONGO_URI)

# Database handle shared with the services below (e.g. PoultryFarmBot).
db = client.poultry_farm
|
|
|
|
|
# Startup diagnostics: report the TensorFlow build and whether a GPU is visible.
print("TensorFlow version:", tf.__version__)

print("Eager execution:", tf.executing_eagerly())

print("TensorFlow GPU Available:", tf.config.list_physical_devices('GPU'))




# Enable mixed precision (float16 compute) only when a GPU is present;
# on CPU the default float32 policy is kept.
from tensorflow.keras import mixed_precision

if len(tf.config.list_physical_devices('GPU')) > 0:

    policy = mixed_precision.Policy('mixed_float16')

    mixed_precision.set_global_policy(policy)

    print("Using mixed precision with GPU")

else:

    print("Using CPU without mixed precision")
|
|
|
|
|
# Disease-detection bot, backed by the shared MongoDB handle above.
bot = PoultryFarmBot(db=db)
|
|
|
|
|
|
|
@app.route('/')
def index():
    """Serve the landing page of the chat UI."""
    page = render_template('index.html')
    return page
|
|
|
|
|
|
|
@app.route('/api/chat', methods=['POST'])
def chat():
    """Answer a chat request with either disease detection or a text reply.

    Expects a JSON body with an optional ``message`` (text query) and an
    optional ``image`` (poultry image payload for disease detection). An
    image, when present, takes precedence over the message.

    Returns:
        JSON ``{"response": ...}`` on success, or ``{"error": ...}`` with
        HTTP 400 when the body is not JSON or contains neither field.
    """
    # request.json raises / returns None on a non-JSON body depending on the
    # Flask version; get_json(silent=True) gives us None uniformly so we can
    # answer with a clean 400 instead of a 500 AttributeError.
    data = request.get_json(silent=True)
    if not data:
        return jsonify({"error": "Request body must be JSON."}), 400

    message = data.get('message')
    image = data.get('image')

    if image:
        # predict() returns (diagnosis, name, status, recommendation);
        # only the summary fields are surfaced to the user here.
        _diagnosis, name, status, recom = bot.predict(image)
        return jsonify({
            "response": f"Disease: {name}, Status: {status}, Recommendation: {recom}"
        })

    if not message:
        # Previously a missing message was passed straight to the model;
        # reject it explicitly instead.
        return jsonify({"error": "Provide either 'message' or 'image'."}), 400

    # Text-only request: delegate to the Llama language model service.
    response = llama2_response(message)
    return jsonify({"response": response})
|
|
|
|
|
|
|
if __name__ == "__main__":

    # Development entry point only.
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader and
    # must not be used in production — deploy behind a WSGI server instead.
    app.run(debug=True)
|
|