# app.py

# Import necessary libraries
import os
import logging
import dotenv
import gradio as gr
import numpy as np
from pymongo import MongoClient
from datetime import datetime
from huggingface_hub import login
from utils import PoultryFarmBot, llama3_response
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load environment variables from .env file
dotenv.load_dotenv()

# Setup logging for better monitoring
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# MongoDB Setup for logging and audit
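# MONGO_URI is read from the environment (.env); if it is unset, MongoClient falls back to localhost:27017.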
MONGO_URI = os.getenv("MONGO_URI")
logger.info("Connecting to MongoDB.")
client = MongoClient(MONGO_URI)
db = client.poultry_farm  # Connect to the 'poultry_farm' database
enquiries_collection = db.enquiries  # Collection to store farmer enquiries
users_collection = db.users  # Collection to store user credentials
logs_collection = db.logs  # Collection to store application logs

def log_to_db(level, message):
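    """Persist a single log record to the MongoDB 'logs' collection."""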
    log_entry = {
        "level": level,
        "message": message,
        "timestamp": datetime.utcnow()
    }
    logs_collection.insert_one(log_entry)

# Override logger methods to also log to MongoDB
class MongoHandler(logging.Handler):
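    """Logging handler that forwards formatted log records to MongoDB via log_to_db."""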
    def emit(self, record):
        log_entry = self.format(record)
        log_to_db(record.levelname, log_entry)

mongo_handler = MongoHandler()
mongo_handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
mongo_handler.setFormatter(formatter)
logger.addHandler(mongo_handler)

# Hugging Face token setup
tok = os.getenv('HF_TOKEN')
if tok:
    # Log in to Hugging Face using the token from environment variables
    logger.info("Logging in to Hugging Face.")
    login(token=tok)  # authenticate with the Hugging Face Hub so the gated Llama model can be downloaded
else:
    logger.warning("Hugging Face token not found in environment variables.")

# Initialize the bot instance
logger.info("Initializing PoultryFarmBot instance.")
bot = PoultryFarmBot(db)

# Load Llama 3.2 model and tokenizer for text generation
logger.info("Loading Llama 3.2 model and tokenizer.")
model_name = "meta-llama/Llama-3.2-3B"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
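# Note: meta-llama/Llama-3.2-3B is a gated repository; downloading it requires an authorized
# Hugging Face token, and loading the weights locally needs several GB of RAM/VRAM.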

# Add a dedicated padding token if the tokenizer does not define one, and resize the embeddings to match
if tokenizer.pad_token is None:
    logger.info("Adding padding token to tokenizer.")
    tokenizer.add_special_tokens({'pad_token': '[PAD]'})
    model.resize_token_embeddings(len(tokenizer))

def chatbot_response(image, text, username, password):
    """
    Handle user input and generate appropriate responses.

    Args:
        image (numpy.ndarray): Image input for disease detection.
        text (str): Text input for general queries.
        username (str): Username for authentication.
        password (str): Password for authentication.

    Returns:
        str: Response generated by the chatbot.
    """
    user = bot.authenticate_user(username, password)
    if not user:
        return "Authentication failed. Please check your username and password."

    user_id = user['_id']

    # If an image is provided, diagnose the disease
    if image is not None:
        logger.info("Image input detected. Proceeding with disease diagnosis.")
        diagnosis, name, status, recom = bot.diagnose_disease(image)
        if name and status and recom:
            logger.info("Diagnosis complete.")
        else:
            logger.warning("Diagnosis incomplete.")
        bot.log_enquiry("image", "Image Enquiry", diagnosis, user_id)
        return diagnosis
    else:
        # Generate a response using Llama 3.2 for general text input
        logger.info("Text input detected. Generating response.")
        response = llama3_response(text, tokenizer, model)
        bot.log_enquiry("text", text, response, user_id)
        return response

# Gradio interface
def build_gradio_interface():
    """
    Build the Gradio interface for the chatbot.

    Returns:
        gr.Blocks: Gradio Blocks object representing the chatbot interface.
    """
    logger.info("Building Gradio interface.")
    with gr.Blocks(theme=gr.themes.Base()) as chatbot_interface:
        gr.Markdown("# 🐔 Poultry Management Chatbot")
        gr.Markdown("Welcome! This chatbot helps you manage your poultry with ease. You can upload an image for disease diagnosis or ask any questions about poultry management.")

        chat_history = gr.Chatbot()  # Conversation display component (not currently wired to the submit callback)
        with gr.Row():
            with gr.Column(scale=1):
                fecal_image = gr.Image(
                    label="Upload Image of Poultry Feces (Optional)",
                    type="numpy",
                    elem_id="image-upload",
                    show_label=True,
                )
            with gr.Column(scale=2):
                user_input = gr.Textbox(
                    label="Ask a question",
                    placeholder="Ask about poultry management...",
                    lines=3,
                    elem_id="user-input",
                )
                username = gr.Textbox(
                    label="Username",
                    placeholder="Enter your username",
                    lines=1,
                    elem_id="username-input",
                )
                password = gr.Textbox(
                    label="Password",
                    placeholder="Enter your password",
                    type="password",
                    lines=1,
                    elem_id="password-input",
                )

        output_box = gr.Textbox(
            label="Response",
            placeholder="Response will appear here...",
            interactive=False,
            lines=10,
            elem_id="output-box",
        )

        submit_button = gr.Button(
            "Submit",
            variant="primary",
            elem_id="submit-button"
        )
        # Connect the submit button to the chatbot response function
        submit_button.click(
            fn=chatbot_response,
            inputs=[fecal_image, user_input, username, password],
            outputs=[output_box]
        )
    logger.info("Gradio interface built successfully.")
    return chatbot_interface

# Launch the Gradio interface
if __name__ == "__main__":
    logger.info("Launching Gradio interface.")
    interface = build_gradio_interface()
    # Launch the interface with queuing enabled for concurrent requests
    interface.queue().launch(debug=True, share=True)