import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, pipeline
from langchain_community.document_loaders import PyPDFLoader
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings, HuggingFacePipeline
from langchain.chains import RetrievalQA
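
# Retrieval-augmented QA: PDF pages -> text chunks -> MiniLM embeddings -> FAISS index,
# with Flan-T5 generating answers from the retrieved chunks.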

# Load PDF
loader = PyPDFLoader("Medical_Book.pdf")
documents = loader.load()
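# PyPDFLoader yields one Document per PDF page, with the page number in its metadata.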

# Split text
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=20)
all_splits = text_splitter.split_documents(documents)
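# chunk_size and chunk_overlap are measured in characters: ~1000-character chunks are small
# enough to embed well, and the 20-character overlap preserves continuity across cuts.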

# Embeddings
embedding_model = "sentence-transformers/all-MiniLM-L6-v2"
embeddings = HuggingFaceEmbeddings(model_name=embedding_model)
vectorstore = FAISS.from_documents(all_splits, embeddings)
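# The FAISS index is built in memory, so the PDF is re-embedded on every restart.
# Optional sketch (assumes a writable "faiss_index" folder; recent langchain_community
# versions require allow_dangerous_deserialization=True when loading):
#   vectorstore.save_local("faiss_index")
#   vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)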

# Load lightweight model (Flan-T5)
model_name = "google/flan-t5-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
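# flan-t5-base (~250M parameters) is small enough to run on CPU, so the app works on a
# CPU-only Space.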

# LangChain wrapper around the local pipeline; generation settings (max_new_tokens,
# temperature) are applied per request from the UI sliders in chatbot_response below.
pipe = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
llm = HuggingFacePipeline(pipeline=pipe)

# RetrievalQA chain (no chat history, only the answer)
qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=vectorstore.as_retriever(), return_source_documents=False)
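# RetrievalQA defaults to the "stuff" chain type: the retriever's top chunks (k=4 for a
# FAISS retriever by default) are stuffed into a single QA prompt for Flan-T5. Four
# ~1000-character chunks exceed the 512-token input length Flan-T5 was trained with, so a
# smaller k, e.g. vectorstore.as_retriever(search_kwargs={"k": 2}), or smaller chunks may fit better.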

# Gradio app
def chatbot_response(question, max_tokens, temperature):
    # Apply the slider settings to the model's generation config before the chain runs;
    # temperature only takes effect when sampling is enabled.
    model.generation_config.max_new_tokens = int(max_tokens)
    model.generation_config.temperature = float(temperature)
    model.generation_config.do_sample = True
    result = qa_chain.invoke({"query": question})["result"]
    return result.strip()
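# Optional sanity check before wiring up the UI (uncomment to run once at startup):
# print(chatbot_response("What are the symptoms of pneumonia?", max_tokens=128, temperature=0.3))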

interface = gr.Interface(
    fn=chatbot_response,
    inputs=[
        gr.Textbox(label="Your Medical Question", placeholder="e.g. What are the symptoms of pneumonia?"),
        gr.Slider(label="Max Tokens", minimum=10, maximum=512, value=256, step=1),
        gr.Slider(label="Temperature", minimum=0.1, maximum=1.0, value=0.7, step=0.01)
    ],
    outputs=gr.Textbox(label="Answer"),
    title="🩺 Light Medical Chatbot",
    description="Ask medical questions and get answers retrieved from the Medical Book PDF, generated by a lightweight Flan-T5 model."
)

interface.launch(share=True)