File size: 1,930 Bytes
4031a24
 
 
 
 
 
 
b8986a1
 
 
99bf340
b8986a1
 
 
4031a24
b8986a1
 
 
 
4031a24
99bf340
4031a24
 
99bf340
 
4031a24
 
 
 
b8986a1
4031a24
b8986a1
 
 
 
99bf340
b8986a1
4031a24
b8986a1
 
 
4031a24
 
 
 
 
 
99bf340
 
 
 
b8986a1
4031a24
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
"""
backend.py
----------
This module handles the core processing of clinical queries. It retrieves relevant PubMed abstracts
via the retrieval pipeline and then uses BioGPT to generate a clinical answer. Designed for speed and clarity.
"""

from transformers import pipeline
from retrieval import get_relevant_pubmed_docs

# Use Microsoft BioGPT-Large-PubMedQA for generation.
# NOTE(review): pipeline() downloads/loads the model at import time, so merely
# importing this module is slow and requires model availability — consider
# lazy initialization if import cost becomes a problem.
MODEL_NAME = "microsoft/BioGPT-Large-PubMedQA"
qa_pipeline = pipeline("text-generation", model=MODEL_NAME)

# In-memory cache for retrieved documents (used for knowledge graph visualization).
# Keys are raw query strings; values are the document lists returned by
# get_relevant_pubmed_docs. Entries are never evicted, so the cache grows
# unboundedly over the process lifetime.
docs_cache = {}

def process_medical_query(query: str):
    """
    Process a clinical query end-to-end.

    Steps:
      1. Retrieve relevant PubMed abstracts via the retrieval pipeline.
      2. Generate an answer with BioGPT over the combined abstracts.

    Args:
        query: Free-text clinical question.

    Returns:
        A 4-tuple ``(final_answer, sub_questions, initial_answer, critique)``:
          - final_answer: The generated answer, or a fallback message when
            retrieval or generation yields nothing usable.
          - sub_questions: Always ``[]`` (self-critique omitted for speed).
          - initial_answer: Same as ``final_answer`` in this streamlined
            version (``""`` when no documents were found).
          - critique: Always ``""`` (omitted for performance).
    """
    # Retrieve relevant documents and cache them for the knowledge-graph view.
    relevant_docs = get_relevant_pubmed_docs(query)
    docs_cache[query] = relevant_docs

    if not relevant_docs:
        return ("No documents found for this query.", [], "", "")

    # Combine abstracts into a single context block for the prompt.
    context_text = "\n\n".join(relevant_docs)
    prompt = f"Question: {query}\nContext: {context_text}\nAnswer:"

    try:
        generation = qa_pipeline(prompt, max_new_tokens=100, truncation=True)
    except Exception as e:
        # Best-effort: a generation failure degrades to the fallback answer
        # below instead of propagating to the caller.
        generation = None
        print(f"[ERROR] BioGPT generation failed: {e}")

    # Guard the result shape explicitly: a non-list, an empty list, or a
    # result dict missing "generated_text" all fall back to the default
    # message. (Previously a missing key raised KeyError outside the
    # try/except, defeating the defensive handling above.)
    if generation and isinstance(generation, list):
        answer = generation[0].get("generated_text", "No answer found.")
    else:
        answer = "No answer found."

    # Empty placeholders keep the historical 4-tuple interface stable.
    return answer, [], answer, ""