import streamlit as st
from transformers import pipeline
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, PageBreak
import os
# Load Hugging Face Token (Ensure it's set in Env Variables)
HF_TOKEN = os.getenv("HF_TOKEN")
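# Note: HF_TOKEN is optional for public checkpoints such as google/flan-t5-large;
# if it is unset, os.getenv returns None and the pipeline falls back to anonymous access.
# Typical setup (assumed shell usage): export HF_TOKEN=hf_xxxxxxxx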
# Optimized Model (Flan-T5 for Lower Memory Usage)
MODEL_NAME = "google/flan-t5-large"
# Load Model Efficiently (Avoid Reloading on every Streamlit rerun)
@st.cache_resource
def load_model():
    try:
        return pipeline("text2text-generation", model=MODEL_NAME, token=HF_TOKEN)
    except Exception as e:
        st.error(f"Error loading model: {str(e)}")
        return None
# Load once and reuse
generator = load_model()
# Function to Generate the Functional Requirement Document
def generate_functional_requirements(topic):
    if generator is None:
        return "Error: Model failed to load."
    sections = {
        "Introduction": [
            "Overview and Purpose",
            "Intended Users"
        ],
        "Scope": [
            "System Description",
            "Key Functionalities"
        ],
        "Functional Specifications": [
            "User Roles",
            "Core Features"
        ],
        "Security & Compliance": [
            "Regulatory Requirements",
            "Data Protection"
        ],
        "Future Enhancements": [
            "Potential Feature Expansions",
            "Roadmap & Next Steps"
        ]
    }
    document = []  # Store paragraphs in a structured way
    styles = getSampleStyleSheet()
    for section, subsections in sections.items():
        document.append(Paragraph(f"<b>{section}</b>", styles['Title']))
        document.append(Spacer(1, 10))
        for subsection in subsections:
            prompt = f"Write a **detailed 300-word section** on '{subsection}' for the topic '{topic}' in banking. Provide structured paragraphs with examples."
            output = generator(prompt, max_length=1024, do_sample=True, temperature=0.7)
            if output and isinstance(output, list) and len(output) > 0 and "generated_text" in output[0]:
                document.append(Paragraph(f"<b>{subsection}</b>", styles['Heading2']))
                document.append(Spacer(1, 6))
                document.append(Paragraph(output[0]["generated_text"], styles['Normal']))
                document.append(Spacer(1, 10))
            else:
                return "Error: Model failed to generate text."
        document.append(PageBreak())  # Add a page break after each major section
    return document
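# Note: on success this returns a list of ReportLab flowables (Paragraph/Spacer/PageBreak)
# that can be passed straight to SimpleDocTemplate.build; on failure it returns a plain
# error string, which main() detects with isinstance(content, str).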
# Function to Save Generated Content as PDF
def save_to_pdf(content, filename):
    if not content:
        st.error("Error: No content available to write to the PDF.")
        return
    doc = SimpleDocTemplate(filename, pagesize=letter)
    doc.build(content)
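# doc.build lays the flowables out onto letter-sized pages and writes the PDF to `filename`.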
# Streamlit UI
def main():
    st.title("AI-Powered Functional Requirement Generator for Banking")
    banking_topics = [
        "Core Banking System", "Loan Management System", "Payment Processing Gateway",
        "Risk and Fraud Detection", "Regulatory Compliance Management", "Digital Banking APIs",
        "Customer Onboarding & KYC", "Treasury Management", "Wealth & Portfolio Management"
    ]
    topic = st.selectbox("Select a Banking Functional Requirement Topic", banking_topics)
    if st.button("Generate Functional Requirement Document"):
        with st.spinner("Generating... This may take a while."):
            content = generate_functional_requirements(topic)
        if isinstance(content, str) and "Error" in content:
            st.error(content)
        else:
            filename = "functional_requirement.pdf"
            save_to_pdf(content, filename)
            st.success("Document Generated Successfully!")
            # Read the PDF into memory so the temporary file can be removed safely
            with open(filename, "rb") as f:
                pdf_bytes = f.read()
            st.download_button("Download PDF", data=pdf_bytes, file_name=filename, mime="application/pdf")
            os.remove(filename)  # Cleanup after download

if __name__ == "__main__":
    main()
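# Quick start (assumes this file is saved as app.py; the package list is a minimal guess):
#   pip install streamlit transformers torch reportlab
#   streamlit run app.py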