Update app.py
Browse files
app.py
CHANGED
@@ -27,7 +27,7 @@
|
|
27 |
# - **Hugging Face Model**: "sentence-transformers/all-MiniLM-L6-v2"
|
28 |
|
29 |
# Choose your model from the above options:
|
30 |
-
MODEL_NAME = "
|
31 |
|
32 |
# ---------------------------------------------------------------------------
|
33 |
# Code Below to Load, Generate, and Save Functional Requirement Documents
|
@@ -39,82 +39,94 @@ from reportlab.pdfgen import canvas
|
|
39 |
import os
|
40 |
|
41 |
# Load Hugging Face Token from Environment (Set this in Hugging Face Spaces Secrets)
|
42 |
-
HF_TOKEN = os.getenv("HF_TOKEN")
|
43 |
|
44 |
-
# Model
|
45 |
-
MODEL_NAME = "
|
46 |
|
47 |
-
# Load Model
|
48 |
@st.cache_resource
|
49 |
def load_model():
|
50 |
try:
|
51 |
-
# Initialize the Hugging Face pipeline with the selected model for text generation.
|
52 |
return pipeline("text-generation", model=MODEL_NAME, token=HF_TOKEN)
|
53 |
except Exception as e:
|
54 |
-
st.error(f"β Error loading model: {str(e)}")
|
55 |
-
return None
|
56 |
|
57 |
-
#
|
58 |
-
generator = load_model()
|
59 |
|
60 |
-
# Function to
|
61 |
def generate_functional_requirements(topic):
|
62 |
if generator is None:
|
63 |
-
return "Error: Model
|
64 |
|
65 |
-
#
|
66 |
prompt = f"""
|
67 |
-
Generate a
|
68 |
-
The document should
|
69 |
|
70 |
-
1
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
81 |
"""
|
82 |
-
|
83 |
-
# Generate content using the text generation model
|
84 |
-
output = generator(prompt, max_length=3000, do_sample=True, temperature=0.7) # Generate long-form content
|
85 |
-
generated_text = output[0]['generated_text']
|
86 |
-
|
87 |
-
if not generated_text.strip():
|
88 |
-
return "No content generated. Please try again." # Handle the case where the model generates an empty response
|
89 |
-
|
90 |
-
return generated_text # Return the generated content
|
91 |
|
92 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
93 |
def save_to_pdf(content, filename):
|
94 |
-
c = canvas.Canvas(filename, pagesize=letter)
|
95 |
-
c.setFont("Helvetica", 10)
|
96 |
|
97 |
-
text = c.beginText(40, 750) # Set
|
98 |
-
text.setLeading(14) #
|
99 |
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
c.
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
# Streamlit UI - User Interface for interacting with the app
|
114 |
def main():
|
115 |
-
st.title("π AI-Powered Functional Requirement Generator for Banking")
|
116 |
|
117 |
-
#
|
118 |
banking_topics = [
|
119 |
"Core Banking System",
|
120 |
"Loan Management System",
|
@@ -127,21 +139,19 @@ def main():
|
|
127 |
"Wealth & Portfolio Management"
|
128 |
]
|
129 |
|
130 |
-
# Dropdown menu to select a topic
|
131 |
topic = st.selectbox("Select a Banking Functional Requirement Topic", banking_topics)
|
132 |
|
133 |
-
# Button to trigger the document generation
|
134 |
if st.button("Generate Functional Requirement Document"):
|
135 |
-
with st.spinner("Generating... This may take a while."):
|
136 |
-
content = generate_functional_requirements(topic) # Generate
|
|
|
137 |
if "Error" in content:
|
138 |
-
st.error(content)
|
139 |
else:
|
140 |
-
|
141 |
-
filename
|
142 |
-
|
143 |
-
st.
|
144 |
-
st.download_button(label="π₯ Download PDF", data=open(filename, "rb"), file_name=filename, mime="application/pdf") # Provide a download link for the PDF
|
145 |
|
146 |
if __name__ == "__main__":
|
147 |
-
main()
|
|
|
27 |
# - **Hugging Face Model**: "sentence-transformers/all-MiniLM-L6-v2"

# Choose your model from the above options:
# NOTE(review): MODEL_NAME is assigned again further down (file line 45) with
# the same value; at import time the later assignment is the one in effect.
MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.1" # Change this to one of the other models based on your needs.
|
31 |
|
32 |
# ---------------------------------------------------------------------------
|
33 |
# Code Below to Load, Generate, and Save Functional Requirement Documents
|
|
|
39 |
import os

# Load Hugging Face Token from Environment (Set this in Hugging Face Spaces Secrets)
# NOTE(review): os.getenv returns None when HF_TOKEN is not set; the pipeline
# is then created unauthenticated, so gated models will fail to download.
HF_TOKEN = os.getenv("HF_TOKEN")
|
43 |
|
44 |
+
# π Recommended Model: Mistral-7B (Lightweight & Open-Source)
# NOTE(review): a second, byte-identical `MODEL_NAME = ...` assignment used to
# live here, duplicating the one at file line 30. The redundant reassignment
# has been removed so the model is configured in exactly one place; change it
# at the top of the file.
|
46 |
|
47 |
+
# Build the text-generation pipeline once per process; st.cache_resource keeps
# the loaded weights alive across Streamlit reruns instead of reloading them.
@st.cache_resource
def load_model():
    """Return a cached Hugging Face text-generation pipeline, or None on failure."""
    try:
        text_generator = pipeline("text-generation", model=MODEL_NAME, token=HF_TOKEN)
    except Exception as e:
        # Surface the failure in the UI rather than crashing the whole app;
        # callers must handle the None sentinel.
        st.error(f"β Error loading model: {str(e)}")
        return None
    return text_generator

# Loaded a single time at module import; every request reuses this pipeline.
generator = load_model()
|
|
|
57 |
|
58 |
+
# π Function to Generate Functional Requirement Document
def generate_functional_requirements(topic):
    """Generate a structured functional-requirements document for *topic*.

    Returns the generated document text. On failure returns a string starting
    with "Error" (callers detect errors by looking for that marker).
    """
    if generator is None:
        return "Error: Model failed to load."

    # Structured Prompt for Better Document Output
    prompt = f"""
    Generate a detailed functional requirements document for {topic} in the banking sector.
    The document should follow this structured format:

    1οΈβ£ **Introduction**
    - Overview
    - Purpose
    - Intended Users

    2οΈβ£ **Scope**
    - System Description
    - Key Functionalities

    3οΈβ£ **Functional Specifications**
    - Core Features
    - User Roles & Permissions
    - Transaction Processing

    4οΈβ£ **System Features**
    - Security & Compliance
    - Performance Metrics

    5οΈβ£ **Regulatory & Compliance**
    - Central Bank Regulations
    - Data Protection & Privacy

    6οΈβ£ **Conclusion**
    - Summary
    - Future Enhancements
    """

    document = ""  # Accumulates the generated chunks.

    for _ in range(4):  # Sample several chunks to build a longer document.
        # FIX(review): use max_new_tokens instead of max_length. max_length
        # counts the prompt's tokens too, and this prompt is long, so the old
        # max_length=1024 left little or no budget for actual generation.
        output = generator(prompt, max_new_tokens=1024, do_sample=True, temperature=0.7)
        if output and len(output) > 0 and "generated_text" in output[0]:
            chunk = output[0]["generated_text"]
            # FIX(review): text-generation pipelines echo the prompt at the
            # start of generated_text; strip it so the instructions are not
            # pasted into the document once per chunk.
            if chunk.startswith(prompt):
                chunk = chunk[len(prompt):]
            document += chunk + "\n\n"
        else:
            return "Error: Model failed to generate text."

    return document  # Return final document text
|
105 |
+
|
106 |
+
# π Function to Save Generated Content as PDF
def save_to_pdf(content, filename):
    """Write *content* to *filename* as a paginated PDF.

    Lines are drawn one per row in 10pt Helvetica with 14pt leading; a new
    page is started whenever the text cursor drops below y=50.
    """
    c = canvas.Canvas(filename, pagesize=letter)
    c.setFont("Helvetica", 10)

    text = c.beginText(40, 750)  # Top-left starting position for the text block
    text.setLeading(14)  # Line spacing

    for line in content.split("\n"):
        text.textLine(line)  # Add text line by line
        if text.getY() < 50:  # Start a new page when space runs out
            c.drawText(text)
            c.showPage()
            # FIX(review): showPage() resets the canvas graphics state, so the
            # font must be re-applied or later pages fall back to the default.
            c.setFont("Helvetica", 10)
            text = c.beginText(40, 750)
            text.setLeading(14)

    c.drawText(text)
    c.save()
|
124 |
+
|
125 |
+
# π Streamlit UI
|
|
|
|
|
126 |
def main():
|
127 |
+
st.title("π AI-Powered Functional Requirement Generator for Banking")
|
128 |
|
129 |
+
# Dropdown menu for selecting banking topics
|
130 |
banking_topics = [
|
131 |
"Core Banking System",
|
132 |
"Loan Management System",
|
|
|
139 |
"Wealth & Portfolio Management"
|
140 |
]
|
141 |
|
|
|
142 |
topic = st.selectbox("Select a Banking Functional Requirement Topic", banking_topics)
|
143 |
|
|
|
144 |
if st.button("Generate Functional Requirement Document"):
|
145 |
+
with st.spinner("Generating... This may take a while."):
|
146 |
+
content = generate_functional_requirements(topic) # Generate content
|
147 |
+
|
148 |
if "Error" in content:
|
149 |
+
st.error(content)
|
150 |
else:
|
151 |
+
filename = "functional_requirement.pdf"
|
152 |
+
save_to_pdf(content, filename) # Save to PDF
|
153 |
+
st.success("β
Functional Requirement Document Generated!")
|
154 |
+
st.download_button(label="π₯ Download PDF", data=open(filename, "rb"), file_name=filename, mime="application/pdf")
|
|
|
155 |
|
156 |
# Standard script entry point: launch the Streamlit UI only when this file is
# executed directly, not when it is imported as a module.
if __name__ == "__main__":
    main()
|