Update app.py
app.py CHANGED
@@ -4,10 +4,10 @@ import pandas as pd
 from transformers import pipeline
 import random
 
-# Load the Hugging Face model for text generation (
+# Load the Hugging Face model for text generation and summarization (FLAN-T5 or T5-Small)
 @st.cache_resource
 def load_text_generator():
-    return pipeline("
+    return pipeline("text2text-generation", model="google/flan-t5-base")  # Efficient and professional model
 
 text_generator = load_text_generator()
 
@@ -34,9 +34,9 @@ def search_topic_in_content(content, topic):
     topic_sentences = [s for s in sentences if topic.lower() in s.lower()]  # Filter sentences containing the topic
     return ". ".join(topic_sentences) if topic_sentences else None
 
-# Function to generate content using Hugging Face model
-def
-    prompt = f"Explain '{topic}' in
+# Function to generate structured content using Hugging Face model
+def generate_professional_content(topic):
+    prompt = f"Explain '{topic}' in bullet points, highlighting the key concepts, examples, and applications in a professional manner for electrical engineering students."
     response = text_generator(prompt, max_length=300, num_return_sequences=1)
     return response[0]['generated_text']
 
@@ -86,7 +86,7 @@ if st.button("Generate Study Material"):
         st.write(filtered_content)
     else:
         st.warning("No relevant content found in the uploaded material. Generating AI-based content instead.")
-        ai_content =
+        ai_content = generate_professional_content(topic)
         st.write("**AI-Generated Content:**")
         st.write(ai_content)
 else:
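For reference, below is a minimal standalone sketch of the generation path this commit introduces, using only the model name, prompt, and parameters that appear in the diff; the topic passed at the end is a hypothetical example. In the app itself, @st.cache_resource simply caches the loaded pipeline across Streamlit reruns so the model is created only once.

# Standalone sketch of the updated generation path (not the full app).
from transformers import pipeline

# Same task and model as the new load_text_generator() in app.py.
text_generator = pipeline("text2text-generation", model="google/flan-t5-base")

def generate_professional_content(topic):
    # Prompt taken from the diff; wrapped here only for readability.
    prompt = (
        f"Explain '{topic}' in bullet points, highlighting the key concepts, "
        "examples, and applications in a professional manner for electrical "
        "engineering students."
    )
    # A text2text-generation pipeline returns a list of dicts,
    # e.g. [{"generated_text": "..."}].
    response = text_generator(prompt, max_length=300, num_return_sequences=1)
    return response[0]["generated_text"]

# Hypothetical topic, for illustration only.
print(generate_professional_content("three-phase induction motors"))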