import streamlit as st
import google.generativeai as genai
import os

MODEL_ID = "gemini-2.0-flash-exp"

# Read the Gemini API key from the environment and configure the client
api_key = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=api_key)

# Cache the model in session state so it is created only once per session
if "model" not in st.session_state:
    st.session_state.model = genai.GenerativeModel(MODEL_ID)

model = st.session_state.model

# Extra instructions appended to the prompt when the matching feature is enabled
creative_prompt = ""
factcheck_prompt = ""

# Page Title
st.title("LemmaTeks: AI-Powered Text Generator")

# Sidebar for Settings
with st.sidebar:
    st.header("Configuration")
    
    # Dropdown for Output Format
    output_format = st.selectbox(
        "Choose Output Format:", 
        ["Story", "Poem", "Article", "Code"]
    )
    
    # Dropdown for Tone/Style
    tone_style = st.selectbox(
        "Select Tone/Style:", 
        ["Formal", "Informal", "Humorous", "Technical"]
    )
    
    # Sliders for Text Length and Creativity
    text_length = st.slider("Text Length (words):", min_value=50, max_value=2000, value=1000, step=50)
    creativity_level = st.slider("Creativity Level:", min_value=0.0, max_value=1.0, value=0.3, step=0.1)
    
    # Checkboxes for Features
    creative_mode = st.checkbox("Enable Creative Mode")
    fact_checking = st.checkbox("Enable Fact-Checking")

    # Modify the prompt based on the selected features
    if creative_mode:
        creative_prompt = " Optimize the creativity of your response."
    if fact_checking:
        factcheck_prompt = " Support your answer with evidence."

# Text Input Field
user_prompt = st.text_area("Enter Your Prompt Here:")

# Append the creative-mode and fact-checking instructions to the user's prompt
full_prompt = user_prompt + creative_prompt + factcheck_prompt

# Submit Button
if st.button("Generate"):
    if user_prompt.strip() == "":
        st.warning("Please enter a prompt before generating!")
    else:
        # Process the AI request
        st.write("Generating response...")

        try:
            response = model.generate_content(
                f"Format: {output_format}\nTone: {tone_style}\nPrompt: {full_prompt}",
                generation_config=genai.GenerationConfig(
                    # text_length is in words; allow roughly two output tokens per word
                    max_output_tokens=text_length * 2,
                    temperature=creativity_level,
                ),
            )

            st.markdown(response.text)

        except Exception as e:
            st.error(f"Exception occurred: {e}")