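# Streamlit app: generates a blog post with Google Gemini (via LangChain)
# from a user-supplied title, word count, and target audience.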
import streamlit as st
from langchain.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI
import os
import spaces

# Expose the 'geminiapi' secret as GOOGLE_API_KEY, which langchain_google_genai reads
os.environ['GOOGLE_API_KEY'] = os.getenv('geminiapi')
# Function for LLM response
def llm_response(user_text, number_of_words, blog_audience):
    # define llm
    llm = ChatGoogleGenerativeAI(model="gemini-pro")

    # define prompt template
    ptemplate = '''
    You are an Expert Blog Writer. For the topic {user_text},
    write a Blog in {number_of_words} words for an audience of {blog_audience}.
    '''
    prompt = PromptTemplate(
        template=ptemplate,
        input_variables=['user_text', 'number_of_words', 'blog_audience'],
    )
    final_prompt = prompt.format(
        user_text=user_text,
        number_of_words=number_of_words,
        blog_audience=blog_audience,
    )

    # invoke llm to get result (invoke returns an AIMessage; .content holds the text)
    result = llm.invoke(final_prompt)

    # print result on screen
    st.subheader("Result:")
    st.write(result.content)
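
# Streamlit UI: page configuration and input widgets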
# define page config
st.set_page_config(
    page_title="Blog Generation",
    page_icon="🧊",
    layout="centered",
    initial_sidebar_state="collapsed",
)
st.header("Blog Generation App🧊")
user_text = st.text_input("Enter title for blog")
col1, col2 = st.columns([6, 6])
with col1:
    number_of_words = st.text_input("Number of words in Blog")
with col2:
    blog_audience = st.selectbox(
        "Select target audience",
        ['Data Scientists', 'Researchers', 'Common People'],
        index=2,
    )

submit_btn = st.button("Submit")
if submit_btn:
    llm_response(user_text, number_of_words, blog_audience)  # function call for printing results
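
# To run locally (assuming this file is saved as app.py): streamlit run app.py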