PebinAPJ committed
Commit 57ea418 · verified · 1 Parent(s): f7b42df

Update app.py

Files changed (1)
  1. app.py +8 -4
app.py CHANGED
@@ -7,7 +7,6 @@ from langchain_community.vectorstores import FAISS
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 from transformers import pipeline  # Hugging Face pipeline for using T5 model
-import os
 
 
 # Access Hugging Face API token from Streamlit secrets
@@ -42,10 +41,15 @@ def get_vectorstore(text_chunks):
     return vectorstore
 
 
-# Function to create the conversation chain using T5 from Hugging Face API
+# Function to create the conversation chain using a smaller model
 def get_conversation_chain(vectorstore):
-    # Use HuggingFacePipeline with a Hugging Face T5 model
-    t5_model = pipeline("text2text-generation", model="google/t5-large", tokenizer="google/t5-large")
+    # Use HuggingFacePipeline with a smaller model like `t5-small`
+    t5_model = pipeline(
+        "text2text-generation",
+        model="t5-small",  # Smaller model for low-end systems
+        tokenizer="t5-small",
+        use_auth_token=hf_token  # Automatically fetches the token from Streamlit secrets
+    )
 
     llm = HuggingFacePipeline(pipeline=t5_model)
 
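
For context, a minimal sketch of how the updated get_conversation_chain could fit together after this commit, assuming hf_token is read from Streamlit secrets. The secret key name, the HuggingFacePipeline import path, and the memory/retriever settings are assumptions and are not shown in the diff:

import streamlit as st
from transformers import pipeline
from langchain_community.llms import HuggingFacePipeline  # assumed import path
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

# Assumed secret name; the actual key in .streamlit/secrets.toml is not shown in the diff
hf_token = st.secrets["HUGGINGFACE_API_TOKEN"]

def get_conversation_chain(vectorstore):
    # Smaller seq2seq model so the app can run on low-end hardware
    t5_model = pipeline(
        "text2text-generation",
        model="t5-small",
        tokenizer="t5-small",
        use_auth_token=hf_token,
    )
    llm = HuggingFacePipeline(pipeline=t5_model)

    # Conversation memory plus retrieval over the FAISS vector store built earlier
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory,
    )

Note that t5-small is a public checkpoint, so it loads without a token, and recent transformers releases prefer token= over the deprecated use_auth_token= argument.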