Jurk06 committed
Commit 353997e · verified
1 Parent(s): 820f7b8

Create app.py

Files changed (1)
  1. app.py +38 -0
app.py ADDED
@@ -0,0 +1,38 @@
+ import gradio as gr
+ from langchain.llms import HuggingFacePipeline
+ from langchain.chains import ConversationChain
+ from langchain.memory import ConversationBufferMemory
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+
+ # Load the pre-trained model and tokenizer from Hugging Face
+ model_name = "microsoft/DialoGPT-medium"  # You can replace this with other conversational models
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ # Create a pipeline for conversational tasks
+ hf_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)
+
+ # Wrap the pipeline in a LangChain LLM
+ llm = HuggingFacePipeline(pipeline=hf_pipeline)
+
+ # Create a conversation chain with buffer memory
+ memory = ConversationBufferMemory()
+ conversation = ConversationChain(llm=llm, memory=memory)
+
+ # Handle one chat turn: run the chain and append the exchange to the chat history
+ def chatbot(user_input, history):
+     response = conversation.run(user_input)
+     return history + [(user_input, response)]
+
+ # Gradio UI
+ with gr.Blocks() as demo:
+     gr.Markdown("## 🤖 Chatbot with Hugging Face and LangChain")
+     chatbot_interface = gr.Chatbot()
+     user_input = gr.Textbox(label="Type your message:", placeholder="Say something...")
+     submit_button = gr.Button("Send")
+
+     # Bind input and output: the current history flows in, the updated history flows back to the Chatbot
+     submit_button.click(chatbot, inputs=[user_input, chatbot_interface], outputs=chatbot_interface)
+
+ # Launch the app
+ demo.launch()