abdull4h commited on
Commit
21bea31
·
verified ·
1 Parent(s): cdeabf7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -10
app.py CHANGED
@@ -1,18 +1,26 @@
1
  import gradio as gr
2
  from transformers import pipeline, Conversation
3
- import torch # Ensures that PyTorch is available
4
 
5
- # Load the conversational pipeline with a local model
6
- chatbot = pipeline("conversational", model="facebook/blenderbot-400M-distill")
 
 
 
 
7
 
8
  def chat(user_input):
9
- # Initialize a conversation with the user's message
10
- conversation = Conversation(user_input)
11
- # Process the conversation with the model
12
- updated_conversation = chatbot(conversation)
13
- # Return the latest generated response
14
- return updated_conversation.generated_responses[-1]
 
 
 
15
 
 
16
  iface = gr.Interface(
17
  fn=chat,
18
  inputs=gr.Textbox(lines=2, placeholder="Type your message here..."),
@@ -22,4 +30,4 @@ iface = gr.Interface(
22
  )
23
 
24
  if __name__ == "__main__":
25
- iface.launch()
 
1
  import gradio as gr
2
  from transformers import pipeline, Conversation
3
+ import torch
4
 
5
# Build the conversational pipeline once at module import so every chat
# request reuses the same loaded model instead of reloading it per call.
# NOTE(review): the "conversational" pipeline task and the Conversation
# class were deprecated/removed in newer transformers releases — confirm
# the pinned transformers version still supports them.
_device = 0 if torch.cuda.is_available() else -1  # first CUDA GPU if present, else CPU
chatbot = pipeline(
    task="conversational",
    model="facebook/blenderbot-400M-distill",
    device=_device,
)
11
 
12
def chat(user_input):
    """Generate a single chatbot reply for *user_input*.

    Returns the model's newest response text. Any failure is caught at
    this UI boundary and returned as a human-readable error string so
    the Gradio app never crashes on a single bad request.
    """
    try:
        # Wrap the raw text in a Conversation, run it through the
        # pipeline, and surface the most recent generated reply.
        convo = Conversation(user_input)
        convo = chatbot(convo)
        return convo.generated_responses[-1]
    except Exception as e:
        # Report the failure to the user instead of raising.
        return f"An error occurred: {str(e)}"
22
 
23
+ # Create the Gradio interface
24
  iface = gr.Interface(
25
  fn=chat,
26
  inputs=gr.Textbox(lines=2, placeholder="Type your message here..."),
 
30
  )
31
 
32
# Entry point: start the Gradio server only when executed as a script,
# not when this module is imported.
if __name__ == "__main__":
    iface.launch()