ArrcttacsrjksX committed on
Commit c0d1457 · verified
1 Parent(s): 475a923

Upload app (14).py

Files changed (1)
  1. app (14).py +32 -0
app (14).py ADDED
@@ -0,0 +1,32 @@
+ import gradio as gr
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # Load the pre-trained model and tokenizer
+ model_name = "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)  # AutoTokenizer detects the correct tokenizer class
+ model = AutoModelForCausalLM.from_pretrained(model_name)  # AutoModelForCausalLM for causal language models
+
+ def generate_response(message, history):
+     # Only the latest message is used; the conversation history is not included
+     input_text = f"{message}"
+
+     # Tokenize the input text
+     inputs = tokenizer.encode(input_text, return_tensors="pt")
+
+     # Generate a response with the model
+     outputs = model.generate(inputs, max_length=50, num_return_sequences=1)
+
+     # Decode the generated tokens (note: the output also contains the prompt text)
+     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+     return response
+
+ # Create the ChatInterface
+ demo = gr.ChatInterface(
+     fn=generate_response,
+     title="Chat with DeepSeek",
+     description="A simple chatbot powered by DeepSeek."
+ )
+
+ # Launch the app
+ demo.launch()
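
Note: generate_response ignores the history argument that gr.ChatInterface passes in, and the decoded output includes the prompt text. A minimal sketch of an alternative, assuming Gradio's default (user, assistant) tuple history and that the model's tokenizer ships a chat template:

def generate_response(message, history):
    # Fold prior (user, assistant) turns plus the new message into a chat message list
    messages = []
    for user_turn, assistant_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # Build the prompt with the tokenizer's chat template (assumed to be available)
    inputs = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    )
    outputs = model.generate(inputs, max_new_tokens=256)

    # Decode only the newly generated tokens, skipping the prompt
    return tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True)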