|
from transformers import AutoTokenizer, TextStreamer |
|
from unsloth import FastLanguageModel |
|
import torch |
|
|
|
|
|
model_name = "Rafay17/Llama3.2_1b_customModle2"

tokenizer = AutoTokenizer.from_pretrained(model_name)

# BUG FIX: FastLanguageModel.from_pretrained returns a (model, tokenizer)
# tuple. The original code assigned that tuple to `model`, which breaks
# FastLanguageModel.for_inference(model) and every later model.generate()
# call. Unpack it; we keep using the tokenizer loaded above.
model, _ = FastLanguageModel.from_pretrained(
    model_name=model_name,
    max_seq_length=512,       # matches the tokenizer truncation limit used below
    dtype=torch.float16,      # unsloth expects a torch dtype (or None), not the string "float16"
    load_in_4bit=True,        # 4-bit quantization so the model fits on small GPUs
)

# Enable unsloth's optimized inference mode (disables training-only paths).
FastLanguageModel.for_inference(model)
|
|
|
|
|
def generate_response(input_text, max_new_tokens=100):
    """Stream a labeled model response for *input_text* to stdout.

    Builds a prompt that instructs the model to answer using a fixed set of
    labels, tokenizes it, and streams the generation token-by-token via
    ``TextStreamer``. Relies on the module-level ``model`` and ``tokenizer``.

    Args:
        input_text: The user's conversation text to respond to.
        max_new_tokens: Upper bound on generated tokens (default 100,
            preserving the original hard-coded behavior).

    Returns:
        None — output is streamed to stdout by the streamer.
    """
    labeled_prompt = (
        "Please provide the response with the following labels:\n"
        "Speaker: [SPEAKER]\n"
        "Text: [TEXT]\n"
        "Sentiment: [SENTIMENT]\n"
        "Emotion: [EMOTION]\n"
        "Intent: [INTENT]\n"
        "Tone: [TONE]\n"
        "Confidence Level: [CONFIDENCE]\n"
        "Frustration Level: [FRUSTRATION]\n"
        "Response Length: [LENGTH]\n"
        "Action Required: [ACTION]\n"
        "Interruption: [INTERRUPTION]\n"
        "Cooperation Level: [COOPERATION]\n"
        "Clarity: [CLARITY]\n"
        "Objective: [OBJECTIVE]\n"
        "Timeline: [TIMELINE]\n"
        "Motivation: [MOTIVATION]\n"
        "Conversation Stage: [STAGE]\n"
        "Resolution: [RESOLUTION]\n"
        "Context: [CONTEXT]\n"
        "Urgency: [URGENCY]\n"
        "Problem Type: [PROBLEM]\n"
        "Key Words: [KEYWORDS]\n"
        "Expected Detail: [DETAIL]\n"
        "Time Gap: [TIME]\n"
        "Client Expectation: [EXPECTATION]\n"
        "Channel: [CHANNEL]\n"
        "Power Relationship: [POWER]\n\n"
        f"User Input: {input_text}\n"
        "Response:"
    )

    # ROBUSTNESS FIX: the original hard-coded .to("cuda"), which crashes on
    # machines without a GPU. Pick the device dynamically instead.
    device = "cuda" if torch.cuda.is_available() else "cpu"

    inputs = tokenizer(
        [labeled_prompt],
        return_tensors="pt",
        padding=True,
        truncation=True,
        max_length=512,  # keep in sync with the model's max_seq_length
    ).to(device)

    # skip_prompt=True so only the generated continuation is printed,
    # not the (long) labeled prompt itself.
    text_streamer = TextStreamer(tokenizer, skip_prompt=True)

    # Inference only — no gradients needed, saves memory.
    with torch.no_grad():
        model.generate(
            input_ids=inputs.input_ids,
            attention_mask=inputs.attention_mask,
            streamer=text_streamer,
            max_new_tokens=max_new_tokens,
            pad_token_id=tokenizer.eos_token_id,  # silence the missing-pad-token warning
        )
|
|
|
|
|
def user_interaction():
    """Run an interactive loop: read user text and stream a labeled response.

    Typing 'exit' (case-insensitive) ends the loop.
    """
    prompt = "Enter conversation details (or type 'exit' to quit): "
    while (entry := input(prompt)).lower() != 'exit':
        print("Generating response for input:")
        generate_response(entry)
    print("Exiting the program.")
|
|
|
|
|
# Run the interactive loop only when executed as a script, not when this
# module is imported (the original bare call started the REPL on import).
if __name__ == "__main__":
    user_interaction()
|
|