Kleo committed (verified) · Commit f078667 · 1 Parent(s): cfa63bb

Update README.md
Files changed (1): README.md (+5 -10)
README.md CHANGED
@@ -61,21 +61,16 @@ Use the code below to get started with the model.
 In the following demonstration series, we show examples of matching and non-matching text predictions of our model for the key point "Social media regulation harms privacy" of the debatable topic "Social media platforms should be regulated by the government" and the focus is on arguments contesting the topic.
 ```
 import torch
-from transformers import pipeline, AutoModelForSequenceClassification, AutoTokenizer, BitsAndBytesConfig
+from transformers import AutoModelForSequenceClassification, AutoTokenizer
 from peft import PeftModel, PeftConfig
 huggingface-cli login
 
-bnb_config = BitsAndBytesConfig(
-load_in_4bit=True,
-bnb_4bit_use_double_quant=True,
-bnb_4bit_quant_type="nf4",
-bnb_4bit_compute_dtype=torch.bfloat16)
-
 
+base_model_id = "ilsp/Meltemi-7B-v1"
 peft_model_id = "Kleo/meltemi_arg2kp_matcher"
 peft_config = PeftConfig.from_pretrained(peft_model_id)
-model = AutoModelForSequenceClassification.from_pretrained(peft_config.base_model_name_or_path, return_dict=True, quantization_config=bnb_config, device_map='auto')
-tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)
+model = AutoModelForSequenceClassification.from_pretrained(base_model_id, device_map='auto')
+tokenizer = AutoTokenizer.from_pretrained(base_model_id)
 
 # Load the Lora model
 model = PeftModel.from_pretrained(model, peft_model_id)
@@ -126,7 +121,7 @@ for text in matching_texts:
     results.append(predicted_class_id)
 
 # Print the results
-for idx, (text, label_id) in enumerate(zip(texts, results), start=1):
+for idx, (text, label_id) in enumerate(zip(matching_texts, results), start=1):
     print(f"Text {idx}: Predicted Label ID: {label_id}")
 ```
 
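The two hunks above show only the head and tail of the README snippet; the part that builds `matching_texts`, tokenizes each text, and fills `results` is outside this diff. For orientation, here is a minimal sketch of what that middle section could look like with the updated loading code, assuming the standard `transformers`/`peft` inference API; the example argument string and the "Argument: ... Key point: ..." input format are illustrative placeholders, not the README's actual content. (Also note that `huggingface-cli login` in the snippet is a shell command meant to be run in a terminal beforehand, not a Python statement.)

```
# Illustrative sketch only: variable names follow the hunks above, but the
# real README defines matching_texts and the input format itself.
key_point = "Social media regulation harms privacy"
matching_texts = [
    # hypothetical candidate argument paired with the key point
    f"Argument: Government monitoring of social platforms exposes private user data. Key point: {key_point}",
]

model.eval()
results = []
for text in matching_texts:
    # Tokenize one argument/key-point pair and move it to the model's device
    inputs = tokenizer(text, return_tensors="pt", truncation=True).to(model.device)
    with torch.no_grad():
        logits = model(**inputs).logits
    # Take the highest-scoring class as the match / no-match prediction
    predicted_class_id = logits.argmax(dim=-1).item()
    results.append(predicted_class_id)
```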
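One practical note on the change itself: dropping the `BitsAndBytesConfig` means the Meltemi-7B base model is now loaded unquantized, which needs on the order of 14-28 GB of memory depending on dtype. If that is too much for your hardware, the removed 4-bit path should still combine with the new explicit `base_model_id`; a sketch reassembled from the lines this commit removes, assuming `bitsandbytes` and `accelerate` are installed:

```
# Optional 4-bit loading, reassembled from the removed lines (sketch only).
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer, BitsAndBytesConfig
from peft import PeftModel

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,
)

base_model_id = "ilsp/Meltemi-7B-v1"
peft_model_id = "Kleo/meltemi_arg2kp_matcher"
model = AutoModelForSequenceClassification.from_pretrained(
    base_model_id, quantization_config=bnb_config, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(base_model_id)

# Load the LoRA adapter on top of the quantized base model
model = PeftModel.from_pretrained(model, peft_model_id)
```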