date3k2 committed
Commit 8cade22 · verified · 1 Parent(s): 37d9421

Update README.md

Files changed (1)
  1. README.md +25 -22
README.md CHANGED
@@ -1,19 +1,19 @@
----
-license: apache-2.0
-library_name: peft
-tags:
-- trl
-- sft
-- generated_from_trainer
-base_model: mistralai/Mistral-7B-Instruct-v0.3
-model-index:
-- name: mistral-v0.3-alpaca-vi-v1
-  results: []
-language:
-- vi
-- en
-pipeline_tag: text-generation
----
+---
+license: apache-2.0
+library_name: peft
+tags:
+- trl
+- sft
+- generated_from_trainer
+base_model: mistralai/Mistral-7B-Instruct-v0.3
+model-index:
+- name: mistral-v0.3-vi-alpaca
+  results: []
+language:
+- vi
+- en
+pipeline_tag: text-generation
+---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
 should probably proofread and complete it, then remove this comment. -->
@@ -32,9 +32,12 @@ It achieves the following results on the evaluation set:
 ## Usage
 
 ```python
-model_name = "mistralai/Mistral-7B-Instruct-v0.3"
-from transformers import AutoModelForCausalLM, BitsAndBytesConfig
+from transformers import AutoModelForCausalLM, BitsAndBytesConfig, AutoTokenizer
 import torch
+
+model_name = "mistralai/Mistral-7B-Instruct-v0.3"
+peft_model_id = "date3k2/mistral-v0.3-alpaca-vi-v1"
+
 bnb_config = BitsAndBytesConfig(
     load_in_8bit= True
 )
@@ -44,16 +47,16 @@ model = AutoModelForCausalLM.from_pretrained(
     device_map="auto",
     trust_remote_code=True,
 )
-from transformers import AutoModelForCausalLM, AutoTokenizer
+tokenizer = AutoTokenizer.from_pretrained(model_name)
 
-device = "cuda"
 
-tokenizer = AutoTokenizer.from_pretrained(model_name)
+model.load_adapter(peft_model_id)
+device = "cuda"
 
 messages = [
     {"role": "user", "content": "Bạn là ai?"},
     {"role": "assistant", "content": "Tôi là Dastral, một trợ lý AI đắc lực"},
-    {"role": "user", "content": "Viết công thức để nấu một món bánh thịt heo."}
+    {"role": "user", "content": "Viết công thức để nấu một món ngon từ thịt bò."}
 ]
 
 encodeds = tokenizer.apply_chat_template(messages, return_tensors="pt")
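
For reference, below is a minimal sketch of how the post-commit usage snippet fits together end to end. The model ID, adapter ID, 8-bit config, adapter loading, and chat messages are taken from the diff above; the `quantization_config=bnb_config` argument and the generation/decoding steps are not part of the changed lines and are filled in here as assumptions based on the standard `transformers` API.

```python
# Sketch assembling the updated README snippet end to end.
# Only the lines shown in the diff are confirmed by the commit; the
# quantization_config argument and the generation/decoding steps below
# are assumptions following common transformers usage.
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
import torch

model_name = "mistralai/Mistral-7B-Instruct-v0.3"
peft_model_id = "date3k2/mistral-v0.3-alpaca-vi-v1"

# Load the base model in 8-bit, as configured in the README snippet.
bnb_config = BitsAndBytesConfig(load_in_8bit=True)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    quantization_config=bnb_config,  # assumed; sits in the unchanged part of the README
    device_map="auto",
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Attach the LoRA adapter published by this repository (requires peft installed).
model.load_adapter(peft_model_id)
device = "cuda"

messages = [
    {"role": "user", "content": "Bạn là ai?"},  # "Who are you?"
    {"role": "assistant", "content": "Tôi là Dastral, một trợ lý AI đắc lực"},  # "I am Dastral, a capable AI assistant"
    {"role": "user", "content": "Viết công thức để nấu một món ngon từ thịt bò."},  # "Write a recipe for a tasty beef dish."
]

# Build the prompt with the chat template and generate a reply
# (generation settings here are illustrative, not taken from the commit).
encodeds = tokenizer.apply_chat_template(messages, return_tensors="pt").to(device)
with torch.no_grad():
    generated_ids = model.generate(encodeds, max_new_tokens=512, do_sample=True)
print(tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0])
```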