Update main.py
main.py
CHANGED
@@ -5,12 +5,12 @@ import re
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
 model_dir_small = 'edithram23/Redaction'
-tokenizer_small = AutoTokenizer.from_pretrained(
+tokenizer_small = AutoTokenizer.from_pretrained(model_dir_small)
 model_small = AutoModelForSeq2SeqLM.from_pretrained(model_dir_small)
 
 
 model_dir_large = 'edithram23/Redaction_Personal_info_v1'
-tokenizer_large = AutoTokenizer.from_pretrained(
+tokenizer_large = AutoTokenizer.from_pretrained(model_dir_large)
 model_large = AutoModelForSeq2SeqLM.from_pretrained(model_dir_large)
 
 
 def mask_generation(text,model=model_small,tokenizer=tokenizer_small):
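The commit completes the two truncated AutoTokenizer.from_pretrained( calls by passing the matching model directory to each, so both tokenizers load from the same checkpoints as their models. Below is a minimal sketch of how the fixed code might be exercised end to end. The body of mask_generation is not shown in this hunk, so the tokenize, generate, decode flow, the max_length value, and the sample sentence are all assumptions, not the repository's actual implementation.

from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# As fixed in this commit: tokenizer and model load from the same directory.
model_dir_small = 'edithram23/Redaction'
tokenizer_small = AutoTokenizer.from_pretrained(model_dir_small)
model_small = AutoModelForSeq2SeqLM.from_pretrained(model_dir_small)

def mask_generation(text, model=model_small, tokenizer=tokenizer_small):
    # Assumed flow (the real body is outside this hunk): encode the input,
    # let the seq2seq model rewrite it with personal details redacted,
    # then decode the generated ids back into text.
    inputs = tokenizer(text, return_tensors='pt', truncation=True)
    output_ids = model.generate(**inputs, max_length=256)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

# Hypothetical usage with an invented sample sentence.
print(mask_generation('My name is John Doe and I live in Berlin.'))

Before this change, calling mask_generation with its defaults would have failed at import time, since the truncated from_pretrained( lines were syntax errors and tokenizer_small and tokenizer_large were never defined.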