nebiyu29 commited on
Commit
8ec0711
·
verified ·
1 Parent(s): fc014d5

changed the misspelling and made the model gradients inactive

Browse files
Files changed (1) hide show
  1. app.py +12 -10
app.py CHANGED
@@ -1,18 +1,20 @@
1
- from transformers import AutoTokenizer,AutoModelForSequenceClassification
2
  import gradio as gr
3
 
4
  model_name="nebiyu29/hate_classifier"
5
- tokenizer=AuotoTokenizer.from_pretrained(model_name)
6
  model=AutoModelfForSequenceClassification.from_pretrained(model_name)
7
 
8
- #this where the model is active
9
- def model_classifier(text):
10
- if len(text)==0:
11
- return f"the input text is {text}"
12
- else:
13
- encoded_input=tokenizer(text) #this is where the encoding happens
14
- scores=model(encoded) #this is is the score for rach values
15
- return scores
 
 
16
 
17
 
18
 
 
1
+ from transformers import AutoModelForSequenceClassification,AutoTokenizer
2
  import gradio as gr
3
 
4
  model_name="nebiyu29/hate_classifier"
5
+ tokenizer=AutoTokenizer.from_pretrained(model_name)
6
  model=AutoModelfForSequenceClassification.from_pretrained(model_name)
7
 
8
+ #this where the model is active and we need to make the gradiends in active
9
+ model.eval()
10
+ with torch.no_grad():
11
+ def model_classifier(text):
12
+ if len(text)==0:
13
+ return f"the input text is {text}"
14
+ else:
15
+ encoded_input=tokenizer(text) #this is where the encoding happens
16
+ scores=model(encoded) #this is is the score for rach values
17
+ return scores
18
 
19
 
20