kar0lina committed
Commit 53d66b2 · 1 Parent(s): d2f50bd

Initial commit

Files changed (2)
  1. README.md +1 -1
  2. app.py +6 -6
README.md CHANGED
@@ -1,6 +1,6 @@
 ---
 title: Petarda
-emoji: 🤯🤯🤯🤯🤯
+emoji: 🤯
 colorFrom: yellow
 colorTo: blue
 sdk: gradio
app.py CHANGED
@@ -11,27 +11,27 @@ def trait_classifier(text: str) -> Dict[str, float]:
 # Set up text classification pipeline
 agr_classifier = pipeline(task="text-classification",
                           # Because our model is on Hugging Face already, we can pass in the model name directly
-                          model="kar0lina/petarda_xlm-roberta-base_agr", # link to model on HF Hub
+                          model="kar0lina/petarda_xlm-roberta-base_pandora1000-agr", # link to model on HF Hub
                           device="cuda" if torch.cuda.is_available() else "cpu",
                           top_k=None) # return all possible scores (not just top-1)
 con_classifier = pipeline(task="text-classification",
                           # Because our model is on Hugging Face already, we can pass in the model name directly
-                          model="kar0lina/petarda_xlm-roberta-base_con", # link to model on HF Hub
+                          model="kar0lina/petarda_xlm-roberta-base_pandora1000-con", # link to model on HF Hub
                           device="cuda" if torch.cuda.is_available() else "cpu",
                           top_k=None) # return all possible scores (not just top-1)
 ext_classifier = pipeline(task="text-classification",
                           # Because our model is on Hugging Face already, we can pass in the model name directly
-                          model="kar0lina/petarda_xlm-roberta-base_ext", # link to model on HF Hub
+                          model="kar0lina/petarda_xlm-roberta-base_pandora1000-ext", # link to model on HF Hub
                           device="cuda" if torch.cuda.is_available() else "cpu",
                           top_k=None) # return all possible scores (not just top-1)
 neu_classifier = pipeline(task="text-classification",
                           # Because our model is on Hugging Face already, we can pass in the model name directly
-                          model="kar0lina/petarda_xlm-roberta-base_neu", # link to model on HF Hub
+                          model="kar0lina/petarda_xlm-roberta-base_pandora1000-neu", # link to model on HF Hub
                           device="cuda" if torch.cuda.is_available() else "cpu",
                           top_k=None) # return all possible scores (not just top-1)
 ope_classifier = pipeline(task="text-classification",
                           # Because our model is on Hugging Face already, we can pass in the model name directly
-                          model="kar0lina/petarda_xlm-roberta-base_ope", # link to model on HF Hub
+                          model="kar0lina/petarda_xlm-roberta-base_pandora1000-ope", # link to model on HF Hub
                           device="cuda" if torch.cuda.is_available() else "cpu",
                           top_k=None) # return all possible scores (not just top-1)
 
@@ -92,7 +92,7 @@ def trait_classifier(text: str) -> Dict[str, float]:
 description = """
 A text classifier for PErsonality Trait prediction using Ai model Roberta - Demo App.
 
-Fine-tuned from [xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base)"""
+Fine-tuned from [xlm-roberta-base](https://huggingface.co/FacebookAI/xlm-roberta-base) on Pandora dataset"""
 
 
 demo = gr.Interface(fn=trait_classifier,
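
Since the five pipeline calls above differ only in the trait suffix of the model repo, the same setup can be expressed as a loop. The following is a minimal sketch, not part of the commit: it assumes the renamed kar0lina/petarda_xlm-roberta-base_pandora1000-* repos from this diff are accessible on the Hub and that transformers and torch are installed; the names TRAITS, classifiers, and trait_scores are illustrative, not taken from app.py.

# Sketch (not part of this commit): build the five trait classifiers in a loop.
# Assumes the renamed Hub repos from the diff above exist and are accessible.
from typing import Dict

import torch
from transformers import pipeline

TRAITS = ["agr", "con", "ext", "neu", "ope"]  # Big Five trait suffixes used in the repo names
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"

# One text-classification pipeline per trait, mirroring the diff above
classifiers = {
    trait: pipeline(
        task="text-classification",
        model=f"kar0lina/petarda_xlm-roberta-base_pandora1000-{trait}",  # model repo on the HF Hub
        device=DEVICE,
        top_k=None,  # return all label scores, not just the top-1 prediction
    )
    for trait in TRAITS
}


def trait_scores(text: str) -> Dict[str, Dict[str, float]]:
    """Return, for each trait, a mapping of label name to score (illustrative helper)."""
    results = {}
    for trait, clf in classifiers.items():
        outputs = clf([text])[0]  # one list of {"label", "score"} dicts per input text
        results[trait] = {o["label"]: o["score"] for o in outputs}
    return results

Keeping the trait suffix as the only variable also means a future rename like the one in this commit touches a single f-string instead of five separate model arguments.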