Commit fd96f46 (verified) by abdeljalilELmajjodi · Parent(s): a81b12c

add gpu spaces

Files changed (1):
  1. app.py +6 -8
app.py CHANGED

@@ -2,13 +2,14 @@ import gradio as gr
 import torch
 from transformers import pipeline
 import os
+import spaces
 
 #load_dotenv()
 key=os.environ["HF_KEY"]
 
 
 def load_model():
-    pipe=pipeline(task="fill-mask",model="atlasia/xlm-roberta-large-ft-alatlas",token=key)
+    pipe=pipeline(task="fill-mask",model="atlasia/xlm-roberta-large-ft-alatlas",token=key,device=0)
     return pipe
 
 print("[INFO] load model ...")
@@ -19,14 +20,11 @@ print("[INFO] model loaded")
 # predictions=pipe(text)
 # return predictions[0]["sequence"],predictions
 
-
+@spaces.GPU
 def predict(text):
-
-    # Get prediction
-    with torch.no_grad():
-        outputs = pipe(text)
-        scores= [x["score"] for x in outputs]
-        tokens= [x["token_str"] for x in outputs]
+    outputs = pipe(text)
+    scores= [x["score"] for x in outputs]
+    tokens= [x["token_str"] for x in outputs]
     # scores= [x["score"] for x in outputs]
     # Convert to percentages and create label-probability pairs
     #probs = probabilities[0].tolist()
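
For context, below is a minimal sketch of how the changed pieces fit together on a ZeroGPU Space. Only the `import spaces` line, the `device=0` argument, and the `@spaces.GPU` decorator come from this diff; the Gradio wiring (`gr.Interface`, the `Label` output, and `demo.launch()`) is an assumption, since the rest of app.py is not shown here.

# Minimal sketch (assumptions noted above): load the fill-mask pipeline on
# the GPU once at startup and run predictions inside a ZeroGPU-decorated function.
import os

import gradio as gr
import spaces
from transformers import pipeline

key = os.environ["HF_KEY"]

# device=0 places the pipeline on the first GPU, as in the commit.
pipe = pipeline(
    task="fill-mask",
    model="atlasia/xlm-roberta-large-ft-alatlas",
    token=key,
    device=0,
)

@spaces.GPU  # requests GPU time for each call on ZeroGPU hardware
def predict(text):
    outputs = pipe(text)  # list of dicts with "token_str", "score", "sequence"
    scores = [x["score"] for x in outputs]
    tokens = [x["token_str"] for x in outputs]
    return dict(zip(tokens, scores))

# Hypothetical UI wiring (not part of this diff):
demo = gr.Interface(fn=predict, inputs="text", outputs=gr.Label(num_top_classes=5))
demo.launch()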