yarenty committed on
Commit
8b89134
·
1 Parent(s): acf444e

fixing token name

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -12,7 +12,8 @@ from transformers import AutoTokenizer
12
  from ddgs import DDGS
13
  import spaces # Import spaces early to enable ZeroGPU support
14
 
15
- access_token=os.environ['HF_TOKEN']
 
16
 
17
  # Optional: Disable GPU visibility if you wish to force CPU usage
18
  # os.environ["CUDA_VISIBLE_DEVICES"] = ""
@@ -139,7 +140,7 @@ def load_pipeline(model_name):
139
  return PIPELINES[model_name]
140
  repo = MODELS[model_name]["repo_id"]
141
  tokenizer = AutoTokenizer.from_pretrained(repo,
142
- token=access_token)
143
  for dtype in (torch.bfloat16, torch.float16, torch.float32):
144
  try:
145
  pipe = pipeline(
@@ -150,7 +151,7 @@ def load_pipeline(model_name):
150
  torch_dtype=dtype,
151
  device_map="auto",
152
  use_cache=False, # ← disable past-key-value caching
153
- token=access_token)
154
  PIPELINES[model_name] = pipe
155
  return pipe
156
  except Exception:
 
12
  from ddgs import DDGS
13
  import spaces # Import spaces early to enable ZeroGPU support
14
 
15
+ # Get Hugging Face token - works in both local and HF Spaces environments
16
+ access_token = os.environ.get('HF_TOKEN') or os.environ.get('HUGGINGFACE_HUB_TOKEN') or None
17
 
18
  # Optional: Disable GPU visibility if you wish to force CPU usage
19
  # os.environ["CUDA_VISIBLE_DEVICES"] = ""
 
140
  return PIPELINES[model_name]
141
  repo = MODELS[model_name]["repo_id"]
142
  tokenizer = AutoTokenizer.from_pretrained(repo,
143
+ token=access_token if access_token else None)
144
  for dtype in (torch.bfloat16, torch.float16, torch.float32):
145
  try:
146
  pipe = pipeline(
 
151
  torch_dtype=dtype,
152
  device_map="auto",
153
  use_cache=False, # ← disable past-key-value caching
154
+ token=access_token if access_token else None)
155
  PIPELINES[model_name] = pipe
156
  return pipe
157
  except Exception: