Zaixi committed
Commit 8fcfb2d · 1 Parent(s): 1d5e06f

adapt zerogpu

Files changed (2)
  1. app.py +6 -2
  2. runner/inference.py +1 -1
app.py CHANGED
@@ -212,6 +212,8 @@ def create_protenix_json(input_data: Dict) -> List[Dict]:
 #@torch.inference_mode()
 @spaces.GPU(duration=120) # Specify a duration to avoid timeout
 def predict_structure(input_collector: dict):
+    #first initialize runner
+    runner = InferenceRunner(configs)
     """Handle both input types"""
     os.makedirs("./output", exist_ok=True)
 
@@ -282,9 +284,9 @@ configs = parse_configs(
     fill_required_with_null=True,
 )
 configs.load_checkpoint_path='./checkpoint.pt'
-download_infercence_cache(configs, model_version="v0.2.0")
+download_infercence_cache()
 configs.use_deepspeed_evo_attention=False
-runner = InferenceRunner(configs)
+
 add_watermark = gr.Checkbox(label="Add Watermark", value=True)
 add_watermark1 = gr.Checkbox(label="Add Watermark", value=True)
 
@@ -453,6 +455,8 @@ with gr.Blocks(title="FoldMark", css=custom_css) as demo:
 
     @spaces.GPU
     def is_watermarked(file):
+        #first initialize runner
+        runner = InferenceRunner(configs)
         # Generate a unique subdirectory and filename
         unique_id = str(uuid.uuid4().hex[:8])
         subdir = os.path.join('./output', unique_id)
runner/inference.py CHANGED
@@ -201,7 +201,7 @@ class InferenceRunner(object):
         self.model.configs = new_configs
 
 
-def download_infercence_cache(configs: Any, model_version: str = "v0.2.0") -> None:
+def download_infercence_cache() -> None:
     code_directory = './'
 
     data_cache_dir = os.path.join(code_directory, "release_data/ccd_cache")
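For context, this is the usual ZeroGPU adaptation: on a ZeroGPU Space a GPU is attached only while a @spaces.GPU-decorated function is running, so CUDA-touching work such as InferenceRunner(configs) has to move inside those functions, while CPU-only startup (config parsing, CCD cache download) can stay at module level. The sketch below summarizes the resulting layout; it is not the full app.py: the import paths are assumptions based on this repo's layout, the parse_configs call is abbreviated, and the inference and Gradio wiring are elided.

# ZeroGPU layout sketch (assumed imports; parse_configs call abbreviated, inference body elided)
import spaces                                   # Hugging Face ZeroGPU decorator package
import gradio as gr
from runner.inference import InferenceRunner, download_infercence_cache  # path assumed from this repo

# Module level: CPU-only setup, runs at Space startup while no GPU is attached.
configs = parse_configs(fill_required_with_null=True)   # abbreviated; imported and called as in app.py
configs.load_checkpoint_path = './checkpoint.pt'
download_infercence_cache()                     # after this commit it takes no arguments
configs.use_deepspeed_evo_attention = False

# GPU work: the runner (and its CUDA allocations) is created per call, not at import time.
@spaces.GPU(duration=120)                       # a GPU is attached only for the duration of the call
def predict_structure(input_collector: dict):
    """Handle both input types"""
    runner = InferenceRunner(configs)           # moved inside the decorated function by this commit
    ...                                         # run inference and write results under ./output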