mdanish committed on
Commit
a312e49
·
verified ·
1 Parent(s): e172b18

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +5 -3
app.py CHANGED
@@ -19,14 +19,16 @@ st.set_page_config(
19
  #model, preprocess = open_clip.create_model_from_pretrained('hf-hub:laion/CLIP-ViT-g-14-laion2B-s12B-b42K')
20
  #tokenizer = open_clip.get_tokenizer('hf-hub:laion/CLIP-ViT-g-14-laion2B-s12B-b42K')
21
 
22
- model, preprocess = open_clip.create_model_from_pretrained(clip_model_name)
23
- tokenizer = open_clip.get_tokenizer(clip_model_name)
 
 
24
 
25
  @st.cache_resource
26
  def load_model():
27
  """Load the OpenCLIP model and return model and processor"""
28
  model, _, preprocess = open_clip.create_model_and_transforms(
29
- 'ViT-H-14'
30
  pretrained='laion2b_s32b_b79k'
31
  )
32
  tokenizer = open_clip.get_tokenizer('ViT-H-14')
 
19
  #model, preprocess = open_clip.create_model_from_pretrained('hf-hub:laion/CLIP-ViT-g-14-laion2B-s12B-b42K')
20
  #tokenizer = open_clip.get_tokenizer('hf-hub:laion/CLIP-ViT-g-14-laion2B-s12B-b42K')
21
 
22
+ #model, preprocess = open_clip.create_model_from_pretrained(clip_model_name)
23
+ #tokenizer = open_clip.get_tokenizer(clip_model_name)
24
+
25
+ st.write("Available models:", open_clip.list_models())
26
 
27
  @st.cache_resource
28
  def load_model():
29
  """Load the OpenCLIP model and return model and processor"""
30
  model, _, preprocess = open_clip.create_model_and_transforms(
31
+ 'ViT-H-14',
32
  pretrained='laion2b_s32b_b79k'
33
  )
34
  tokenizer = open_clip.get_tokenizer('ViT-H-14')