mdanish committed
Commit 801db1d · verified · 1 Parent(s): 254e461

Upload app.py with huggingface_hub
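For context, a minimal sketch of how a commit like this one is typically produced with the huggingface_hub client; the repo_id below is a placeholder, not taken from this page, and a Streamlit Space is assumed as the target.

from huggingface_hub import HfApi

# Hypothetical reproduction of this commit; repo_id is a placeholder.
api = HfApi()
api.upload_file(
    path_or_fileobj="app.py",            # local file to push
    path_in_repo="app.py",               # destination path inside the repo
    repo_id="mdanish/walkability-demo",  # placeholder Space name (assumption)
    repo_type="space",                   # assuming a Streamlit Space
    commit_message="Upload app.py with huggingface_hub",
)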

Files changed (1)
  app.py +18 -3
app.py CHANGED
@@ -2,11 +2,26 @@ import streamlit as st
 from PIL import Image
 import numpy as np
 
-modelpath = '20241204-ams-no-env-open_clip_ViT-H-14-378-quickgelu.npz'
-model = np.load(modelpath)
-st.write(model['walkability_vecs'].shape)
+import open_clip
+
+#from transformers import CLIPProcessor, CLIPModel
+
+knnpath = '20241204-ams-no-env-open_clip_ViT-H-14-378-quickgelu.npz'
+clip_model_name = 'ViT-H-14-378-quickgelu'
+
+#model, preprocess = open_clip.create_model_from_pretrained('hf-hub:laion/CLIP-ViT-g-14-laion2B-s12B-b42K')
+#tokenizer = open_clip.get_tokenizer('hf-hub:laion/CLIP-ViT-g-14-laion2B-s12B-b42K')
+
+model, preprocess = open_clip.create_model_from_pretrained(clip_model_name)
+tokenizer = open_clip.get_tokenizer(clip_model_name)
+
+st.write(model)
+#clip_model = CLIPModel.from_pretrained()
+#clip_processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
+
+
+knn = np.load(knnpath)
+st.write(knn['walkability_vecs'].shape)
 
 file = st.file_uploader('Upload An Image')
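Where this diff is heading is not shown in the commit itself, but the loaded artifacts (an open_clip model, an .npz of 'walkability_vecs', and a file uploader) suggest embedding the uploaded image and scoring it against the stored vectors. A minimal sketch under those assumptions, additionally assuming the 'dfn5b' pretrained tag for ViT-H-14-378-quickgelu and unit-normalized stored vectors:

import numpy as np
import open_clip
import torch
from PIL import Image

# Sketch only: 'dfn5b' is the assumed pretrained weight tag for this model name.
model, preprocess = open_clip.create_model_from_pretrained(
    'ViT-H-14-378-quickgelu', pretrained='dfn5b')
knn = np.load('20241204-ams-no-env-open_clip_ViT-H-14-378-quickgelu.npz')

image = preprocess(Image.open('street.jpg')).unsqueeze(0)  # placeholder image path
with torch.no_grad():
    vec = model.encode_image(image)
vec = vec / vec.norm(dim=-1, keepdim=True)  # unit-normalize the embedding

# Cosine similarities, assuming walkability_vecs rows are unit-normalized.
sims = knn['walkability_vecs'] @ vec.squeeze(0).numpy()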