AlekseyCalvin committed on
Commit
72bf789
·
verified ·
1 Parent(s): c31cb16

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -14
app.py CHANGED
@@ -45,22 +45,14 @@ torch.cuda.empty_cache()
45
 
46
  device = "cuda" if torch.cuda.is_available() else "cpu"
47
 
48
- clipmodel = 'norm'
49
- if clipmodel == "long":
50
- model_id = "zer0int/LongCLIP-GmP-ViT-L-14"
51
- config = CLIPConfig.from_pretrained(model_id)
52
- maxtokens = 77
53
- if clipmodel == "norm":
54
- model_id = "zer0int/CLIP-GmP-ViT-L-14"
55
- config = CLIPConfig.from_pretrained(model_id)
56
- maxtokens = 77
57
- clip_model = CLIPModel.from_pretrained(model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=True).to("cuda")
58
- clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=maxtokens, ignore_mismatched_sizes=True, return_tensors="pt", truncation=True)
59
- #t5 = HFEmbedder("DeepFloyd/t5-v1_1-xxl", max_length=512, torch_dtype=torch.bfloat16).to(device)
60
-
61
  pipe.tokenizer = clip_processor.tokenizer
62
  pipe.text_encoder = clip_model.text_model
63
- pipe.tokenizer_max_length = maxtokens
64
  pipe.text_encoder.dtype = torch.bfloat16
65
  #pipe.text_encoder_2 = t5.text_model
66
 
 
45
 
46
  device = "cuda" if torch.cuda.is_available() else "cpu"
47
 
48
+ model_id = ("zer0int/LongCLIP-GmP-ViT-L-14")
49
+ config = CLIPConfig.from_pretrained(model_id)
50
+ config.text_config.max_position_embeddings = 248
51
+ clip_model = CLIPModel.from_pretrained(model_id, torch_dtype=torch.bfloat16, config=config, ignore_mismatched_sizes=True)
52
+ clip_processor = CLIPProcessor.from_pretrained(model_id, padding="max_length", max_length=248)
 
 
 
 
 
 
 
 
53
  pipe.tokenizer = clip_processor.tokenizer
54
  pipe.text_encoder = clip_model.text_model
55
+ pipe.tokenizer_max_length = 248
56
  pipe.text_encoder.dtype = torch.bfloat16
57
  #pipe.text_encoder_2 = t5.text_model
58