laurenssam committed on
Commit
39336af
·
1 Parent(s): 2c571f0
Files changed (1) hide show
  1. app.py +2 -3
app.py CHANGED
@@ -104,7 +104,6 @@ def get_response(params):
104
  )
105
 
106
  images = [load_image_from_base64(image) for image in images]
107
- print(images)
108
  # images = process_images(images, image_processor, model.config)
109
  images = process_images(images, image_processor, model.config).to('cpu', dtype=torch.float)
110
 
@@ -142,7 +141,7 @@ def get_response(params):
142
  input_ids = (
143
  tokenizer_image_token(prompt, tokenizer, IMAGE_TOKEN_INDEX, return_tensors="pt")
144
  .unsqueeze(0)
145
- .to(model.device)
146
  )
147
  keywords = [stop_str]
148
 
@@ -154,7 +153,7 @@ def get_response(params):
154
  max_new_tokens = min(
155
  max_new_tokens, max_context_length - input_ids.shape[-1] - num_image_tokens
156
  )
157
-
158
  if max_new_tokens < 1:
159
  yield json.dumps(
160
  {
 
104
  )
105
 
106
  images = [load_image_from_base64(image) for image in images]
 
107
  # images = process_images(images, image_processor, model.config)
108
  images = process_images(images, image_processor, model.config).to('cpu', dtype=torch.float)
109
 
 
141
  input_ids = (
142
  tokenizer_image_token(prompt, tokenizer, IMAGE_TOKEN_INDEX, return_tensors="pt")
143
  .unsqueeze(0)
144
+ .to(model.device, dtype=torch.float)
145
  )
146
  keywords = [stop_str]
147
 
 
153
  max_new_tokens = min(
154
  max_new_tokens, max_context_length - input_ids.shape[-1] - num_image_tokens
155
  )
156
+ images = images.to(dtype=torch.float)
157
  if max_new_tokens < 1:
158
  yield json.dumps(
159
  {