stevenbucaille committed
Commit c456892 · verified · 1 parent: 5ab194a
Files changed (2)
  1. config.json +17 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -10,14 +10,29 @@
   "hidden_act": "gelu",
   "hidden_size": 256,
   "initializer_range": 0.02,
+  "intermediate_size": 512,
   "keypoint_detector_config": {
-    "model_type": "superpoint"
+    "border_removal_distance": 4,
+    "decoder_hidden_size": 256,
+    "descriptor_decoder_dim": 256,
+    "encoder_hidden_sizes": [
+      64,
+      64,
+      128,
+      128
+    ],
+    "initializer_range": 0.02,
+    "keypoint_decoder_dim": 65,
+    "keypoint_threshold": 0.005,
+    "max_keypoints": -1,
+    "model_type": "superpoint",
+    "nms_radius": 4
   },
   "model_type": "lightglue",
   "num_attention_heads": 4,
   "num_hidden_layers": 9,
   "num_key_value_heads": 4,
   "torch_dtype": "float32",
-  "transformers_version": "4.50.0.dev0",
+  "transformers_version": "4.52.0.dev0",
   "width_confidence": 0.99
 }
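
This change replaces the bare "model_type" stub in "keypoint_detector_config" with the full SuperPoint sub-configuration (the values match the SuperPointConfig defaults in transformers) and bumps the pinned transformers version. A minimal sketch of loading and inspecting the updated config, assuming a transformers build that ships LightGlue support (the commit targets 4.52.0.dev0); the repo id below is an assumption for illustration:

# Sketch: load the updated config and read the now-explicit SuperPoint
# sub-config. Assumes a transformers build with LightGlue support (the
# commit pins transformers_version to 4.52.0.dev0); the repo id is an
# assumption for illustration.
from transformers import LightGlueConfig

config = LightGlueConfig.from_pretrained("stevenbucaille/lightglue_superpoint")

detector = config.keypoint_detector_config
print(detector.model_type)            # "superpoint"
print(detector.encoder_hidden_sizes)  # [64, 64, 128, 128]
print(detector.max_keypoints)         # -1, i.e. keep all detected keypoints
print(config.num_hidden_layers)       # 9 LightGlue transformer layers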
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:011c0b26ca1acc7d9361fa6e371a635a83549d2d4c92fdb7ee436acbc6257be0
-size 55020320
+oid sha256:4f4ee197a05b339d45ddb02dc240170a61e6dcf77fd6c92247fa9df3d6df3449
+size 55020144
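
model.safetensors is tracked with Git LFS, so only the pointer file changes here: the "oid" line is the SHA-256 of the real weight blob and "size" is its byte count (the new blob is 176 bytes smaller). A small standard-library sketch of checking a downloaded file against the new pointer; the expected values come from the diff above, and the local path is an assumption:

# Sketch: verify a downloaded model.safetensors against its Git LFS
# pointer (spec: https://git-lfs.github.com/spec/v1). The "oid" is the
# SHA-256 of the actual blob and "size" is its length in bytes; the
# expected values are taken from the new pointer, the path is assumed.
import hashlib
from pathlib import Path

EXPECTED_OID = "4f4ee197a05b339d45ddb02dc240170a61e6dcf77fd6c92247fa9df3d6df3449"
EXPECTED_SIZE = 55020144

path = Path("model.safetensors")  # assumed local download location
assert path.stat().st_size == EXPECTED_SIZE, "size does not match the pointer"

digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)
assert digest.hexdigest() == EXPECTED_OID, "sha256 does not match the pointer"
print("weights match the LFS pointer")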