qgallouedec (HF Staff) committed
Commit 12e01c2 · verified · 1 Parent(s): 5b4e692

Upload Idefics2ForConditionalGeneration

Files changed (2):
  1. config.json +3 -3
  2. model.safetensors +2 -2
config.json CHANGED
@@ -7,7 +7,7 @@
   "perceiver_config": {
     "attention_dropout": 0.0,
     "hidden_act": "silu",
-    "hidden_size": 8,
+    "hidden_size": 16,
     "initializer_range": 0.02,
     "model_type": "idefics2_perceiver",
     "num_key_value_heads": 4,
@@ -21,7 +21,7 @@
     "attention_dropout": 0.0,
     "head_dim": null,
     "hidden_act": "silu",
-    "hidden_size": 8,
+    "hidden_size": 16,
     "initializer_range": 0.02,
     "intermediate_size": 32,
     "max_position_embeddings": 131072,
@@ -42,7 +42,7 @@
   "vision_config": {
     "attention_dropout": 0.0,
     "hidden_act": "gelu_pytorch_tanh",
-    "hidden_size": 8,
+    "hidden_size": 16,
     "image_size": 224,
     "initializer_range": 0.02,
     "intermediate_size": 32,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:90a559b76895df7f59e0de243104d34edf875888092cae7be011e8ae2ee26008
-size 2557856
+oid sha256:b058374302f8185bfa4d7b0181436e705a58bfaa955e28d032332c588e9a6b70
+size 5129768
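
Sanity check on the pointer swap: 5,129,768 / 2,557,856 ≈ 2.0, consistent with hidden_size doubling while every other dimension stays fixed (at sizes this small the parameter count is plausibly dominated by matrices that scale linearly in hidden_size, such as the token embeddings). A hedged sketch of verifying a downloaded copy against the size field in the new pointer; hf_hub_download is the real huggingface_hub API, while the repo id is again a placeholder:

import os
from huggingface_hub import hf_hub_download

# Downloading resolves the LFS pointer to the actual weights file.
path = hf_hub_download("some-user/tiny-idefics2", "model.safetensors")  # hypothetical repo id
assert os.path.getsize(path) == 5129768  # size recorded in the new LFS pointer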