AbstractPhil committed
Commit 9489ed2 · verified · 1 Parent(s): 38f56f8

Update config.json

Files changed (1): config.json (+2 −10)
config.json CHANGED
@@ -1,9 +1,7 @@
 {
-  # Model Integration IDs
   "adapter_id": "002",
   "name": "TwoStreamShuntAdapter",
 
-  # Backbone Model Dimensions
   "t5": {
     "model": "google/flan-t5-base",
     "hidden_size": 768,
@@ -13,32 +11,26 @@
     "hidden_size": 768,
   },
 
-  # Adapter Dimensions
   "bottleneck": 384,
   "heads": 12,
 
-  # Guidance Parameters
   "tau_init": 0.1,
   "max_guidance": 10.0,
 
-  # Projection Configuration
-  "proj_layers": 2,    # number of linear+GELU layers
-  "layer_norm": true,  # apply LayerNorm before stack
+  "proj_layers": 2,
+  "layer_norm": true,
   "dropout": 0.1,
   "use_dropout": true,
   "use_proj_stack": true,
 
-  # Runtime Safeguards
   "assert_input_dims": true,
 
-  # Routing Logic
   "routing": {
     "type": "cross_attention",
     "enable_causal_mask": false,
     "bidirectional": true
   },
 
-  # Version & Metadata
   "version": "v0.3.1",
   "description": "Upgraded FLAN-T5 ↔ CLIP-L token shunt with projection stack, dropout, and field-consistent architecture."
 }
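The substance of the change: strict JSON has no comment syntax, so the "#" section headers and the two inline notes made the old file unparseable, and the commit strips them while keeping proj_layers and layer_norm as plain keys. A minimal sketch of loading and sanity-checking the cleaned file (hypothetical usage, not part of the commit; it also assumes the trailing comma still visible before "}," in the context lines gets removed, since json.load rejects trailing commas as well):

import json

# Hypothetical consumer of the cleaned config; json.load would have raised
# JSONDecodeError on the old file because of the '#' comment lines.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["adapter_id"] == "002"
assert cfg["t5"]["hidden_size"] == 768
assert cfg["bottleneck"] % cfg["heads"] == 0   # 384 / 12 heads -> head dim 32

print(cfg["name"], cfg["version"])   # TwoStreamShuntAdapter v0.3.1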
 
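The inline comments deleted above are the only description of what proj_layers and layer_norm mean ("number of linear+GELU layers", "apply LayerNorm before stack"). Read that way, the fields could map to a module stack roughly as sketched below; this is an illustration of that reading, not the adapter's actual builder, and build_proj_stack with its dim argument is hypothetical:

import torch.nn as nn

def build_proj_stack(cfg: dict, dim: int) -> nn.Sequential:
    # Illustrative reconstruction of the deleted comments' description.
    layers = []
    if cfg.get("layer_norm"):
        layers.append(nn.LayerNorm(dim))       # LayerNorm before the stack
    for _ in range(cfg["proj_layers"]):        # linear+GELU layers
        layers.append(nn.Linear(dim, dim))
        layers.append(nn.GELU())
        if cfg.get("use_dropout"):
            layers.append(nn.Dropout(cfg["dropout"]))
    return nn.Sequential(*layers)

# e.g. stack = build_proj_stack(cfg, dim=cfg["bottleneck"])  # bottleneck = 384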