michaelfeil committed on
Commit
99553f4
·
verified ·
1 Parent(s): d6f618a

"use_bidirectional_attention": true flag

Browse files
Files changed (1) hide show
  1. config.json +2 -1
config.json CHANGED
@@ -37,5 +37,6 @@
37
  "torch_dtype": "bfloat16",
38
  "transformers_version": "4.44.2",
39
  "use_cache": true,
40
- "vocab_size": 128256
 
41
  }
 
37
  "torch_dtype": "bfloat16",
38
  "transformers_version": "4.44.2",
39
  "use_cache": true,
40
+ "vocab_size": 128256,
41
+ "use_bidirectional_attention": true
42
  }