{
  "alibi": false,
  "apply_residual_connection_post_layernorm": false,
  "architectures": [
    "RWForCausalLM"
  ],
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_RW.RWConfig",
    "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
  },
  "bias": false,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_dropout": 0.0,
  "hidden_size": 8192,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "RefinedWeb",
  "n_head": 128,
  "n_head_kv": 8,
  "n_layer": 60,
  "parallel_attn": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.27.4",
  "use_cache": true,
  "vocab_size": 65024
}
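Because "auto_map" resolves the config and model classes from the repository's own configuration_RW.py and modelling_RW.py files rather than from the transformers library, loading this config requires trust_remote_code=True. A minimal loading sketch, assuming the repository has been downloaded to a hypothetical local directory ./rw-model:

# Minimal sketch; "./rw-model" is a hypothetical path to this repository checkout.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

# trust_remote_code=True lets transformers import RWConfig / RWForCausalLM
# from the repo's own Python files, as declared in "auto_map" above.
config = AutoConfig.from_pretrained("./rw-model", trust_remote_code=True)
print(config.model_type)  # "RefinedWeb"

model = AutoModelForCausalLM.from_pretrained(
    "./rw-model",
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in the config
)

The torch_dtype argument only mirrors the dtype recorded in the config; transformers will otherwise default to float32 when loading the weights.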