add attn_pdrop and auto_map
parent ff4e06fd98
commit 85d94971d2
@@ -2,6 +2,10 @@
   "architectures": [
     "PhiForCausalLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+  },
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "gelu_new",
@@ -15,6 +19,7 @@
   "pretraining_tp": 1,
   "resid_pdrop": 0.0,
   "embd_pdrop": 0.0,
+  "attention_dropout": 0.0,
   "layer_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
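For context, the added auto_map block is what lets the transformers Auto classes resolve the custom PhiConfig and PhiForCausalLM implementations shipped alongside the checkpoint when remote code is trusted. A minimal sketch of loading such a checkpoint, assuming a local checkpoint directory "./phi-checkpoint" (a placeholder path, not part of this commit):

from transformers import AutoConfig, AutoModelForCausalLM

# trust_remote_code=True allows the auto_map entries to resolve
# configuration_phi.PhiConfig and modeling_phi.PhiForCausalLM from
# the checkpoint directory instead of the library's built-in classes.
config = AutoConfig.from_pretrained("./phi-checkpoint", trust_remote_code=True)
print(config.attention_dropout)  # 0.0, the value added in this commit

model = AutoModelForCausalLM.from_pretrained("./phi-checkpoint", trust_remote_code=True)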