From 85d94971d2049212f6a9d6f27b718875d93c8780 Mon Sep 17 00:00:00 2001
From: Susnato Dhar
Date: Sat, 25 Nov 2023 09:47:14 +0530
Subject: [PATCH] add attn_pdrop and auto_map

---
 config.json | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/config.json b/config.json
index 25a6b4d..67abf9a 100644
--- a/config.json
+++ b/config.json
@@ -2,6 +2,10 @@
   "architectures": [
     "PhiForCausalLM"
   ],
+  "auto_map": {
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+  },
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "gelu_new",
@@ -15,6 +19,7 @@
   "pretraining_tp": 1,
   "resid_pdrop": 0.0,
   "embd_pdrop": 0.0,
+  "attention_dropout": 0.0,
   "layer_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
@@ -24,4 +29,4 @@
   "transformers_version": "4.34.0.dev0",
   "use_cache": true,
   "vocab_size": 51200
-}
\ No newline at end of file
+}
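
Usage sketch (not part of the patch): the new "auto_map" entries let the transformers Auto classes resolve PhiConfig and PhiForCausalLM from configuration_phi.py and modeling_phi.py shipped in the same model repo when remote code is trusted, and "attention_dropout" becomes an ordinary config field. The repo id below is a placeholder assumption, not taken from the patch.

    from transformers import AutoConfig, AutoModelForCausalLM

    # Placeholder repo id; substitute the actual model repository.
    repo_id = "<namespace>/<phi-model-repo>"

    # auto_map routes AutoConfig -> configuration_phi.PhiConfig and
    # AutoModelForCausalLM -> modeling_phi.PhiForCausalLM, so loading the
    # repo's custom code requires trust_remote_code=True.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    print(config.attention_dropout)  # 0.0, the value added by this patch

    # The new field can be overridden like any other config attribute:
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True, attention_dropout=0.1)
    model = AutoModelForCausalLM.from_pretrained(repo_id, config=config, trust_remote_code=True)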