Upload modeling_mixformer_sequential.py

This commit is contained in:
Gustavo de Rosa 2023-09-27 15:22:44 +00:00 committed by huggingface-web
parent 8ab0f29ff6
commit b6a7e2fe15

@ -753,7 +753,7 @@ class MixFormerSequentialForCausalLM(MixFormerSequentialPreTrainedModel):
             **kwargs,
         ) -> CausalLMOutputWithPast:
             if attention_mask is not None and self.training:
-                raise ValueError("`attention_mask` is not supported during training.")
+                print("`attention_mask` is not supported during training. Using it might lead to unexpected results.")
             if past_key_values is None and attention_mask is None:
                 lm_logits = self.layers(input_ids)