From d4d973a6545718d6f9a905ee1cfaab7bbfaa2636 Mon Sep 17 00:00:00 2001
From: zhangyihuiben
Date: Tue, 25 Nov 2025 18:52:11 +0800
Subject: [PATCH] =?UTF-8?q?=E3=80=90master=E3=80=91=E3=80=90mcore=E3=80=91?=
 =?UTF-8?q?=E3=80=90bugfix=E3=80=91fix=20nope=5Flayer=5Finterval=20not=20r?=
 =?UTF-8?q?ejected=20by=20invalid=20value?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 mindformers/parallel_core/transformer_config.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/mindformers/parallel_core/transformer_config.py b/mindformers/parallel_core/transformer_config.py
index 297cdc5da..d88c3666a 100644
--- a/mindformers/parallel_core/transformer_config.py
+++ b/mindformers/parallel_core/transformer_config.py
@@ -660,6 +660,17 @@ class TransformerConfig(ModelParallelConfig, MFModelConfig):
                 setattr(self, k, v)
             del self.rope_scaling
 
+        if self.position_embedding_type == "none":
+            self.nope_layer_interval = None
+
+        if self.nope_layer_interval is None:
+            pass
+        elif not isinstance(self.nope_layer_interval, int):
+            raise TypeError("nope_layer_interval must be an int, "
+                            f"but got {type(self.nope_layer_interval)}.")
+        elif self.nope_layer_interval <= 0:
+            raise ValueError("nope_layer_interval must be larger than 0.")
+
         if self.bias_swiglu_fusion and self.hidden_act != 'swiglu':
             raise ValueError(
                 "When using bias_swiglu_fusion, hidden_act must be swiglu."
-- 
Gitee