
Commit dc77245

add glm4_moe

1 parent 8f6e9c9 commit dc77245

File tree

8 files changed: +801 -0 lines changed

mindone/transformers/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -594,6 +594,7 @@
     GlmModel,
     GlmPreTrainedModel,
 )
+from .models.glm4_moe import Glm4MoeForCausalLM, Glm4MoeModel, Glm4MoePreTrainedModel
 from .models.glpn import (
     GLPNFeatureExtractor,
     GLPNForDepthEstimation,
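
After this hunk, the three new classes are part of the package's public API. A minimal usage sketch, not part of the commit: it assumes mindone.transformers mirrors the upstream from_pretrained interface, and the checkpoint path is a placeholder.

# Sketch: exercising the newly exported classes.
# "path/to/glm4-moe-checkpoint" is a placeholder, not a real checkpoint.
from mindone.transformers import Glm4MoeForCausalLM

model = Glm4MoeForCausalLM.from_pretrained("path/to/glm4-moe-checkpoint")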

mindone/transformers/models/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -89,6 +89,7 @@
     gemma3,
     git,
     glm,
+    glm4_moe,
     glpn,
     got_ocr2,
     gpt2,
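
This hunk registers glm4_moe as a submodule of mindone.transformers.models, making the subpackage importable on its own. A quick check, sketched under the same placeholder assumptions as above:

# Sketch: this import only resolves once glm4_moe is listed in the package.
from mindone.transformers.models import glm4_moe

print(glm4_moe.Glm4MoeModel)  # re-exported by the subpackage __init__ (see below)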

mindone/transformers/models/auto/configuration_auto.py

Lines changed: 2 additions & 0 deletions

@@ -111,6 +111,7 @@
         ("gemma3_text", "Gemma3TextConfig"),
         ("git", "GitConfig"),
         ("glm", "GlmConfig"),
+        ("glm4_moe", "Glm4MoeConfig"),
         ("glpn", "GLPNConfig"),
         ("got_ocr2", "GotOcr2Config"),
         ("gpt2", "GPT2Config"),
@@ -379,6 +380,7 @@
         ("gemma3_text", "Gemma3ForCausalLM"),
         ("git", "GIT"),
         ("glm", "GLM"),
+        ("glm4_moe", "Glm4MoE"),
         ("glpn", "GLPN"),
         ("got_ocr2", "GOT-OCR2"),
         ("gpt2", "OpenAI GPT-2"),

mindone/transformers/models/auto/modeling_auto.py

Lines changed: 2 additions & 0 deletions

@@ -104,6 +104,7 @@
         ("gemma3_text", "Gemma3TextModel"),
         ("git", "GitModel"),
         ("glm", "GlmModel"),
+        ("glm4_moe", "Glm4MoeModel"),
         ("glpn", "GLPNModel"),
         ("got_ocr2", "GotOcr2ForConditionalGeneration"),
         ("gpt2", "GPT2Model"),
@@ -439,6 +440,7 @@
         ("gemma3_text", "Gemma3ForCausalLM"),
         ("git", "GitForCausalLM"),
         ("glm", "GlmForCausalLM"),
+        ("glm4_moe", "Glm4MoeForCausalLM"),
         ("got_ocr2", "GotOcr2ForConditionalGeneration"),
         ("gpt2", "GPT2LMHeadModel"),
         ("gpt_bigcode", "GPTBigCodeForCausalLM"),
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
# Copyright 2025 The HuggingFace Team. All rights reserved.
2+
#
3+
# This code is adapted from https://github.com/huggingface/transformers
4+
# with modifications to run transformers on mindspore.
5+
#
6+
# Licensed under the Apache License, Version 2.0 (the "License");
7+
# you may not use this file except in compliance with the License.
8+
# You may obtain a copy of the License at
9+
#
10+
# http://www.apache.org/licenses/LICENSE-2.0
11+
#
12+
# Unless required by applicable law or agreed to in writing, software
13+
# distributed under the License is distributed on an "AS IS" BASIS,
14+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
# See the License for the specific language governing permissions and
16+
# limitations under the License.
17+
from .modeling_glm4_moe import *
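
The star import re-exports whatever the companion modeling_glm4_moe.py lists in __all__; that is how the three classes imported at the top level become reachable from this subpackage. A sketch of the convention, with the __all__ contents inferred from the top-level import in this commit (the modeling file itself is not shown in this excerpt):

# Hypothetical tail of modeling_glm4_moe.py: only names listed in __all__
# are re-exported by "from .modeling_glm4_moe import *".
__all__ = ["Glm4MoeForCausalLM", "Glm4MoeModel", "Glm4MoePreTrainedModel"]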
