Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions mindone/transformers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1318,6 +1318,15 @@
T5Model,
T5PreTrainedModel,
)
from .models.t5gemma import (
T5GemmaEncoder,
T5GemmaEncoderModel,
T5GemmaForConditionalGeneration,
T5GemmaForSequenceClassification,
T5GemmaForTokenClassification,
T5GemmaModel,
T5GemmaPreTrainedModel,
)
from .models.table_transformer import (
TableTransformerForObjectDetection,
TableTransformerModel,
Expand Down
1 change: 1 addition & 0 deletions mindone/transformers/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,7 @@
swinv2,
switch_transformers,
t5,
t5gemma,
table_transformer,
tapas,
textnet,
Expand Down
2 changes: 2 additions & 0 deletions mindone/transformers/models/auto/configuration_auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,7 @@
("trocr", "TrOCRConfig"),
("tvp", "TvpConfig"),
("udop", "UdopConfig"),
("t5gemma", "T5GemmaConfig"),
("umt5", "UMT5Config"),
("unispeech", "UniSpeechConfig"),
("unispeech-sat", "UniSpeechSatConfig"),
Expand Down Expand Up @@ -521,6 +522,7 @@
("swinv2", "Swin Transformer V2"),
("swin2sr", "Swin2SR"),
("t5", "T5"),
("t5gemma", "T5Gemma"),
("t5v1.1", "T5v1.1"),
("table-transformer", "Table Transformer"),
("tapas", "TAPAS"),
Expand Down
7 changes: 7 additions & 0 deletions mindone/transformers/models/auto/modeling_auto.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,6 +233,7 @@
("timesformer", "TimesformerModel"),
("tvp", "TvpModel"),
("udop", "UdopModel"),
("t5gemma", "T5GemmaModel"),
("umt5", "UMT5Model"),
("unispeech", "UniSpeechModel"),
("unispeech-sat", "UniSpeechSatModel"),
Expand Down Expand Up @@ -328,6 +329,7 @@
("vipllava", "VipLlavaForConditionalGeneration"),
("visual_bert", "VisualBertForPreTraining"),
("vit_mae", "ViTMAEForPreTraining"),
("t5gemma", "T5GemmaForConditionalGeneration"),
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The new entries for t5gemma are not in alphabetical order in several mapping lists (MODEL_FOR_PRETRAINING_MAPPING_NAMES, MODEL_WITH_LM_HEAD_MAPPING_NAMES, MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING_NAMES). To maintain consistency within the file, please place them in their correct alphabetical positions, usually after the corresponding t5 entry.

("wav2vec2", "Wav2Vec2ForPreTraining"),
("wav2vec2-conformer", "Wav2Vec2ConformerForPreTraining"),
("xlm", "XLMWithLMHeadModel"),
Expand Down Expand Up @@ -397,6 +399,7 @@
("squeezebert", "SqueezeBertForMaskedLM"),
("t5", "T5ForConditionalGeneration"),
("tapas", "TapasForMaskedLM"),
("t5gemma", "T5GemmaForConditionalGeneration"),
("wav2vec2", "Wav2Vec2ForMaskedLM"),
("whisper", "WhisperForConditionalGeneration"),
("xlm", "XLMWithLMHeadModel"),
Expand Down Expand Up @@ -831,6 +834,7 @@
("seamless_m4t_v2", "SeamlessM4Tv2ForTextToText"),
("squeezebert", "SqueezeBertForSequenceClassification"),
("t5", "T5ForConditionalGeneration"),
("t5gemma", "T5GemmaForConditionalGeneration"),
("umt5", "UMT5ForConditionalGeneration"),
("xlm-prophetnet", "XLMProphetNetForConditionalGeneration"),
]
Expand Down Expand Up @@ -919,6 +923,7 @@
("starcoder2", "Starcoder2ForSequenceClassification"),
("t5", "T5ForSequenceClassification"),
("tapas", "TapasForSequenceClassification"),
("t5gemma", "T5GemmaForSequenceClassification"),
("umt5", "UMT5ForSequenceClassification"),
("xlm", "XLMForSequenceClassification"),
("xlm-roberta", "XLMRobertaForSequenceClassification"),
Expand Down Expand Up @@ -1070,6 +1075,7 @@
("squeezebert", "SqueezeBertForTokenClassification"),
("stablelm", "StableLmForTokenClassification"),
("t5", "T5ForTokenClassification"),
("t5gemma", "T5GemmaForTokenClassification"),
("umt5", "UMT5ForTokenClassification"),
("xlm", "XLMForTokenClassification"),
("xlm-roberta", "XLMRobertaForTokenClassification"),
Expand Down Expand Up @@ -1257,6 +1263,7 @@
("roberta-prelayernorm", "RobertaPreLayerNormModel"),
("squeezebert", "SqueezeBertModel"),
("t5", "T5EncoderModel"),
("t5gemma", "T5GemmaEncoderModel"),
("umt5", "UMT5EncoderModel"),
("xlm", "XLMModel"),
("xlm-roberta", "XLMRobertaModel"),
Expand Down
17 changes: 17 additions & 0 deletions mindone/transformers/models/t5gemma/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Copyright 2024 The HuggingFace Team. All rights reserved.
#
# This code is adapted from https://github.com/huggingface/transformers
# with modifications to run transformers on mindspore.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Explicit imports instead of a wildcard so the module's public surface is
# visible at a glance and nothing leaks into the namespace accidentally.
# T5GemmaEncoder is included because the top-level package __init__ imports
# it from here alongside the model classes.
from .modeling_t5gemma import (
    T5GemmaEncoder,
    T5GemmaEncoderModel,
    T5GemmaForConditionalGeneration,
    T5GemmaForSequenceClassification,
    T5GemmaForTokenClassification,
    T5GemmaModel,
    T5GemmaPreTrainedModel,
)

# Declare the public API explicitly; keep sorted alphabetically.
__all__ = [
    "T5GemmaEncoder",
    "T5GemmaEncoderModel",
    "T5GemmaForConditionalGeneration",
    "T5GemmaForSequenceClassification",
    "T5GemmaForTokenClassification",
    "T5GemmaModel",
    "T5GemmaPreTrainedModel",
]
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Wildcard imports (*) are discouraged as they can pollute the namespace and make it unclear which names are being imported. Please use explicit imports and define __all__ to control what is exported from the module. It is also good practice to keep the imported names and __all__ list sorted alphabetically.

Suggested change
from .modeling_t5gemma import *
from .modeling_t5gemma import (
T5GemmaEncoderModel,
T5GemmaForConditionalGeneration,
T5GemmaForSequenceClassification,
T5GemmaForTokenClassification,
T5GemmaModel,
T5GemmaPreTrainedModel,
)
__all__ = [
"T5GemmaEncoderModel",
"T5GemmaForConditionalGeneration",
"T5GemmaForSequenceClassification",
"T5GemmaForTokenClassification",
"T5GemmaModel",
"T5GemmaPreTrainedModel",
]

Loading