Commit 0243a3f

Add missing arguments
Signed-off-by: cyy <cyyever@outlook.com>
1 parent 7f38068 commit 0243a3f

25 files changed: +30 additions, −33 deletions

src/transformers/models/d_fine/modular_d_fine.py

Lines changed: 1 addition & 1 deletion
@@ -898,7 +898,7 @@ def __init__(self, config: DFineConfig):
 
 class DFineForObjectDetection(RTDetrForObjectDetection, DFinePreTrainedModel):
     def __init__(self, config: DFineConfig):
-        DFinePreTrainedModel.__init__(config)
+        DFinePreTrainedModel.__init__(self, config)
 
         # D-FINE encoder-decoder model
         self.eval_idx = config.eval_idx if config.eval_idx >= 0 else config.decoder_layers + config.eval_idx
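The same one-line fix recurs across several files in this commit: when a class bypasses super() and calls a specific base class's __init__ directly on the class object, the method is unbound, so self has to be passed explicitly along with config. A minimal sketch of the failure mode, with illustrative class names rather than the real transformers modules:

    class BasePreTrainedModel:
        def __init__(self, config):
            self.config = config

    class DetectionModel(BasePreTrainedModel):
        def __init__(self, config):
            # BasePreTrainedModel.__init__(config) would bind config to the `self`
            # parameter and raise TypeError: missing 1 required positional argument: 'config'
            BasePreTrainedModel.__init__(self, config)  # pass self explicitly

    DetectionModel(config={"num_labels": 80})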

src/transformers/models/data2vec/modular_data2vec_audio.py

Lines changed: 2 additions & 2 deletions
@@ -183,7 +183,7 @@ def load_adapter(self):
 
 class Data2VecAudioModel(Data2VecAudioPreTrainedModel, Wav2Vec2Model):
     def __init__(self, config: Data2VecAudioConfig):
-        Data2VecAudioPreTrainedModel.__init__(config)
+        Data2VecAudioPreTrainedModel.__init__(self, config)
         self.config = config
         self.feature_extractor = Data2VecAudioFeatureEncoder(config)
         self.feature_projection = Data2VecAudioFeatureProjection(config)
@@ -215,7 +215,7 @@ def forward(self, **super_kwargs):
 
 class Data2VecAudioForCTC(Data2VecAudioPreTrainedModel, Wav2Vec2ForCTC):
     def __init__(self, config):
-        Data2VecAudioPreTrainedModel.__init__(config)
+        Data2VecAudioPreTrainedModel.__init__(self, config)
 
         self.data2vec_audio = Data2VecAudioModel(config)
         self.dropout = nn.Dropout(config.final_dropout)

src/transformers/models/eomt/modular_eomt.py

Lines changed: 1 addition & 1 deletion
@@ -249,7 +249,7 @@ class EomtPatchEmbeddings(Dinov2PatchEmbeddings):
 
 class EomtEmbeddings(Dinov2Embeddings, nn.Module):
     def __init__(self, config: EomtConfig) -> None:
-        Dinov2Embeddings().__init__()
+        Dinov2Embeddings().__init__(self, config)
 
         self.config = config
         self.patch_size = config.patch_size

src/transformers/models/ernie4_5/modular_ernie4_5.py

Lines changed: 1 addition & 1 deletion
@@ -84,7 +84,7 @@ def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
 
 class Ernie4_5MLP(LlamaMLP):
     def __init__(self, config: Ernie4_5Config):
-        super().__init__()
+        super().__init__(config)
 
         self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.use_bias)
         self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.use_bias)
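The super() variants of the fix follow the same shape: the parent __init__ takes config as a required positional argument, so super().__init__() with no arguments fails before the subclass can set up its own layers. A rough, self-contained illustration with made-up classes (not the actual LlamaMLP/Ernie4_5MLP code):

    from types import SimpleNamespace
    import torch.nn as nn

    class BaseMLP(nn.Module):
        def __init__(self, config):
            super().__init__()
            self.hidden_size = config.hidden_size
            self.intermediate_size = config.intermediate_size

    class BiasedMLP(BaseMLP):
        def __init__(self, config):
            # super().__init__() -> TypeError: missing 1 required positional argument: 'config'
            super().__init__(config)  # parent sets hidden_size / intermediate_size
            # the subclass can then rebuild projections with its own bias flag
            self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.use_bias)

    BiasedMLP(SimpleNamespace(hidden_size=8, intermediate_size=16, use_bias=True))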

src/transformers/models/ernie4_5_moe/modular_ernie4_5_moe.py

Lines changed: 1 addition & 1 deletion
@@ -314,7 +314,7 @@ def forward(
 @auto_docstring
 class Ernie4_5_MoeForCausalLM(MixtralForCausalLM, Ernie4_5_MoePreTrainedModel):
     def __init__(self, config):
-        Ernie4_5_MoePreTrainedModel().__init__(config)
+        Ernie4_5_MoePreTrainedModel().__init__(self, config)
         self.model = Ernie4_5_MoeModel(config)
         self.vocab_size = config.vocab_size
         self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=config.use_bias)

src/transformers/models/esm/openfold_utils/residue_constants.py

Lines changed: 0 additions & 2 deletions
@@ -541,7 +541,6 @@ def make_bond_key(atom1_name: str, atom2_name: str) -> str:
 
 # A compact atom encoding with 14 columns
 # pylint: disable=line-too-long
-# pylint: disable=bad-whitespace
 restype_name_to_atom14_names: dict[str, list[str]] = {
     "ALA": ["N", "CA", "C", "O", "CB", "", "", "", "", "", "", "", "", ""],
     "ARG": ["N", "CA", "C", "O", "CB", "CG", "CD", "NE", "CZ", "NH1", "NH2", "", "", ""],
@@ -566,7 +565,6 @@ def make_bond_key(atom1_name: str, atom2_name: str) -> str:
     "UNK": ["", "", "", "", "", "", "", "", "", "", "", "", "", ""],
 }
 # pylint: enable=line-too-long
-# pylint: enable=bad-whitespace
 
 
 # This is the standard residue order when coding AA type as a number.

src/transformers/models/evolla/modular_evolla.py

Lines changed: 1 addition & 1 deletion
@@ -65,7 +65,7 @@
 
 class EvollaSaProtEmbeddings(EsmEmbeddings):
     def __init__(self, config):
-        super().__init__()
+        super().__init__(config)
         # remove the position_ids in EsmEmbeddings
         self.position_ids = None

src/transformers/models/falcon_h1/modular_falcon_h1.py

Lines changed: 2 additions & 2 deletions
@@ -252,7 +252,7 @@ def forward(
 
 class FalconH1RMSNormGated(MambaRMSNormGated):
     def __init__(self, hidden_size, eps=1e-6, n_groups=1, norm_before_gate=True):
-        super().__init__()
+        super().__init__(hidden_size=hidden_size, eps=eps)
         self.weight = nn.Parameter(torch.ones(hidden_size))
         self.variance_epsilon = eps
         self.n_groups = n_groups
@@ -813,7 +813,7 @@ def forward(
 
 class FalconH1MLP(LlamaMLP):
     def __init__(self, config: FalconH1Config = None):
-        super().__init__()
+        super().__init__(config)
         self.gate_multiplier, self.down_multiplier = config.mlp_multipliers
 
     def forward(self, x):

src/transformers/models/falcon_mamba/modular_falcon_mamba.py

Lines changed: 1 addition & 1 deletion
@@ -517,7 +517,7 @@ class FalconMambaCausalLMOutput(MambaCausalLMOutput):
 
 class FalconMambaModel(MambaModel, FalconMambaPreTrainedModel):
     def __init__(self, config):
-        FalconMambaPreTrainedModel.__init__(config)
+        FalconMambaPreTrainedModel.__init__(self, config)
 
         self.embeddings = nn.Embedding(config.vocab_size, config.hidden_size)
         self.layers = nn.ModuleList(

src/transformers/models/gemma3/modular_gemma3.py

Lines changed: 2 additions & 3 deletions
@@ -382,8 +382,7 @@ def __init__(self, config: Gemma3TextConfig):
 
 
 class Gemma3RMSNorm(Gemma2RMSNorm):
-    def __init__(self, dim: int, eps: float = 1e-6):
-        super().__init__()
+    pass
 
 
 class Gemma3RotaryEmbedding(Gemma2RotaryEmbedding):
@@ -396,7 +395,7 @@ class Gemma3Attention(Gemma2Attention):
     def __init__(self, config: Gemma3TextConfig, layer_idx: int):
         self.is_sliding = config.layer_types[layer_idx] == "sliding_attention"
 
-        super().__init__()
+        super().__init__(config, layer_idx)
         self.sliding_window = config.sliding_window if self.is_sliding else None
 
         self.q_norm = Gemma3RMSNorm(dim=config.head_dim, eps=config.rms_norm_eps)
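The Gemma3RMSNorm hunk goes the other way: instead of adding the missing dim argument, the override is dropped entirely, since it would only forward the same signature to the parent. With a bare pass body, Gemma2RMSNorm.__init__ is inherited unchanged. Sketched with stand-in classes:

    class RMSNormBase:
        def __init__(self, dim: int, eps: float = 1e-6):
            self.dim = dim
            self.eps = eps

    # Redundant override: same signature, nothing but a call to the parent.
    class RMSNormVerbose(RMSNormBase):
        def __init__(self, dim: int, eps: float = 1e-6):
            super().__init__(dim, eps)

    # Equivalent and shorter: inherit __init__ as-is.
    class RMSNormSlim(RMSNormBase):
        pass

    assert RMSNormSlim(8).eps == RMSNormVerbose(8).eps == 1e-6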
