
Commit 2f40139

fix import
1 parent 2edc8c1 commit 2f40139

10 files changed (+13 -13 lines changed)

src/transformers/models/bit/modeling_bit.py

Lines changed: 1 addition & 1 deletion
@@ -637,7 +637,7 @@ def _init_weights(self, module):
         elif isinstance(module, nn.Linear):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)):
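
Every hunk in this commit makes the same change: the private helper _calculate_fan_in_and_fan_out is now reached through torch.nn.init instead of the bare init name. Presumably the init imported in these modules is transformers' own initialization wrapper, which exposes the public init functions but not this private helper; that is an inference from the diff, not something the commit message states. A minimal standalone sketch of the bias-initialization pattern these hunks implement, in plain PyTorch with illustrative shapes:

import math

import torch
from torch import nn

# Kaiming-uniform weights, then a uniform bias drawn from
# [-1/sqrt(fan_in), 1/sqrt(fan_in)], mirroring nn.Linear.reset_parameters.
linear = nn.Linear(128, 64)
torch.nn.init.kaiming_uniform_(linear.weight, a=math.sqrt(5))
if linear.bias is not None:
    # The corrected call path: the private helper lives in torch.nn.init.
    fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(linear.weight)
    bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
    torch.nn.init.uniform_(linear.bias, -bound, bound)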

src/transformers/models/deprecated/trajectory_transformer/modeling_trajectory_transformer.py

Lines changed: 2 additions & 2 deletions
@@ -98,7 +98,7 @@ def _init_weights(self, module):
             for i in range(module.n_models):
                 init.kaiming_uniform_(module.weight[i], a=math.sqrt(5) / self.config.kaiming_initializer_range)
                 if module.bias is not None:
-                    fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight[i])
+                    fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight[i])
                     bound = (1 / math.sqrt(fan_in)) * self.config.initializer_range
                     init.uniform_(module.bias[i], -bound, bound)
 
@@ -161,7 +161,7 @@ def reset_parameters(self):
         for i in range(self.n_models):
             init.kaiming_uniform_(self.weight[i], a=math.sqrt(5))
             if self.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight[i])
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(self.weight[i])
                 bound = 1 / math.sqrt(fan_in)
                 init.uniform_(self.bias[i], -bound, bound)
 
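
The trajectory-transformer hunks apply the same fix inside a per-model loop: the ensemble linear layer in that file stacks one (out_features, in_features) weight slice per model, so fan_in is computed slice by slice. A standalone sketch of that loop, using plain tensors and made-up shapes rather than the model's actual parameters:

import math

import torch

# Ensemble-style weights: one 2-D slice per model, initialized independently.
# In the model these are nn.Parameter tensors; plain tensors keep the sketch simple.
n_models, out_features, in_features = 4, 32, 64
weight = torch.empty(n_models, out_features, in_features)
bias = torch.empty(n_models, out_features)
for i in range(n_models):
    torch.nn.init.kaiming_uniform_(weight[i], a=math.sqrt(5))
    # fan_in of the i-th slice, via the helper's torch.nn.init location.
    fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(weight[i])
    bound = 1 / math.sqrt(fan_in)
    torch.nn.init.uniform_(bias[i], -bound, bound)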

src/transformers/models/emu3/modeling_emu3.py

Lines changed: 2 additions & 2 deletions
@@ -944,13 +944,13 @@ def _init_weights(self, module):
         if isinstance(module, (nn.Conv2d, nn.Conv3d)):
             init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu")
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in)
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, nn.Linear):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, (nn.BatchNorm2d, nn.BatchNorm3d, nn.GroupNorm)):
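
The emu3 hunks also cover the convolution branch, which uses fan-out Kaiming-normal weights for ReLU activations and the same fan-in-bounded uniform bias. A brief standalone sketch of that branch with an illustrative Conv2d:

import math

import torch
from torch import nn

# Fan-out Kaiming-normal weights for a ReLU network, then the fan-in-bounded bias.
conv = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=3)
torch.nn.init.kaiming_normal_(conv.weight, mode="fan_out", nonlinearity="relu")
if conv.bias is not None:
    fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(conv.weight)
    bound = 1 / math.sqrt(fan_in)
    torch.nn.init.uniform_(conv.bias, -bound, bound)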

src/transformers/models/emu3/modular_emu3.py

Lines changed: 2 additions & 2 deletions
@@ -694,13 +694,13 @@ def _init_weights(self, module):
         if isinstance(module, (nn.Conv2d, nn.Conv3d)):
             init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu")
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in)
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, nn.Linear):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, (nn.BatchNorm2d, nn.BatchNorm3d, nn.GroupNorm)):

src/transformers/models/eomt/modeling_eomt.py

Lines changed: 1 addition & 1 deletion
@@ -1003,7 +1003,7 @@ def _init_weights(self, module: nn.Module) -> None:
         if isinstance(module, (nn.Linear, nn.Conv2d, nn.ConvTranspose2d)):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, nn.LayerNorm):

src/transformers/models/eomt/modular_eomt.py

Lines changed: 1 addition & 1 deletion
@@ -408,7 +408,7 @@ def _init_weights(self, module: nn.Module) -> None:
         if isinstance(module, (nn.Linear, nn.Conv2d, nn.ConvTranspose2d)):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, nn.LayerNorm):

src/transformers/models/mamba2/modeling_mamba2.py

Lines changed: 1 addition & 1 deletion
@@ -737,7 +737,7 @@ def _init_weights(self, module):
 
             # # Inverse of softplus: https://github.com/pytorch/pytorch/issues/72759
             inv_dt = dt + torch.log(-torch.expm1(-dt))
-            init.copy_(module.dt_proj.bias, inv_dt)
+            init.copy_(module.dt_bias, inv_dt)
 
             init.kaiming_uniform_(module.conv1d.weight, a=math.sqrt(5))
             if module.conv1d.bias is not None:
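
The mamba2 hunk is the one change beyond the helper's import path: the inverse-softplus value is copied into module.dt_bias rather than module.dt_proj.bias, presumably because the Mamba2 mixer stores the dt bias as a standalone dt_bias parameter (that reading is inferred from the diff, not stated in the commit). A quick standalone check of the inverse-softplus identity the copied value relies on:

import torch
import torch.nn.functional as F

# softplus(x + log(-expm1(-x))) == x for x > 0, so storing inv_dt in the bias
# lets a later softplus recover the intended dt values. Values here are illustrative.
dt = torch.rand(16) * 0.1 + 1e-3
inv_dt = dt + torch.log(-torch.expm1(-dt))
assert torch.allclose(F.softplus(inv_dt), dt, atol=1e-5)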

src/transformers/models/regnet/modeling_regnet.py

Lines changed: 1 addition & 1 deletion
@@ -272,7 +272,7 @@ def _init_weights(self, module):
         elif isinstance(module, nn.Linear):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)):

src/transformers/models/resnet/modeling_resnet.py

Lines changed: 1 addition & 1 deletion
@@ -259,7 +259,7 @@ def _init_weights(self, module):
         elif isinstance(module, nn.Linear):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)):

src/transformers/models/rt_detr/modeling_rt_detr_resnet.py

Lines changed: 1 addition & 1 deletion
@@ -313,7 +313,7 @@ def _init_weights(self, module):
         elif isinstance(module, nn.Linear):
             init.kaiming_uniform_(module.weight, a=math.sqrt(5))
             if module.bias is not None:
-                fan_in, _ = init._calculate_fan_in_and_fan_out(module.weight)
+                fan_in, _ = torch.nn.init._calculate_fan_in_and_fan_out(module.weight)
                 bound = 1 / math.sqrt(fan_in) if fan_in > 0 else 0
                 init.uniform_(module.bias, -bound, bound)
         elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)):
