
Commit 40dd378

Replace arg keep_prob with p in mindspore.nn.Dropout (#644)
1 parent 6e27f8f commit 40dd378


42 files changed: +99 -145 lines
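
Every hunk in this commit applies the same one-line migration: nn.Dropout used to be constructed with keep_prob (the probability of keeping an activation) and is now constructed with p (the probability of zeroing one), so keep_prob=1 - rate becomes p=rate. A minimal sketch of the two call styles, assuming a MindSpore 2.x environment where nn.Dropout accepts p; drop_rate and the tensor shape below are illustrative, not taken from the repository:

import mindspore as ms
from mindspore import nn, ops

drop_rate = 0.1  # probability of zeroing an element

# Old style (pre-2.x): the argument is the probability of KEEPING an element.
# dropout = nn.Dropout(keep_prob=1 - drop_rate)

# New style (2.x): `p` is the probability of DROPPING an element,
# matching the drop_rate values used throughout these models.
dropout = nn.Dropout(p=drop_rate)

x = ops.ones((2, 4), ms.float32)
dropout.set_train(False)  # in eval mode Dropout is an identity mapping
print(dropout(x))         # all ones: input passes through unchanged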

mindocr/models/backbones/mindcv_models/cait.py

Lines changed: 5 additions & 5 deletions
@@ -67,9 +67,9 @@ def __init__(self,
         self.q = nn.Dense(dim, dim, has_bias=qkv_bias)
         self.k = nn.Dense(dim, dim, has_bias=qkv_bias)
         self.v = nn.Dense(dim, dim, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(1 - attn_drop_rate)
+        self.attn_drop = nn.Dropout(p=attn_drop_rate)
         self.proj = nn.Dense(dim, dim)
-        self.proj_drop = nn.Dropout(1 - proj_drop_rate)
+        self.proj_drop = nn.Dropout(p=proj_drop_rate)
         self.softmax = nn.Softmax(axis=-1)

         self.attn_matmul_v = ops.BatchMatMul()

@@ -156,14 +156,14 @@ def __init__(self,
         self.scale = qk_scale or head_dim ** -0.5

         self.qkv = nn.Dense(dim, dim * 3, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(1 - attn_drop_rate)
+        self.attn_drop = nn.Dropout(p=attn_drop_rate)

         self.proj = nn.Dense(dim, dim, has_bias=False)

         self.proj_l = nn.Dense(num_heads, num_heads, has_bias=False)
         self.proj_w = nn.Dense(num_heads, num_heads, has_bias=False)

-        self.proj_drop = nn.Dropout(1 - proj_drop_rate)
+        self.proj_drop = nn.Dropout(p=proj_drop_rate)

         self.softmax = nn.Softmax(axis=-1)

@@ -271,7 +271,7 @@ def __init__(self,
         zeros = ops.Zeros()
         self.cls_token = Parameter(zeros((1, 1, embed_dim), ms.float32))
         self.pos_embed = Parameter(zeros((1, num_patches, embed_dim), ms.float32))
-        self.pos_drop = nn.Dropout(1 - drop_rate)
+        self.pos_drop = nn.Dropout(p=drop_rate)

         dpr = [drop_path_rate for i in range(depth)]

mindocr/models/backbones/mindcv_models/coat.py

Lines changed: 3 additions & 3 deletions
@@ -76,7 +76,7 @@ def __init__(
         self.fc1 = nn.Dense(in_channels=in_features, out_channels=hidden_features, has_bias=True)
         self.act = nn.GELU(approximate=False)
         self.fc2 = nn.Dense(in_channels=hidden_features, out_channels=out_features, has_bias=True)
-        self.drop = nn.Dropout(keep_prob=1.0 - drop)
+        self.drop = nn.Dropout(p=drop)

     def construct(self, x: Tensor) -> Tensor:
         x = self.fc1(x)

@@ -173,9 +173,9 @@ def __init__(
         self.q = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
         self.k = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
         self.v = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(keep_prob=1 - attn_drop)
+        self.attn_drop = nn.Dropout(p=attn_drop)
         self.proj = nn.Dense(dim, dim)
-        self.proj_drop = nn.Dropout(keep_prob=1 - proj_drop)
+        self.proj_drop = nn.Dropout(p=proj_drop)
         self.softmax = nn.Softmax(axis=-1)
         self.batch_matmul = ops.BatchMatMul()

mindocr/models/backbones/mindcv_models/convit.py

Lines changed: 5 additions & 5 deletions
@@ -85,10 +85,10 @@ def __init__(
         self.k = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
         self.v = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)

-        self.attn_drop = nn.Dropout(keep_prob=1.0 - attn_drop)
+        self.attn_drop = nn.Dropout(p=attn_drop)
         self.proj = nn.Dense(in_channels=dim, out_channels=dim)
         self.pos_proj = nn.Dense(in_channels=3, out_channels=num_heads)
-        self.proj_drop = nn.Dropout(keep_prob=1.0 - proj_drop)
+        self.proj_drop = nn.Dropout(p=proj_drop)
         self.gating_param = Parameter(ops.ones((num_heads), ms.float32))
         self.softmax = nn.Softmax(axis=-1)
         self.batch_matmul = ops.BatchMatMul()

@@ -144,9 +144,9 @@ def __init__(
         self.q = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
         self.k = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
         self.v = nn.Dense(in_channels=dim, out_channels=dim, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(keep_prob=1.0 - attn_drop)
+        self.attn_drop = nn.Dropout(p=attn_drop)
         self.proj = nn.Dense(in_channels=dim, out_channels=dim)
-        self.proj_drop = nn.Dropout(keep_prob=1.0 - proj_drop)
+        self.proj_drop = nn.Dropout(p=proj_drop)
         self.softmax = nn.Softmax(axis=-1)
         self.batch_matmul = ops.BatchMatMul()

@@ -261,7 +261,7 @@ def __init__(
         self.num_patches = self.patch_embed.num_patches

         self.cls_token = Parameter(ops.Zeros()((1, 1, embed_dim), ms.float32))
-        self.pos_drop = nn.Dropout(keep_prob=1.0 - drop_rate)
+        self.pos_drop = nn.Dropout(p=drop_rate)

         if self.use_pos_embed:
             self.pos_embed = Parameter(ops.Zeros()((1, self.num_patches, embed_dim), ms.float32))

mindocr/models/backbones/mindcv_models/crossvit.py

Lines changed: 5 additions & 5 deletions
@@ -50,9 +50,9 @@ def __init__(self, dim, num_heads=8, qkv_bias=False, attn_drop=0., proj_drop=0.)
         self.scale = head_dim ** -0.5

         self.qkv = nn.Dense(dim, dim * 3, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(1.0 - attn_drop)
+        self.attn_drop = nn.Dropout(p=attn_drop)
         self.proj = nn.Dense(dim, dim)
-        self.proj_drop = nn.Dropout(1.0 - proj_drop)
+        self.proj_drop = nn.Dropout(p=proj_drop)

     def construct(self, x: Tensor) -> Tensor:
         B, N, C = x.shape

@@ -152,9 +152,9 @@ def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0.
         self.wq = nn.Dense(dim, dim, has_bias=qkv_bias)
         self.wk = nn.Dense(dim, dim, has_bias=qkv_bias)
         self.wv = nn.Dense(dim, dim, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(1.0 - attn_drop)
+        self.attn_drop = nn.Dropout(p=attn_drop)
         self.proj = nn.Dense(dim, dim)
-        self.proj_drop = nn.Dropout(1.0 - proj_drop)
+        self.proj_drop = nn.Dropout(p=proj_drop)

     def construct(self, x: Tensor) -> Tensor:
         B, N, C = x.shape  # 3,3,16

@@ -341,7 +341,7 @@ def __init__(self, img_size=(224, 224), patch_size=(8, 16), in_channels=3, num_c
             d.append(c)
         d = tuple(d)
         self.cls_token = ms.ParameterTuple(d)
-        self.pos_drop = nn.Dropout(1.0 - drop_rate)
+        self.pos_drop = nn.Dropout(p=drop_rate)

         total_depth = sum([sum(x[-2:]) for x in depth])
         dpr = np.linspace(0, drop_path_rate, total_depth)  # stochastic depth decay rule

mindocr/models/backbones/mindcv_models/densenet.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def __init__(
         self.conv2 = nn.Conv2d(bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, pad_mode="pad", padding=1)

         self.drop_rate = drop_rate
-        self.dropout = nn.Dropout(keep_prob=1 - self.drop_rate)
+        self.dropout = nn.Dropout(p=self.drop_rate)

     def construct(self, features: Tensor) -> Tensor:
         bottleneck = self.conv1(self.relu1(self.norm1(features)))

mindocr/models/backbones/mindcv_models/edgenext.py

Lines changed: 3 additions & 3 deletions
@@ -264,9 +264,9 @@ def __init__(
         self.temperature = Parameter(Tensor(np.ones((num_heads, 1, 1)), ms.float32))

         self.qkv = nn.Dense(dim, dim * 3, has_bias=qkv_bias)
-        self.attn_drop = nn.Dropout(1 - attn_drop)
+        self.attn_drop = nn.Dropout(p=attn_drop)
         self.proj = nn.Dense(dim, dim)
-        self.proj_drop = nn.Dropout(1 - proj_drop)
+        self.proj_drop = nn.Dropout(p=proj_drop)

     def construct(self, x: Tensor) -> Tensor:
         B, N, C = x.shape

@@ -363,7 +363,7 @@ def __init__(self, in_chans=3, num_classes=1000,
         self.head = nn.Dense(dims[-1], num_classes)

         # self.head_dropout = nn.Dropout(kwargs["classifier_dropout"])
-        self.head_dropout = nn.Dropout(1.0)
+        self.head_dropout = nn.Dropout(p=0.0)
         self.head_init_scale = head_init_scale
         self._initialize_weights()
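
The edgenext head_dropout hunk is the one place where the literal value changes: the old positional 1.0 was a keep_prob of 1.0 (never drop), so the equivalent drop probability is p=0.0 and the layer stays a no-op. A quick sanity check, assuming standard inverted-dropout behaviour where p=0.0 masks nothing and the 1/(1 - p) rescaling is 1:

import mindspore as ms
from mindspore import nn, ops

head_dropout = nn.Dropout(p=0.0)
head_dropout.set_train(True)   # even in training mode, p=0.0 drops nothing

x = ops.ones((2, 3), ms.float32)
print(head_dropout(x))         # identical to x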

mindocr/models/backbones/mindcv_models/efficientnet.py

Lines changed: 1 addition & 1 deletion
@@ -421,7 +421,7 @@ def __init__(

         self.features = nn.SequentialCell(layers)
         self.avgpool = GlobalAvgPooling()
-        self.dropout = nn.Dropout(1 - dropout_rate)
+        self.dropout = nn.Dropout(p=dropout_rate)
         self.mlp_head = nn.Dense(lastconv_output_channels, num_classes)
         self._initialize_weights()

mindocr/models/backbones/mindcv_models/ghostnet.py

Lines changed: 2 additions & 2 deletions
@@ -177,7 +177,7 @@ class GhostNet(nn.Cell):
         num_classes: number of classification classes. Default: 1000.
         in_channels: number of input channels. Default: 3.
         width: base width of hidden channel in blocks. Default: 1.0
-        droupout: the probability of the features before classification. Default: 0.2
+        dropout: the probability of the features before classification. Default: 0.2
     """

     def __init__(

@@ -227,7 +227,7 @@ def __init__(
                              padding=0, stride=1, has_bias=True, pad_mode="pad")
         self.act2 = nn.ReLU()
         if self.dropout_rate > 0:
-            self.dropout = nn.Dropout(self.dropout_rate)
+            self.dropout = nn.Dropout(p=self.dropout_rate)
         self.classifier = nn.Dense(output_channel, num_classes)
         self._initialize_weights()

mindocr/models/backbones/mindcv_models/googlenet.py

Lines changed: 2 additions & 2 deletions
@@ -109,7 +109,7 @@ def __init__(
         self.fc2 = nn.Dense(1024, num_classes)
         self.flatten = nn.Flatten()
         self.relu = nn.ReLU()
-        self.dropout = nn.Dropout(1 - drop_rate)
+        self.dropout = nn.Dropout(p=drop_rate)

     def construct(self, x: Tensor) -> Tensor:
         x = self.avg_pool(x)

@@ -170,7 +170,7 @@ def __init__(
         self.aux2 = InceptionAux(528, num_classes, drop_rate=drop_rate_aux)

         self.pool = GlobalAvgPooling()
-        self.dropout = nn.Dropout(keep_prob=1 - drop_rate)
+        self.dropout = nn.Dropout(p=drop_rate)
         self.classifier = nn.Dense(1024, num_classes)
         self._initialize_weights()

mindocr/models/backbones/mindcv_models/inception_v3.py

Lines changed: 1 addition & 1 deletion
@@ -266,7 +266,7 @@ def __init__(
         self.inception7c = InceptionE(2048)

         self.pool = GlobalAvgPooling()
-        self.dropout = nn.Dropout(keep_prob=1 - drop_rate)
+        self.dropout = nn.Dropout(p=drop_rate)
         self.num_features = 2048
         self.classifier = nn.Dense(self.num_features, num_classes)
         self._initialize_weights()
