Skip to content

Commit 0380e5a

Browse files
matsumotosan, Borda, SkafteNicki, and deependujha
authored
Remove unviolated linting rules (#21229)
* remove unviolated ignores. autofix.
* remove
* remove more
* simple change to re-run ci
* trigger ci

---------

Co-authored-by: Jirka Borovec <6035284+Borda@users.noreply.github.com>
Co-authored-by: Nicki Skafte Detlefsen <skaftenicki@gmail.com>
Co-authored-by: Deependu <deependujha21@gmail.com>
1 parent c913649 commit 0380e5a

File tree

17 files changed

+20
-47
lines changed

17 files changed

+20
-47
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,7 @@ repos:
2727
hooks:
2828
- id: end-of-file-fixer
2929
- id: trailing-whitespace
30-
# keep formatting in README flexible
31-
exclude: README.md
30+
exclude: README.md # keep formatting in README flexible
3231
- id: check-json
3332
- id: check-yaml
3433
- id: check-toml

pyproject.toml

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -92,9 +92,6 @@ ignore = [
9292
"src/**" = [
9393
"S101", # todo: Use of `assert` detected
9494
"S113", # todo: Probable use of requests call without timeout
95-
"S301", # todo: `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
96-
"S324", # todo: Probable use of insecure hash functions in `hashlib`
97-
"S403", # todo: `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure
9895
"S404", # todo: `subprocess` module is possibly insecure
9996
"S602", # todo: `subprocess` call with `shell=True` identified, security issue
10097
"S603", # todo: `subprocess` call: check for execution of untrusted input
@@ -106,18 +103,14 @@ ignore = [
106103
"tests/**" = [
107104
"S101", # Use of `assert` detected
108105
"S301", # `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
109-
"S113", # todo: Probable use of requests call without timeout
110106
"S311", # todo: Standard pseudo-random generators are not suitable for cryptographic purposes
111107
"S108", # todo: Probable insecure usage of temporary file or directory: "/tmp/sys-customizations-sync"
112-
"S202", # Uses of `tarfile.extractall()`
113108
"S403", # `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure
114109
"S404", # `subprocess` module is possibly insecure
115110
"S602", # todo: `subprocess` call with `shell=True` identified, security issue
116111
"S603", # todo: `subprocess` call: check for execution of untrusted input
117112
"S605", # todo: Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
118113
"S607", # todo: Starting a process with a partial executable path
119-
"RET504", # todo:Unnecessary variable assignment before `return` statement
120-
"PT004", # todo: Fixture `tmpdir_unittest_fixture` does not return anything, add leading underscore
121114
"PT012", # todo: `pytest.raises()` block should contain a single simple statement
122115
"PT019", # todo: Fixture `_` without value is injected as parameter, use `@pytest.mark.usefixtures` instead
123116
]

tests/parity_fabric/models.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,8 +60,7 @@ def forward(self, x):
6060
x = torch.flatten(x, 1) # flatten all dimensions except batch
6161
x = F.relu(self.fc1(x))
6262
x = F.relu(self.fc2(x))
63-
x = self.fc3(x)
64-
return x
63+
return self.fc3(x)
6564

6665
def get_optimizer(self):
6766
return torch.optim.SGD(self.parameters(), lr=0.0001)

tests/tests_fabric/strategies/test_model_parallel_integration.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -83,8 +83,7 @@ def _parallelize_feed_forward_fsdp2(model, device_mesh):
8383

8484
def _parallelize_feed_forward_fsdp2_tp(model, device_mesh):
8585
model = _parallelize_feed_forward_tp(model, device_mesh)
86-
model = _parallelize_feed_forward_fsdp2(model, device_mesh)
87-
return model
86+
return _parallelize_feed_forward_fsdp2(model, device_mesh)
8887

8988

9089
@RunIf(min_torch="2.4", standalone=True, min_cuda_gpus=4)

tests/tests_pytorch/accelerators/test_xla.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -46,8 +46,7 @@ def __init__(self):
4646
def forward(self, x):
4747
x = self.layer_1(x)
4848
x = self.layer_2(x)
49-
x = self.layer_3(x)
50-
return x
49+
return self.layer_3(x)
5150

5251

5352
@RunIf(tpu=True, standalone=True)
@@ -230,8 +229,7 @@ def __init__(self):
230229
def forward(self, x):
231230
x = self.net_a(x)
232231
x = self.layer_2(x)
233-
x = self.net_b(x)
234-
return x
232+
return self.net_b(x)
235233

236234

237235
@RunIf(tpu=True)

tests/tests_pytorch/callbacks/test_lr_monitor.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -428,8 +428,7 @@ def __init__(self):
428428

429429
def forward(self, x):
430430
x = self.linear_a(x)
431-
x = self.linear_b(x)
432-
return x
431+
return self.linear_b(x)
433432

434433
def configure_optimizers(self):
435434
param_groups = [
@@ -603,8 +602,7 @@ def __init__(self, lr, momentum):
603602

604603
def forward(self, x):
605604
x = self.linear_a(x)
606-
x = self.linear_b(x)
607-
return x
605+
return self.linear_b(x)
608606

609607
def configure_optimizers(self):
610608
param_groups = [

tests/tests_pytorch/callbacks/test_spike.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,7 @@ def training_step(self, batch, batch_idx: int):
2929
if curr_loss_val is None:
3030
curr_loss_val = batch_idx
3131

32-
loss = self.layer(torch.tensor(curr_loss_val, device=self.device, dtype=self.dtype).view(1, 1))
33-
return loss
32+
return self.layer(torch.tensor(curr_loss_val, device=self.device, dtype=self.dtype).view(1, 1))
3433

3534
def configure_optimizers(self):
3635
return torch.optim.SGD(self.parameters(), lr=1e-3)

tests/tests_pytorch/callbacks/test_stochastic_weight_avg.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -266,8 +266,7 @@ def __init__(self):
266266

267267
def forward(self, x):
268268
x = self.layer1(x)
269-
x = self.layer2(x)
270-
return x
269+
return self.layer2(x)
271270

272271
def configure_optimizers(self):
273272
params = [{"params": self.layer1.parameters(), "lr": 0.1}, {"params": self.layer2.parameters(), "lr": 0.2}]

tests/tests_pytorch/helpers/advanced_models.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -46,8 +46,7 @@ def block(in_feat, out_feat, normalize=True):
4646

4747
def forward(self, z):
4848
img = self.model(z)
49-
img = img.view(img.size(0), *self.img_shape)
50-
return img
49+
return img.view(img.size(0), *self.img_shape)
5150

5251

5352
class Discriminator(nn.Module):
@@ -204,8 +203,7 @@ def forward(self, x):
204203
x = torch.tanh(x)
205204
x = self.c_d1_bn(x)
206205
x = self.c_d1_drop(x)
207-
x = self.c_d2(x)
208-
return x
206+
return self.c_d2(x)
209207

210208
def training_step(self, batch, batch_nb):
211209
x, y = batch

tests/tests_pytorch/helpers/simple_models.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,7 @@ def forward(self, x):
100100
x = self.layer_1a(x)
101101
x = self.layer_2(x)
102102
x = self.layer_2a(x)
103-
x = self.layer_end(x)
104-
return x
103+
return self.layer_end(x)
105104

106105
def configure_optimizers(self):
107106
optimizer = torch.optim.Adam(self.parameters(), lr=0.01)

0 commit comments

Comments (0)