2 files changed, +4 −5 lines changed

lightning_examples/text-transformers

 title: Finetune Transformers Models with PyTorch Lightning
 author: PL team
 created: 2021-01-31
-updated: 2021-12-03
+updated: 2022-02-08
 license: CC BY-SA
-build: 2
+build: 0
 tags:
   - Text
 description: |
@@ -17,5 +17,4 @@ requirements:
   - scikit-learn
   - torchtext>=0.9
 accelerator:
-  - CPU
   - GPU
@@ -224,8 +224,8 @@ def setup(self, stage=None) -> None:

         # Calculate total steps
         tb_size = self.hparams.train_batch_size * max(1, self.trainer.gpus)
-        ab_size = self.trainer.accumulate_grad_batches * float(self.trainer.max_epochs)
-        self.total_steps = (len(train_loader.dataset) // tb_size) // ab_size
+        ab_size = tb_size * self.trainer.accumulate_grad_batches
+        self.total_steps = int((len(train_loader.dataset) / ab_size) * float(self.trainer.max_epochs))

     def configure_optimizers(self):
         """Prepare optimizer and schedule (linear warmup and decay)"""