Skip to content

Commit 27f19ba

Browse files
committed
make sure diffusion prior trainer can operate with no warmup
1 parent 8f38339 commit 27f19ba

File tree

2 files changed

+4
-3
lines changed

2 files changed

+4
-3
lines changed

dalle2_pytorch/trainer.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ def __init__(
181181
eps = 1e-6,
182182
max_grad_norm = None,
183183
group_wd_params = True,
184-
warmup_steps = 1,
184+
warmup_steps = None,
185185
cosine_decay_max_steps = None,
186186
**kwargs
187187
):
@@ -357,7 +357,8 @@ def update(self):
357357

358358
# accelerator will occasionally skip optimizer steps in a "dynamic loss scaling strategy"
359359
if not self.accelerator.optimizer_step_was_skipped:
360-
with self.warmup_scheduler.dampening():
360+
sched_context = self.warmup_scheduler.dampening if exists(self.warmup_scheduler) else nullcontext
361+
with sched_context():
361362
self.scheduler.step()
362363

363364
if self.use_ema:

dalle2_pytorch/version.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = '1.8.1'
1+
__version__ = '1.8.2'

0 commit comments

Comments
 (0)