diff --git a/helpers/caching/vae.py b/helpers/caching/vae.py
index 3d46b752..88d3f585 100644
--- a/helpers/caching/vae.py
+++ b/helpers/caching/vae.py
@@ -144,8 +144,9 @@ def generate_vae_cache_filename(self, filepath: str) -> tuple:
         subfolders = ""
         if self.instance_data_dir is not None:
             subfolders = os.path.dirname(filepath).replace(self.instance_data_dir, "")
-            if len(subfolders) > 0 and subfolders[0] == "/" and self.cache_dir[0] != "/":
-                subfolders = subfolders[1:]
+            subfolders = subfolders.lstrip(os.sep)
+
+        if len(subfolders) > 0:
             full_filename = os.path.join(self.cache_dir, subfolders, base_filename)
             # logger.debug(
             #     f"full_filename: {full_filename} = os.path.join({self.cache_dir}, {subfolders}, {base_filename})"
diff --git a/helpers/training/trainer.py b/helpers/training/trainer.py
index e5091ec9..65f3aaac 100644
--- a/helpers/training/trainer.py
+++ b/helpers/training/trainer.py
@@ -2501,6 +2501,7 @@ def train(self):
                 )
 
                 # Backpropagate
+                grad_norm = None
                 if not self.config.disable_accelerator:
                     training_logger.debug("Backwards pass.")
                     self.accelerator.backward(loss)
@@ -2514,7 +2515,6 @@ def train(self):
                             if param.grad is not None:
                                 param.grad.data = param.grad.data.to(torch.float32)
 
-                    grad_norm = None
                     if (
                         self.accelerator.sync_gradients
                         and self.config.optimizer != "optimi-stableadamw"
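Note on the vae.py hunk: replacing the manual leading-slash check with str.lstrip(os.sep) strips any leading path separator from the relative subfolder before it is joined onto the cache directory. A minimal sketch of the effect, using made-up directory values purely for illustration (the real values come from self.instance_data_dir and self.cache_dir):

    import os

    instance_data_dir = "/data/images"  # hypothetical source directory
    cache_dir = "/opt/cache/vae"        # hypothetical absolute cache directory
    filepath = "/data/images/subset-a/sample.png"

    subfolders = os.path.dirname(filepath).replace(instance_data_dir, "")  # "/subset-a"
    # The old check only dropped the leading "/" when cache_dir was relative;
    # with an absolute cache_dir, os.path.join would discard cache_dir entirely
    # because subfolders still looked like an absolute path.
    subfolders = subfolders.lstrip(os.sep)  # "subset-a"

    print(os.path.join(cache_dir, subfolders, "sample.pt"))
    # /opt/cache/vae/subset-a/sample.pt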
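Note on the trainer.py hunks: grad_norm = None now runs before the disable_accelerator branch rather than inside it, so the name is bound on every path even when the backward/clipping block is skipped. A rough sketch of the failure mode this appears to guard against, with a fabricated flag standing in for self.config.disable_accelerator:

    disable_accelerator = True  # hypothetical stand-in for config.disable_accelerator

    grad_norm = None  # bound up front, as in the patched code
    if not disable_accelerator:
        # the real code backpropagates here and may assign grad_norm
        # from gradient clipping when sync_gradients is True
        grad_norm = 1.0

    # With the old placement (inside the branch), a later reference like this
    # would fail with an unbound-name error whenever the branch was skipped.
    print(f"grad_norm={grad_norm}")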