Merge pull request #1078 from bghira/main
fix for nested image subdirs w/ duplicated filenames across subdirs
bghira authored Oct 18, 2024
2 parents fb4f106 + e34ad1c commit 8bf644f
Showing 2 changed files with 4 additions and 3 deletions.
5 changes: 3 additions & 2 deletions helpers/caching/vae.py
@@ -144,8 +144,9 @@ def generate_vae_cache_filename(self, filepath: str) -> tuple:
subfolders = ""
if self.instance_data_dir is not None:
subfolders = os.path.dirname(filepath).replace(self.instance_data_dir, "")
if len(subfolders) > 0 and subfolders[0] == "/" and self.cache_dir[0] != "/":
subfolders = subfolders[1:]
subfolders = subfolders.lstrip(os.sep)

if len(subfolders) > 0:
full_filename = os.path.join(self.cache_dir, subfolders, base_filename)
# logger.debug(
# f"full_filename: {full_filename} = os.path.join({self.cache_dir}, {subfolders}, {base_filename})"
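The replacement strips any leading path separator from subfolders unconditionally. Because os.path.join discards every earlier component once it meets an absolute one, a subfolders value that still began with os.sep would have bypassed self.cache_dir instead of nesting the cache file beneath it. A minimal standalone sketch of the fixed logic follows; the function name and the example paths are illustrative, not taken from the repository:

import os

def vae_cache_path(filepath: str, instance_data_dir: str, cache_dir: str) -> str:
    # Swap the image extension for .pt, keeping only the basename.
    base_filename = os.path.splitext(os.path.basename(filepath))[0] + ".pt"
    subfolders = ""
    if instance_data_dir is not None:
        # Path of the image relative to the dataset root, with any leading
        # separator removed so os.path.join does not treat it as absolute.
        subfolders = os.path.dirname(filepath).replace(instance_data_dir, "")
        subfolders = subfolders.lstrip(os.sep)
    if len(subfolders) > 0:
        return os.path.join(cache_dir, subfolders, base_filename)
    return os.path.join(cache_dir, base_filename)

# Images that share a basename across subdirectories now map to distinct cache files.
print(vae_cache_path("/data/images/cats/0001.png", "/data/images", "/cache/vae"))  # /cache/vae/cats/0001.pt
print(vae_cache_path("/data/images/dogs/0001.png", "/data/images", "/cache/vae"))  # /cache/vae/dogs/0001.pt
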
2 changes: 1 addition & 1 deletion helpers/training/trainer.py
@@ -2501,6 +2501,7 @@ def train(self):
         )

         # Backpropagate
+        grad_norm = None
         if not self.config.disable_accelerator:
             training_logger.debug("Backwards pass.")
             self.accelerator.backward(loss)
@@ -2514,7 +2515,6 @@
                 if param.grad is not None:
                     param.grad.data = param.grad.data.to(torch.float32)

-            grad_norm = None
             if (
                 self.accelerator.sync_gradients
                 and self.config.optimizer != "optimi-stableadamw"
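Moving the grad_norm = None initialization ahead of the if not self.config.disable_accelerator: block means the variable is bound on every path through the training step, rather than only on the path that actually runs the backward pass and gradient clipping. A minimal sketch of the pattern, with a plain boolean standing in for the trainer's config flag and a float standing in for the value returned by clipping:

def step_without_early_init(disable_accelerator: bool):
    if not disable_accelerator:
        grad_norm = 1.0  # stand-in for the clipping result
    return grad_norm  # raises UnboundLocalError when disable_accelerator is True


def step_with_early_init(disable_accelerator: bool):
    grad_norm = None  # mirrors the relocated assignment in train()
    if not disable_accelerator:
        grad_norm = 1.0
    return grad_norm  # None when the branch is skipped, never an error


print(step_with_early_init(False))  # 1.0
print(step_with_early_init(True))   # None
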
