support partial collage cache update via fn arg
azuline committed Oct 17, 2023
1 parent 04363be commit 0292366
Showing 1 changed file with 19 additions and 6 deletions.
rose/cache.py (19 additions, 6 deletions)
@@ -169,9 +169,8 @@ def update_cache(c: Config, force: bool = False) -> None:
     Update the read cache to match the data for all releases in the music source directory. Delete
     any cached releases that are no longer present on disk.
     """
-    dirs = [Path(d.path).resolve() for d in os.scandir(c.music_source_dir) if d.is_dir()]
-    update_cache_for_releases(c, dirs, force)
-    update_cache_for_collages(c, force)
+    update_cache_for_releases(c, None, force)
+    update_cache_for_collages(c, None, force)
     update_cache_delete_nonexistent_releases(c)
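With this change, the top-level update_cache() passes None through so that everything is refreshed, while other callers can restrict the refresh to a subset. A rough usage sketch of the new calling convention (the Config import path and the example release path and collage name are assumptions, not taken from this commit):

from pathlib import Path

from rose.cache import update_cache_for_collages, update_cache_for_releases
from rose.config import Config  # import path assumed


def refresh_examples(c: Config) -> None:
    # None refreshes everything; this is what update_cache() itself now does.
    update_cache_for_releases(c, None)
    update_cache_for_collages(c, None)

    # An explicit subset limits the refresh to only those releases/collages.
    update_cache_for_releases(c, [Path("/music/Artist - 2020 - Album")])  # hypothetical path
    update_cache_for_collages(c, ["Road Trip"])  # hypothetical collage name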


@@ -191,7 +190,12 @@ def update_cache_delete_nonexistent_releases(c: Config) -> None:
             logger.info(f"Evicted release {row['source_path']} from cache")


-def update_cache_for_releases(c: Config, release_dirs: list[Path], force: bool = False) -> None:
+def update_cache_for_releases(
+    c: Config,
+    # Leave as None to update all releases.
+    release_dirs: list[Path] | None = None,
+    force: bool = False,
+) -> None:
     """
     Update the read cache to match the data for any passed-in releases. If a directory lacks a
     .rose.{uuid}.toml datafile, create the datafile for the release and set it to the initial state.
@@ -207,6 +211,9 @@ def update_cache_for_releases(c: Config, release_dirs: list[Path], force: bool =
     With these optimizations, we make a lot of readdir and stat calls, but minimize file and
     database accesses to solely the files that have updated since the last cache run.
     """
+    release_dirs = release_dirs or [
+        Path(d.path).resolve() for d in os.scandir(c.music_source_dir) if d.is_dir()
+    ]
     logger.info(f"Refreshing the read cache for {len(release_dirs)} releases")
     logger.debug(f"Refreshing cached data for {', '.join([r.name for r in release_dirs])}")

@@ -806,7 +813,12 @@ def update_cache_for_releases(c: Config, release_dirs: list[Path], force: bool =
     logger.debug(f"Release update loop time {time.time() - loop_start=}")


-def update_cache_for_collages(c: Config, force: bool = False) -> None:
+def update_cache_for_collages(
+    c: Config,
+    # Leave as None to update all collages.
+    collage_names: list[str] | None = None,
+    force: bool = False,
+) -> None:
     """
     Update the read cache to match the data for all stored collages.
@@ -824,7 +836,8 @@ def update_cache_for_collages(c: Config, force: bool = False) -> None:
         path = Path(f.path)
         if path.suffix != ".toml":
             continue
-        files.append((path.resolve(), path.stem, f))
+        if collage_names is None or path.stem in collage_names:
+            files.append((path.resolve(), path.stem, f))
     logger.info(f"Refreshing the read cache for {len(files)} collages")

     cached_collages: dict[str, CachedCollage] = {}
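The new filter compares against path.stem, the TOML filename without its extension, so callers pass bare collage names rather than file paths. A tiny illustration of that matching (the file name and collage name are made up):

from pathlib import Path

collage_names = ["Road Trip"]   # hypothetical caller-supplied subset
path = Path("Road Trip.toml")   # hypothetical file found by the scan

# path.stem drops the ".toml" suffix, so it matches the bare collage name.
assert path.stem == "Road Trip"
assert path.stem in collage_names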
