Skip to content

Commit

Permalink
add collage.py with the collage lifecycle ops
Browse files Browse the repository at this point in the history
  • Loading branch information
azuline committed Oct 17, 2023
1 parent 0292366 commit 44a066f
Show file tree
Hide file tree
Showing 13 changed files with 229 additions and 73 deletions.
47 changes: 40 additions & 7 deletions rose/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,11 +170,12 @@ def update_cache(c: Config, force: bool = False) -> None:
any cached releases that are no longer present on disk.
"""
update_cache_for_releases(c, None, force)
update_cache_evict_nonexistent_releases(c)
update_cache_for_collages(c, None, force)
update_cache_delete_nonexistent_releases(c)
update_cache_evict_nonexistent_collages(c)


def update_cache_delete_nonexistent_releases(c: Config) -> None:
def update_cache_evict_nonexistent_releases(c: Config) -> None:
logger.info("Evicting cached releases that are not on disk")
dirs = [Path(d.path).resolve() for d in os.scandir(c.music_source_dir) if d.is_dir()]
with connect(c) as conn:
Expand Down Expand Up @@ -505,7 +506,7 @@ def update_cache_for_releases(
continue

# Otherwise, read tags from disk and construct a new cached_track.
logger.debug(f"Track cache miss for {f}, reading tags from disk")
logger.debug(f"Track cache miss for {f.name}, reading tags from disk")
tags = AudioFile.from_file(track_path)

# Now that we're here, pull the release tags. We also need them to compute the
Expand Down Expand Up @@ -897,7 +898,9 @@ def update_cache_for_collages(
nonexistent_release_idxs.append(idx)
continue
cached_collage.release_ids.append(rls["uuid"])
logger.debug(f"Found {len(cached_collage.release_ids)} release(s) in {source_path}")

logger.info(f"Applying cache updates for collage {cached_collage.name}")
conn.execute(
"""
INSERT INTO collages (name, source_mtime) VALUES (?, ?)
Expand All @@ -921,8 +924,6 @@ def update_cache_for_collages(
args,
)

logger.info(f"Applying cache updates for collage {cached_collage.name}")

if nonexistent_release_idxs:
new_diskdata_releases: list[dict[str, str]] = []
removed_releases: list[str] = []
Expand All @@ -934,7 +935,6 @@ def update_cache_for_collages(

with source_path.open("wb") as fp:
tomli_w.dump({"releases": new_diskdata_releases}, fp)

logger.info(
f"Removing nonexistent releases from collage {cached_collage.name}: "
f"{','.join(removed_releases)}"
Expand All @@ -943,6 +943,26 @@ def update_cache_for_collages(
logger.debug(f"Collage update loop time {time.time() - loop_start=}")


def update_cache_evict_nonexistent_collages(c: Config) -> None:
    """Delete cached collages whose backing `.toml` file no longer exists.

    Scans ``$music_source_dir/!collages`` for ``*.toml`` files and removes any
    cached collage whose name (file stem) is not among them.
    """
    logger.info("Evicting cached collages that are not on disk")
    collage_names: list[str] = []
    for f in os.scandir(c.music_source_dir / "!collages"):
        p = Path(f.path)
        if p.is_file() and p.suffix == ".toml":
            collage_names.append(p.stem)

    with connect(c) as conn:
        # Look up the victims before deleting so we can log what was actually
        # evicted. The previous version iterated `collage_names` (the collages
        # still on disk) and logged the *survivors* as evicted.
        # Note: an empty placeholder list yields `NOT IN ()`, which SQLite
        # accepts (always false), so every cached collage is evicted.
        cursor = conn.execute(
            f"""
            SELECT name FROM collages
            WHERE name NOT IN ({",".join(["?"] * len(collage_names))})
            """,
            collage_names,
        )
        evicted = [row["name"] for row in cursor.fetchall()]
        conn.execute(
            f"""
            DELETE FROM collages
            WHERE name NOT IN ({",".join(["?"] * len(collage_names))})
            """,
            collage_names,
        )
    for name in evicted:
        logger.info(f"Evicted collage {name} from cache")


def list_releases(
c: Config,
sanitized_artist_filter: str | None = None,
Expand Down Expand Up @@ -1113,6 +1133,17 @@ def get_release_files(c: Config, release_virtual_dirname: str) -> ReleaseFiles:
return rf


def get_release_id_from_virtual_dirname(c: Config, release_virtual_dirname: str) -> str | None:
    """Resolve a release's virtual dirname to its cached UUID.

    Returns None when no release with that virtual dirname is cached.
    """
    with connect(c) as conn:
        cursor = conn.execute(
            "SELECT id FROM releases WHERE virtual_dirname = ?",
            (release_virtual_dirname,),
        )
        row = cursor.fetchone()
        return row["id"] if row else None


def list_artists(c: Config) -> Iterator[str]:
with connect(c) as conn:
cursor = conn.execute("SELECT DISTINCT artist FROM releases_artists")
Expand Down Expand Up @@ -1170,7 +1201,9 @@ def release_exists(c: Config, virtual_dirname: str) -> Path | None:


def track_exists(
c: Config, release_virtual_dirname: str, track_virtual_filename: str
c: Config,
release_virtual_dirname: str,
track_virtual_filename: str,
) -> Path | None:
with connect(c) as conn:
cursor = conn.execute(
Expand Down
2 changes: 0 additions & 2 deletions rose/cache.sql
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,6 @@ CREATE TABLE collages_releases (
);
CREATE INDEX collages_releases_collage_name ON collages_releases(collage_name);
CREATE INDEX collages_releases_release_id ON collages_releases(release_id);
CREATE UNIQUE INDEX collages_releases_collage_position ON collages_releases(collage_name, position);

CREATE TABLE playlists (
name TEXT PRIMARY KEY,
Expand All @@ -126,4 +125,3 @@ CREATE TABLE playlists_tracks (
);
CREATE INDEX playlists_tracks_playlist_name ON playlists_tracks(playlist_name);
CREATE INDEX playlists_tracks_track_id ON playlists_tracks(track_id);
CREATE UNIQUE INDEX playlists_tracks_playlist_position ON playlists_tracks(playlist_name, position);
79 changes: 15 additions & 64 deletions rose/cache_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,12 @@
STORED_DATA_FILE_REGEX,
CachedArtist,
CachedRelease,
CachedTrack,
artist_exists,
collage_exists,
connect,
cover_exists,
genre_exists,
get_release_files,
get_release_id_from_virtual_dirname,
label_exists,
list_artists,
list_collage_releases,
Expand All @@ -28,7 +27,7 @@
release_exists,
track_exists,
update_cache,
update_cache_delete_nonexistent_releases,
update_cache_evict_nonexistent_releases,
update_cache_for_releases,
)
from rose.config import Config
Expand Down Expand Up @@ -64,6 +63,7 @@ def test_migration(config: Config) -> None:
TESTDATA = Path(__file__).resolve().parent.parent / "testdata" / "cache"
TEST_RELEASE_1 = TESTDATA / "Test Release 1"
TEST_RELEASE_2 = TESTDATA / "Test Release 2"
TEST_RELEASE_3 = TESTDATA / "Test Release 3"
TEST_COLLAGE_1 = TESTDATA / "Collage 1"


Expand Down Expand Up @@ -113,7 +113,7 @@ def test_update_cache_releases(config: Config) -> None:
)
row = cursor.fetchone()
assert row["source_path"] == str(release_dir)
assert row["title"] == "A Cool Album"
assert row["title"] == "I Love Blackpink"
assert row["release_type"] == "album"
assert row["release_year"] == 1990
assert row["new"]
Expand All @@ -123,7 +123,7 @@ def test_update_cache_releases(config: Config) -> None:
(release_id,),
)
genres = {r["genre"] for r in cursor.fetchall()}
assert genres == {"Electronic", "House"}
assert genres == {"K-Pop", "Pop"}

cursor = conn.execute(
"SELECT label FROM releases_labels WHERE release_id = ?",
Expand All @@ -138,8 +138,7 @@ def test_update_cache_releases(config: Config) -> None:
)
artists = {(r["artist"], r["role"]) for r in cursor.fetchall()}
assert artists == {
("Artist A", "main"),
("Artist B", "main"),
("BLACKPINK", "main"),
}

for f in release_dir.iterdir():
Expand All @@ -157,7 +156,7 @@ def test_update_cache_releases(config: Config) -> None:
)
row = cursor.fetchone()
track_id = row["id"]
assert row["title"] == "Title"
assert row["title"].startswith("Track")
assert row["release_id"] == release_id
assert row["track_number"] != ""
assert row["disc_number"] == "1"
Expand All @@ -169,18 +168,8 @@ def test_update_cache_releases(config: Config) -> None:
)
artists = {(r["artist"], r["role"]) for r in cursor.fetchall()}
assert artists == {
("Artist GH", "main"),
("Artist HI", "main"),
("Artist C", "guest"),
("Artist A", "guest"),
("Artist AB", "remixer"),
("Artist BC", "remixer"),
("Artist CD", "producer"),
("Artist DE", "producer"),
("Artist EF", "composer"),
("Artist FG", "composer"),
("Artist IJ", "djmixer"),
("Artist JK", "djmixer"),
("BLACKPINK", "main"),
("Teddy", "composer"),
}


Expand Down Expand Up @@ -212,7 +201,7 @@ def test_update_cache_releases_already_fully_cached(config: Config) -> None:
)
row = cursor.fetchone()
assert row["source_path"] == str(release_dir)
assert row["title"] == "A Cool Album"
assert row["title"] == "I Love Blackpink"
assert row["release_type"] == "album"
assert row["release_year"] == 1990
assert row["new"]
Expand All @@ -237,7 +226,7 @@ def test_update_cache_releases_disk_update_to_previously_cached(config: Config)
)
row = cursor.fetchone()
assert row["source_path"] == str(release_dir)
assert row["title"] == "A Cool Album"
assert row["title"] == "I Love Blackpink"
assert row["release_type"] == "album"
assert row["release_year"] == 1990
assert row["new"]
Expand Down Expand Up @@ -293,7 +282,7 @@ def test_update_cache_releases_source_path_renamed(config: Config) -> None:
)
row = cursor.fetchone()
assert row["source_path"] == str(moved_release_dir)
assert row["title"] == "A Cool Album"
assert row["title"] == "I Love Blackpink"
assert row["release_type"] == "album"
assert row["release_year"] == 1990
assert row["new"]
Expand All @@ -308,7 +297,7 @@ def test_update_cache_releases_delete_nonexistent(config: Config) -> None:
VALUES ('aaaaaa', '/nonexistent', '999', 'nonexistent', 'aa', 'unknown', false, 'aa;aa')
""" # noqa: E501
)
update_cache_delete_nonexistent_releases(config)
update_cache_evict_nonexistent_releases(config)
with connect(config) as conn:
cursor = conn.execute("SELECT COUNT(*) FROM releases")
assert cursor.fetchone()[0] == 0
Expand Down Expand Up @@ -495,46 +484,8 @@ def test_list_releases(config: Config) -> None:


@pytest.mark.usefixtures("seeded_cache")
def test_get_release_files(config: Config) -> None:
rf = get_release_files(config, "r1")
assert rf.tracks == [
CachedTrack(
source_mtime=rf.tracks[0].source_mtime, # IGNORE THIS FIELD.
id="t1",
source_path=Path(config.music_source_dir / "r1" / "01.m4a"),
virtual_filename="01.m4a",
title="Track 1",
release_id="r1",
track_number="01",
disc_number="01",
duration_seconds=120,
artists=[
CachedArtist(name="Techno Man", role="main"),
CachedArtist(name="Bass Man", role="main"),
],
formatted_artists="Techno Man;Bass Man",
),
CachedTrack(
source_mtime=rf.tracks[1].source_mtime, # IGNORE THIS FIELD.
id="t2",
source_path=Path(config.music_source_dir / "r1" / "02.m4a"),
virtual_filename="02.m4a",
title="Track 2",
release_id="r1",
track_number="02",
disc_number="01",
duration_seconds=240,
artists=[
CachedArtist(name="Techno Man", role="main"),
CachedArtist(name="Bass Man", role="main"),
],
formatted_artists="Techno Man;Bass Man",
),
]
assert rf.cover is None

rf = get_release_files(config, "r2")
assert rf.cover == config.music_source_dir / "r2" / "cover.jpg"
def test_get_release_id_from_virtual_dirname(config: Config) -> None:
    # The seeded cache maps virtual dirname "r1" to release id "r1".
    release_id = get_release_id_from_virtual_dirname(config, "r1")
    assert release_id == "r1"


@pytest.mark.usefixtures("seeded_cache")
Expand Down
68 changes: 68 additions & 0 deletions rose/collage.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
import logging
from pathlib import Path

import tomli_w
import tomllib

from rose.cache import (
get_release_id_from_virtual_dirname,
update_cache_evict_nonexistent_collages,
update_cache_for_collages,
)
from rose.config import Config

logger = logging.getLogger(__name__)


def delete_release_from_collage(
    c: Config,
    collage_name: str,
    release_virtual_dirname: str,
) -> None:
    """Remove a release from a collage's source `.toml` and refresh its cache.

    Looks up the release's UUID from its virtual dirname, filters the matching
    entry out of the collage's ``releases`` list on disk, and forces a cache
    update for the collage so the removal is reflected immediately.
    """
    # NOTE(review): release_id is None when the virtual dirname is not cached;
    # the filter below is then a no-op (assuming no entry stores a null uuid) —
    # confirm that silently doing nothing is the intended behavior.
    release_id = get_release_id_from_virtual_dirname(c, release_virtual_dirname)
    fpath = collage_path(c, collage_name)
    with fpath.open("rb") as fp:
        data = tomllib.load(fp)
    # A missing "releases" key is treated as an empty collage. (The original
    # also pre-assigned data["releases"] here, but that value was immediately
    # overwritten by this filtering assignment — dead code, removed.)
    data["releases"] = [r for r in data.get("releases", []) if r["uuid"] != release_id]
    with fpath.open("wb") as fp:
        tomli_w.dump(data, fp)
    update_cache_for_collages(c, [collage_name], force=True)


def add_release_to_collage(
    c: Config,
    collage_name: str,
    release_virtual_dirname: str,
) -> None:
    """Append a release to a collage's source `.toml` and refresh its cache.

    Duplicate entries are not supported: if the release is already present in
    the collage, this is a no-op.
    """
    release_id = get_release_id_from_virtual_dirname(c, release_virtual_dirname)
    fpath = collage_path(c, collage_name)
    with fpath.open("rb") as fp:
        diskdata = tomllib.load(fp)
    releases = diskdata.get("releases", [])
    diskdata["releases"] = releases
    # Bail out early if the release is already in the collage.
    if any(entry["uuid"] == release_id for entry in releases):
        logger.debug(
            f"No-Opping: Release {release_virtual_dirname} already in collage {collage_name}."
        )
        return
    releases.append({"uuid": release_id, "description_meta": release_virtual_dirname})
    with fpath.open("wb") as fp:
        tomli_w.dump(diskdata, fp)
    update_cache_for_collages(c, [collage_name], force=True)


def create_collage(c: Config, collage_name: str) -> None:
    """Create an empty collage file on disk and prime its cache entry."""
    path = collage_path(c, collage_name)
    path.touch()
    update_cache_for_collages(c, [collage_name], force=True)


def delete_collage(c: Config, collage_name: str) -> None:
    """Delete a collage's file from disk and evict it from the cache.

    Raises FileNotFoundError if the collage file does not exist.
    """
    path = collage_path(c, collage_name)
    path.unlink()
    update_cache_evict_nonexistent_collages(c)


def collage_path(c: Config, name: str) -> Path:
    """Return the on-disk path of the collage's `.toml` source file."""
    collages_dir = c.music_source_dir / "!collages"
    return collages_dir / (name + ".toml")
Loading

0 comments on commit 44a066f

Please sign in to comment.