Skip to content

Commit

Permalink
update playlist dump
Browse files Browse the repository at this point in the history
  • Loading branch information
azuline committed Nov 3, 2023
1 parent 5831b19 commit 7ad3c23
Show file tree
Hide file tree
Showing 4 changed files with 68 additions and 20 deletions.
13 changes: 13 additions & 0 deletions rose/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,19 @@ class CachedTrack:
artists: list[CachedArtist]
formatted_artists: str

def dump(self) -> dict[str, Any]:
    """Serialize this cached track into a JSON-safe dictionary.

    The source path is resolved to an absolute string, and each artist is
    serialized via its own ``dump()`` method.
    """
    serialized: dict[str, Any] = {}
    serialized["id"] = self.id
    serialized["source_path"] = str(self.source_path.resolve())
    serialized["title"] = self.title
    serialized["release_id"] = self.release_id
    serialized["tracknumber"] = self.track_number
    serialized["discnumber"] = self.disc_number
    serialized["duration_seconds"] = self.duration_seconds
    serialized["artists"] = [artist.dump() for artist in self.artists]
    serialized["formatted_artists"] = self.formatted_artists
    return serialized


@dataclass
class CachedCollage:
Expand Down
3 changes: 1 addition & 2 deletions rose/collages.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,7 @@ def add_release_to_collage(

def dump_collages(c: Config) -> str:
out: list[dict[str, Any]] = []
collage_names = list(list_collages(c))
for name in collage_names:
for name in list_collages(c):
cdata = get_collage(c, name)
assert cdata is not None
releases: list[dict[str, Any]] = []
Expand Down
29 changes: 14 additions & 15 deletions rose/playlists.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,21 +140,20 @@ def add_track_to_playlist(


def dump_playlists(c: Config) -> str:
    """Serialize all playlists to a JSON string.

    Each playlist entry carries its name, an optional cover image path, and
    its tracks in playlist order, where every track is the full
    ``CachedTrack.dump()`` payload augmented with a 1-based ``position``.
    """
    out: list[dict[str, Any]] = []
    for name in list_playlists(c):
        pdata = get_playlist(c, name)
        assert pdata is not None
        playlist, cached_tracks = pdata
        tracks = [
            {"position": position, **track.dump()}
            for position, track in enumerate(cached_tracks, start=1)
        ]
        # Cover path is optional; serialize as None when the playlist has none.
        cover = str(playlist.cover_path) if playlist.cover_path else None
        out.append(
            {
                "name": name,
                "cover_image_path": cover,
                "tracks": tracks,
            }
        )
    return json.dumps(out)


Expand Down
43 changes: 40 additions & 3 deletions rose/playlists_test.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import json
import shutil
from pathlib import Path
from typing import Any
Expand Down Expand Up @@ -125,9 +126,45 @@ def test_rename_playlist(config: Config, source_dir: Path) -> None:
@pytest.mark.usefixtures("seeded_cache")
def test_dump_playlists(config: Config) -> None:
    """dump_playlists emits every playlist, including empty ones, with cover
    paths and position-annotated full track dumps."""
    output = dump_playlists(config)
    expected = [
        {
            "name": "Lala Lisa",
            "cover_image_path": f"{config.music_source_dir}/!playlists/Lala Lisa.jpg",
            "tracks": [
                {
                    "position": 1,
                    "id": "t1",
                    "title": "Track 1",
                    "release_id": "r1",
                    "source_path": f"{config.music_source_dir}/r1/01.m4a",
                    "tracknumber": "01",
                    "discnumber": "01",
                    "duration_seconds": 120,
                    "formatted_artists": "Techno Man;Bass Man",
                    "artists": [
                        {"alias": False, "name": "Bass Man", "role": "main"},
                        {"alias": False, "name": "Techno Man", "role": "main"},
                    ],
                },
                {
                    "position": 2,
                    "id": "t3",
                    "title": "Track 1",
                    "release_id": "r2",
                    "source_path": f"{config.music_source_dir}/r2/01.m4a",
                    "tracknumber": "01",
                    "discnumber": "01",
                    "duration_seconds": 120,
                    "formatted_artists": "Violin Woman feat. Conductor Woman",
                    "artists": [
                        {"alias": False, "name": "Conductor Woman", "role": "guest"},
                        {"alias": False, "name": "Violin Woman", "role": "main"},
                    ],
                },
            ],
        },
        # A playlist with no cover and no tracks still appears in the dump.
        {"name": "Turtle Rabbit", "cover_image_path": None, "tracks": []},
    ]
    assert json.loads(output) == expected


def test_edit_playlists_ordering(monkeypatch: Any, config: Config, source_dir: Path) -> None:
Expand Down

0 comments on commit 7ad3c23

Please sign in to comment.