1"""MusicController: Orchestrates all data from music providers and sync to internal database."""
2
3from __future__ import annotations
4
5import asyncio
6import logging
7import os
8import shutil
9import time
10from collections.abc import Iterable, Sequence
11from contextlib import suppress
12from copy import deepcopy
13from datetime import datetime
14from itertools import zip_longest
15from math import inf
16from typing import TYPE_CHECKING, Any, Final, cast
17
18import numpy as np
19from music_assistant_models.config_entries import ConfigEntry, ConfigValueType
20from music_assistant_models.enums import (
21 ConfigEntryType,
22 EventType,
23 MediaType,
24 ProviderFeature,
25 ProviderType,
26)
27from music_assistant_models.errors import (
28 InvalidProviderID,
29 InvalidProviderURI,
30 MediaNotFoundError,
31 MusicAssistantError,
32)
33from music_assistant_models.helpers import get_global_cache_value
34from music_assistant_models.media_items import (
35 Artist,
36 AudioFormat,
37 BrowseFolder,
38 ItemMapping,
39 MediaItemType,
40 ProviderMapping,
41 RecommendationFolder,
42 SearchResults,
43 Track,
44)
45from music_assistant_models.provider import SyncTask
46from music_assistant_models.unique_list import UniqueList
47
48from music_assistant.constants import (
49 DB_TABLE_ALBUM_ARTISTS,
50 DB_TABLE_ALBUM_TRACKS,
51 DB_TABLE_ALBUMS,
52 DB_TABLE_ARTISTS,
53 DB_TABLE_AUDIOBOOKS,
54 DB_TABLE_GENRE_MEDIA_ITEM_MAPPING,
55 DB_TABLE_GENRES,
56 DB_TABLE_LOUDNESS_MEASUREMENTS,
57 DB_TABLE_PLAYLISTS,
58 DB_TABLE_PLAYLOG,
59 DB_TABLE_PODCASTS,
60 DB_TABLE_PROVIDER_MAPPINGS,
61 DB_TABLE_RADIOS,
62 DB_TABLE_SETTINGS,
63 DB_TABLE_SMART_FADES_ANALYSIS,
64 DB_TABLE_TRACK_ARTISTS,
65 DB_TABLE_TRACKS,
66 DEFAULT_GENRE_MAPPING,
67 PROVIDERS_WITH_SHAREABLE_URLS,
68)
69from music_assistant.controllers.streams.smart_fades.fades import SMART_CROSSFADE_DURATION
70from music_assistant.controllers.webserver.helpers.auth_middleware import get_current_user
71from music_assistant.helpers.api import api_command
72from music_assistant.helpers.compare import compare_strings, compare_version, create_safe_string
73from music_assistant.helpers.database import UNSET, DatabaseConnection
74from music_assistant.helpers.datetime import utc_timestamp
75from music_assistant.helpers.json import json_dumps, json_loads, serialize_to_json
76from music_assistant.helpers.tags import split_artists
77from music_assistant.helpers.uri import parse_uri
78from music_assistant.helpers.util import TaskManager, parse_optional_bool, parse_title_and_version
79from music_assistant.models.core_controller import CoreController
80from music_assistant.models.music_provider import MusicProvider
81from music_assistant.models.smart_fades import SmartFadesAnalysis, SmartFadesAnalysisFragment
82
83from .media.albums import AlbumsController
84from .media.artists import ArtistsController
85from .media.audiobooks import AudiobooksController
86from .media.genres import GenreController
87from .media.playlists import PlaylistController
88from .media.podcasts import PodcastsController
89from .media.radio import RadioController
90from .media.tracks import TracksController
91
92if TYPE_CHECKING:
93 from music_assistant_models.auth import User
94 from music_assistant_models.config_entries import CoreConfig
95 from music_assistant_models.media_items import Audiobook, PodcastEpisode
96
97 from music_assistant import MusicAssistant
98
99
100CONF_RESET_DB = "reset_db"
101DEFAULT_SYNC_INTERVAL = 12 * 60 # default sync interval in minutes
102CONF_SYNC_INTERVAL = "sync_interval"
103CONF_DELETED_PROVIDERS = "deleted_providers"
104DB_SCHEMA_VERSION: Final[int] = 27
105
106CACHE_CATEGORY_LAST_SYNC: Final[int] = 9
107CACHE_CATEGORY_SEARCH_RESULTS: Final[int] = 10
108LAST_PROVIDER_INSTANCE_SCAN: Final[str] = "last_provider_instance_scan"
109PROVIDER_INSTANCE_SCAN_INTERVAL: Final[int] = 30 * 24 * 60 * 60 # one month in seconds
110
111
112class MusicController(CoreController):
113 """Several helpers around the musicproviders."""
114
115 domain: str = "music"
116 config: CoreConfig
117
118 def __init__(self, mass: MusicAssistant) -> None:
119 """Initialize class."""
120 super().__init__(mass)
121 self.cache = self.mass.cache
122 self.artists = ArtistsController(self.mass)
123 self.albums = AlbumsController(self.mass)
124 self.tracks = TracksController(self.mass)
125 self.radio = RadioController(self.mass)
126 self.playlists = PlaylistController(self.mass)
127 self.audiobooks = AudiobooksController(self.mass)
128 self.podcasts = PodcastsController(self.mass)
129 self.genres = GenreController(self.mass)
130 self.in_progress_syncs: list[SyncTask] = []
131 self._database: DatabaseConnection | None = None
132 self._sync_lock = asyncio.Lock()
133 self.manifest.name = "Music controller"
134 self.manifest.description = (
135 "Music Assistant's core controller which manages all music from all providers."
136 )
137 self.manifest.icon = "archive-music"
138
139 @property
140 def database(self) -> DatabaseConnection:
141 """Return the database connection."""
142 if self._database is None:
143 raise RuntimeError("Database not initialized")
144 return self._database
145
146 async def get_config_entries(
147 self,
148 action: str | None = None,
149 values: dict[str, ConfigValueType] | None = None,
150 ) -> tuple[ConfigEntry, ...]:
151 """Return all Config Entries for this core module (if any)."""
152 entries = (
153 ConfigEntry(
154 key=CONF_RESET_DB,
155 type=ConfigEntryType.ACTION,
156 label="Reset library database",
157 description="This will issue a full reset of the library "
158 "database and trigger a full sync. Only use this option as a last resort "
159 "if you are seeing issues with the library database.",
160 category="generic",
161 advanced=True,
162 ),
163 )
164 if action == CONF_RESET_DB:
165 await self._reset_database()
166 await self.mass.cache.clear()
167 await self.start_sync()
168 entries = (
169 *entries,
170 ConfigEntry(
171 key=CONF_RESET_DB,
172 type=ConfigEntryType.LABEL,
173 label="The database has been reset.",
174 ),
175 )
176 return entries
177
178 async def setup(self, config: CoreConfig) -> None:
179 """Async initialize of module."""
180 self.config = config
181 # setup library database
182 await self._setup_database()
183 # make sure to finish any removal jobs
184 for removed_provider in self.mass.config.get_raw_core_config_value(
185 self.domain, CONF_DELETED_PROVIDERS, []
186 ):
187 await self.cleanup_provider(removed_provider)
188 # schedule cleanup task for matching provider instances
189 last_scan = cast(
190 "int",
191 self.mass.config.get_raw_core_config_value(self.domain, LAST_PROVIDER_INSTANCE_SCAN, 0),
192 )
193 if time.time() - last_scan > PROVIDER_INSTANCE_SCAN_INTERVAL:
194 self.mass.call_later(60, self.correct_multi_instance_provider_mappings)
195
196 async def close(self) -> None:
197 """Cleanup on exit."""
198 if self._database:
199 await self._database.close()
200
201 async def on_provider_loaded(self, provider: MusicProvider) -> None:
202 """Handle logic when a provider is loaded."""
203 await self.schedule_provider_sync(provider.instance_id)
204
205 async def on_provider_unload(self, provider: MusicProvider) -> None:
206 """Handle logic when a provider is (about to get) unloaded."""
207 # make sure to stop any running sync tasks first
208 for sync_task in list(self.in_progress_syncs):
209 if sync_task.provider_instance == provider.instance_id:
210 if sync_task.task:
211 sync_task.task.cancel()
212
213 @property
214 def providers(self) -> list[MusicProvider]:
215 """
216 Return all loaded/running MusicProviders (instances).
217
218 Note that this applies user provider filters (for all user types).
219 """
220 user = get_current_user()
221 user_provider_filter = user.provider_filter if user else None
222 return [
223 x
224 for x in self.mass.providers
225 if x.type == ProviderType.MUSIC
226 and (not user_provider_filter or x.instance_id in user_provider_filter)
227 ]
228
229 @api_command("music/sync")
230 async def start_sync(
231 self,
232 media_types: list[MediaType] | None = None,
233 providers: list[str] | None = None,
234 ) -> None:
235 """Start running the sync of (all or selected) musicproviders.
236
237 media_types: only sync these media types. None for all.
238 providers: only sync these provider instances. None for all.
239 """
240 if media_types is None:
241 media_types = MediaType.ALL
242 if providers is None:
243 providers = [x.instance_id for x in self.providers]
244
245 for media_type in media_types:
246 for provider in self.providers:
247 if provider.instance_id not in providers:
248 continue
249 if not provider.library_supported(media_type):
250 continue
251 # handle mediatype specific sync config
252 conf_key = f"library_sync_{media_type}s"
253 sync_conf = await self.mass.config.get_provider_config_value(
254 provider.instance_id, conf_key
255 )
256 if not sync_conf:
257 continue
258 self._start_provider_sync(provider, media_type)
259
260 @api_command("music/synctasks")
261 def get_running_sync_tasks(self) -> list[SyncTask]:
262 """Return list with providers that are currently (scheduled for) syncing."""
263 return self.in_progress_syncs
264
265 @api_command("music/search")
266 async def search(
267 self,
268 search_query: str,
269 media_types: list[MediaType] = MediaType.ALL,
270 limit: int = 25,
271 library_only: bool = False,
272 ) -> SearchResults:
273 """Perform global search for media items on all providers.
274
275 :param search_query: Search query.
276 :param media_types: A list of media_types to include.
277 :param limit: number of items to return in the search (per type).
278 """
279 # use cache to avoid repeated searches
280 search_providers = sorted(self.get_unique_providers())
281 cache_provider_key = "library" if library_only else ",".join(search_providers)
282 cache_key = f"{search_query}{'-'.join(sorted([mt.value for mt in media_types]))}-{limit}-{library_only}-{cache_provider_key}" # noqa: E501
283 if cache := await self.mass.cache.get(
284 key=cache_key, provider=self.domain, category=CACHE_CATEGORY_SEARCH_RESULTS
285 ):
286 return cache
287 if not media_types:
288 media_types = MediaType.ALL
289 # Check if the search query is a streaming provider public shareable URL
290 try:
291 media_type, provider_instance_id_or_domain, item_id = await parse_uri(
292 search_query, validate_id=True
293 )
294 except InvalidProviderURI:
295 pass
296 except InvalidProviderID as err:
297 self.logger.warning("%s", str(err))
298 return SearchResults()
299 else:
300 # handle special case of direct shareable url search
301 if provider_instance_id_or_domain in PROVIDERS_WITH_SHAREABLE_URLS:
302 try:
303 item = await self.get_item(
304 media_type=media_type,
305 item_id=item_id,
306 provider_instance_id_or_domain=provider_instance_id_or_domain,
307 )
308 except MusicAssistantError as err:
309 self.logger.warning("%s", str(err))
310 return SearchResults()
311 else:
312 if media_type == MediaType.ARTIST:
313 return SearchResults(artists=[item])
314 if media_type == MediaType.ALBUM:
315 return SearchResults(albums=[item])
316 if media_type == MediaType.TRACK:
317 return SearchResults(tracks=[item])
318 if media_type == MediaType.PLAYLIST:
319 return SearchResults(playlists=[item])
320 if media_type == MediaType.AUDIOBOOK:
321 return SearchResults(audiobooks=[item])
322 if media_type == MediaType.PODCAST:
323 return SearchResults(podcasts=[item])
324 return SearchResults()
325 # handle normal global search by querying all providers
326 results_per_provider: list[SearchResults] = []
327 # always first search the library
328 library_results = await self.search_library(search_query, media_types, limit=limit)
329 results_per_provider.append(library_results)
330 if not library_only:
331 # create a set of all provider item ids already in library
332 # this way we can avoid returning duplicates in the search results
333 all_prov_item_ids = {
334 (item.media_type, prov_mapping.provider_domain, prov_mapping.item_id)
335 for items in (
336 library_results.artists,
337 library_results.albums,
338 library_results.tracks,
339 library_results.playlists,
340 library_results.audiobooks,
341 library_results.podcasts,
342 )
343 for item in items
344 for prov_mapping in item.provider_mappings
345 }
346 # include results from library + all (unique) music providers
347 results_per_provider += await asyncio.gather(
348 *[
349 self._search_provider(
350 search_query,
351 provider_instance,
352 media_types,
353 limit=limit,
354 skip_item_ids=all_prov_item_ids,
355 )
356 for provider_instance in search_providers
357 ],
358 )
359 # return result from all providers while keeping index
360 # so the result is sorted as each provider delivered
361 result = SearchResults(
362 artists=[
363 item
364 for sublist in zip_longest(*[x.artists for x in results_per_provider])
365 for item in sublist
366 if item is not None
367 ][:limit],
368 albums=[
369 item
370 for sublist in zip_longest(*[x.albums for x in results_per_provider])
371 for item in sublist
372 if item is not None
373 ][:limit],
374 tracks=[
375 item
376 for sublist in zip_longest(*[x.tracks for x in results_per_provider])
377 for item in sublist
378 if item is not None
379 ][:limit],
380 playlists=[
381 item
382 for sublist in zip_longest(*[x.playlists for x in results_per_provider])
383 for item in sublist
384 if item is not None
385 ][:limit],
386 radio=[
387 item
388 for sublist in zip_longest(*[x.radio for x in results_per_provider])
389 for item in sublist
390 if item is not None
391 ][:limit],
392 audiobooks=[
393 item
394 for sublist in zip_longest(*[x.audiobooks for x in results_per_provider])
395 for item in sublist
396 if item is not None
397 ][:limit],
398 podcasts=[
399 item
400 for sublist in zip_longest(*[x.podcasts for x in results_per_provider])
401 for item in sublist
402 if item is not None
403 ][:limit],
404 )
405
406 # the search results should already be sorted by relevance
407 # but we apply one extra round of sorting and that is to put exact name
408 # matches and library items first
409 result.artists = self._sort_search_result(search_query, result.artists)
410 result.albums = self._sort_search_result(search_query, result.albums)
411 result.tracks = self._sort_search_result(search_query, result.tracks)
412 result.playlists = self._sort_search_result(search_query, result.playlists)
413 result.radio = self._sort_search_result(search_query, result.radio)
414 result.audiobooks = self._sort_search_result(search_query, result.audiobooks)
415 result.podcasts = self._sort_search_result(search_query, result.podcasts)
416 await self.mass.cache.set(
417 key=cache_key,
418 data=result,
419 expiration=600,
420 provider=self.domain,
421 category=CACHE_CATEGORY_SEARCH_RESULTS,
422 )
423 return result
424
425 async def _search_provider(
426 self,
427 search_query: str,
428 provider_instance_id_or_domain: str,
429 media_types: list[MediaType],
430 limit: int = 10,
431 skip_item_ids: set[tuple[MediaType, str, str]] | None = None,
432 ) -> SearchResults:
433 """Perform search on given provider.
434
435 :param search_query: Search query
436 :param provider_instance_id_or_domain: instance_id or domain of the provider
437 to perform the search on.
438 :param media_types: A list of media_types to include.
439 :param limit: number of items to return in the search (per type).
440 """
441 prov = self.mass.get_provider(provider_instance_id_or_domain)
442 if not prov:
443 return SearchResults()
444 if ProviderFeature.SEARCH not in prov.supported_features:
445 return SearchResults()
446
447 # create safe search string
448 search_query = search_query.replace("/", " ").replace("'", "")
449 prov_search_results = await prov.search(
450 search_query,
451 media_types,
452 limit,
453 )
454 if skip_item_ids:
455 # filter out items already in skip_item_ids
456 prov_search_results.artists = [
457 item
458 for item in prov_search_results.artists
459 if (item.media_type, prov.domain, item.item_id) not in skip_item_ids
460 ]
461 prov_search_results.albums = [
462 item
463 for item in prov_search_results.albums
464 if (item.media_type, prov.domain, item.item_id) not in skip_item_ids
465 ]
466 prov_search_results.tracks = [
467 item
468 for item in prov_search_results.tracks
469 if (item.media_type, prov.domain, item.item_id) not in skip_item_ids
470 ]
471 prov_search_results.playlists = [
472 item
473 for item in prov_search_results.playlists
474 if (item.media_type, prov.domain, item.item_id) not in skip_item_ids
475 ]
476 prov_search_results.audiobooks = [
477 item
478 for item in prov_search_results.audiobooks
479 if (item.media_type, prov.domain, item.item_id) not in skip_item_ids
480 ]
481 prov_search_results.podcasts = [
482 item
483 for item in prov_search_results.podcasts
484 if (item.media_type, prov.domain, item.item_id) not in skip_item_ids
485 ]
486 return prov_search_results
487
488 async def search_library(
489 self,
490 search_query: str,
491 media_types: list[MediaType],
492 limit: int = 10,
493 ) -> SearchResults:
494 """Perform search on the library.
495
496 :param search_query: Search query
497 :param media_types: A list of media_types to include.
498 :param limit: number of items to return in the search (per type).
499 """
500 result = SearchResults()
501 for media_type in media_types:
502 ctrl = self.get_controller(media_type)
503 search_results = await ctrl.search(search_query, "library", limit=limit)
504 if search_results:
505 if media_type == MediaType.ARTIST:
506 result.artists = search_results
507 elif media_type == MediaType.ALBUM:
508 result.albums = search_results
509 elif media_type == MediaType.TRACK:
510 result.tracks = search_results
511 elif media_type == MediaType.PLAYLIST:
512 result.playlists = search_results
513 elif media_type == MediaType.RADIO:
514 result.radio = search_results
515 elif media_type == MediaType.AUDIOBOOK:
516 result.audiobooks = search_results
517 elif media_type == MediaType.PODCAST:
518 result.podcasts = search_results
519 return result
520
521 @api_command("music/browse")
522 async def browse(self, path: str | None = None) -> Sequence[MediaItemType | BrowseFolder]:
523 """Browse Music providers."""
524 if not path or path == "root":
525 # root level; folder per provider
526 root_items: list[BrowseFolder] = []
527 for prov in self.providers:
528 if ProviderFeature.BROWSE not in prov.supported_features:
529 continue
530 root_items.append(
531 BrowseFolder(
532 item_id="root",
533 provider=prov.domain,
534 path=f"{prov.instance_id}://",
535 uri=f"{prov.instance_id}://",
536 name=prov.name,
537 )
538 )
539 return root_items
540
541 # provider level
542 prepend_items: list[BrowseFolder] = []
543 provider_instance, sub_path = path.split("://", 1)
544 prov = self.mass.get_provider(provider_instance)
545 # handle regular provider listing, always add back folder first
546 if not prov or not sub_path:
547 prepend_items.append(
548 BrowseFolder(item_id="root", provider="library", path="root", name="..")
549 )
550 if not prov:
551 return prepend_items
552 else:
553 back_path = f"{provider_instance}://" + "/".join(sub_path.split("/")[:-1])
554 prepend_items.append(
555 BrowseFolder(
556 item_id="back",
557 provider=provider_instance,
558 path=back_path,
559 name="..",
560 )
561 )
562 # limit -1 to account for the prepended items
563 prov_items = await prov.browse(path=path)
564 return prepend_items + prov_items
565
566 @api_command("music/recently_played_items")
567 async def recently_played(
568 self,
569 limit: int = 10,
570 media_types: list[MediaType] | None = None,
571 userid: str | None = None,
572 queue_id: str | None = None,
573 fully_played_only: bool = True,
574 user_initiated_only: bool = False,
575 ) -> list[ItemMapping]:
576 """Return a list of the last played items.
577
578 :param limit: Maximum number of items to return.
579 :param media_types: Filter by media types.
580 :param userid: Filter by specific user ID.
581 :param queue_id: Filter by specific queue ID.
582 :param fully_played_only: If True, only return fully played items.
583 :param user_initiated_only: If True, only return items initiated by the user.
584 """
585 if media_types is None:
586 media_types = MediaType.ALL
587 media_types_str = "(" + ",".join(f'"{x}"' for x in media_types) + ")"
588 available_providers = ("library", *self.get_unique_providers())
589 available_providers_str = "(" + ",".join(f'"{x}"' for x in available_providers) + ")"
590 query = (
591 f"SELECT * FROM {DB_TABLE_PLAYLOG} "
592 f"WHERE media_type in {media_types_str} "
593 f"AND provider in {available_providers_str} "
594 )
595 params: dict[str, Any] = {}
596 if fully_played_only:
597 query += "AND fully_played = 1 "
598 if user_initiated_only:
599 query += "AND user_initiated = 1 "
600 if userid:
601 query += "AND userid = :userid "
602 params["userid"] = userid
603 elif user := get_current_user():
604 query += "AND userid = :userid "
605 params["userid"] = user.user_id
606 if queue_id:
607 query += "AND queue_id = :queue_id "
608 params["queue_id"] = queue_id
609 query += "ORDER BY timestamp DESC"
610 db_rows = await self.mass.music.database.get_rows_from_query(
611 query, params=params or None, limit=limit
612 )
613 result: list[ItemMapping] = []
614 available_providers = ("library", *get_global_cache_value("available_providers", []))
615
616 # Get user provider filter if set
617 user = get_current_user()
618 user_provider_filter = user.provider_filter if user and user.provider_filter else None
619
620 for db_row in db_rows:
621 provider = db_row["provider"]
622 # Apply user provider filter
623 if user_provider_filter and provider not in user_provider_filter:
624 continue
625 result.append(
626 ItemMapping.from_dict(
627 {
628 "item_id": db_row["item_id"],
629 "provider": provider,
630 "media_type": db_row["media_type"],
631 "name": db_row["name"],
632 "image": json_loads(db_row["image"]) if db_row["image"] else None,
633 "available": provider in available_providers,
634 }
635 )
636 )
637 return result
638
639 @api_command("music/recently_added_tracks")
640 async def recently_added_tracks(self, limit: int = 10) -> list[Track]:
641 """Return a list of the last added tracks."""
642 return await self.tracks.library_items(limit=limit, order_by="timestamp_added_desc")
643
644 @api_command("music/in_progress_items")
645 async def in_progress_items(
646 self, limit: int = 10, all_users: bool = False
647 ) -> list[ItemMapping]:
648 """Return a list of the Audiobooks and PodcastEpisodes that are in progress."""
649 available_providers = ("library", *self.get_unique_providers())
650 available_providers_str = "(" + ",".join(f'"{x}"' for x in available_providers) + ")"
651 query = (
652 f"SELECT * FROM {DB_TABLE_PLAYLOG} "
653 f"WHERE media_type in ('audiobook', 'podcast_episode') AND fully_played = 0 "
654 f"AND provider in {available_providers_str} "
655 "AND seconds_played > 0 "
656 )
657 if not all_users and (user := get_current_user()):
658 query += f"AND userid = '{user.user_id}' "
659
660 query += "ORDER BY timestamp DESC"
661 db_rows = await self.mass.music.database.get_rows_from_query(query, limit=limit)
662 result: list[ItemMapping] = []
663
664 # Get user provider filter if set
665 user = get_current_user()
666 user_provider_filter = user.provider_filter if user and user.provider_filter else None
667
668 for db_row in db_rows:
669 provider = db_row["provider"]
670 # Apply user provider filter
671 if user_provider_filter and provider not in user_provider_filter:
672 continue
673 result.append(
674 ItemMapping.from_dict(
675 {
676 "item_id": db_row["item_id"],
677 "provider": provider,
678 "media_type": db_row["media_type"],
679 "name": db_row["name"],
680 "image": json_loads(db_row["image"]) if db_row["image"] else None,
681 "available": provider in available_providers,
682 }
683 )
684 )
685 return result
686
687 async def get_playlog_provider_item_ids(
688 self, provider_instance_id: str, limit: int = 0, userid: str | None = None
689 ) -> list[tuple[MediaType, str]]:
690 """Return a list of MediaType and provider_item_id of items in playlog of provider."""
691 # check if there is a provider user
692 # this method is not available in the frontend, so no need to check for session users.
693 user: User | None = None
694 if userid:
695 # userid overridden by parameter
696 user = await self.mass.webserver.auth.get_user(userid)
697 elif provider_user := await self._get_user_for_provider(provider_instance_id):
698 # based on configured provider filter we can try to find a user
699 user = provider_user
700
701 query = (
702 f"SELECT * FROM {DB_TABLE_PLAYLOG} "
703 "WHERE media_type in ('audiobook', 'podcast_episode') "
704 f"AND provider in ('library','{provider_instance_id}')"
705 )
706
707 if user:
708 # NOTE: if no user was found, we will return playlog items for all users
709 query += f" AND userid = '{user.user_id}'"
710 db_rows = await self.mass.music.database.get_rows_from_query(query, limit=limit)
711
712 result: list[tuple[MediaType, str]] = []
713 for db_row in db_rows:
714 if db_row["provider"] == "library":
715 # If the provider is library, we need to make sure that the item
716 # is part of the passed provider_instance_id.
717 # A podcast_episode cannot be in the provider_mappings
718 # so these entries must be audiobooks.
719 subquery = (
720 f"SELECT * FROM {DB_TABLE_PROVIDER_MAPPINGS} "
721 f"WHERE media_type = 'audiobook' AND item_id = {db_row['item_id']} "
722 f"AND provider_instance = '{provider_instance_id}'"
723 )
724 subrow = await self.mass.music.database.get_rows_from_query(subquery)
725 if len(subrow) != 1:
726 continue
727 result.append((MediaType.AUDIOBOOK, subrow[0]["provider_item_id"]))
728 continue
729 # non library - item id is provider_item_id
730 result.append((MediaType(db_row["media_type"]), db_row["item_id"]))
731
732 return result
733
734 @api_command("music/item_by_uri")
735 async def get_item_by_uri(self, uri: str) -> MediaItemType | BrowseFolder:
736 """Fetch MediaItem by uri."""
737 media_type, provider_instance_id_or_domain, item_id = await parse_uri(uri)
738 return await self.get_item(
739 media_type=media_type,
740 item_id=item_id,
741 provider_instance_id_or_domain=provider_instance_id_or_domain,
742 )
743
744 @api_command("music/recommendations")
745 async def recommendations(self) -> list[RecommendationFolder]:
746 """Get all recommendations."""
747 recommendation_providers = [
748 x for x in self.providers if ProviderFeature.RECOMMENDATIONS in x.supported_features
749 ]
750 results_per_provider: list[list[RecommendationFolder]] = await asyncio.gather(
751 self._get_default_recommendations(),
752 *[
753 self._get_provider_recommendations(provider_instance)
754 for provider_instance in recommendation_providers
755 ],
756 )
757 # return result from all providers while keeping index
758 # so the result is sorted as each provider delivered
759 return [item for sublist in zip_longest(*results_per_provider) for item in sublist if item]
760
761 @api_command("music/item")
762 async def get_item(
763 self,
764 media_type: MediaType,
765 item_id: str,
766 provider_instance_id_or_domain: str,
767 ) -> MediaItemType | BrowseFolder:
768 """Get single music item by id and media type."""
769 if provider_instance_id_or_domain == "database":
770 # backwards compatibility - to remove when 2.0 stable is released
771 provider_instance_id_or_domain = "library"
772 if provider_instance_id_or_domain == "builtin":
773 # handle special case of 'builtin' MusicProvider which allows us to play regular url's
774 return await self.mass.get_provider("builtin").parse_item(item_id)
775 if media_type == MediaType.PODCAST_EPISODE:
776 # special case for podcast episodes
777 return await self.podcasts.episode(item_id, provider_instance_id_or_domain)
778 if media_type == MediaType.FOLDER:
779 # special case for folders
780 return BrowseFolder(
781 item_id=item_id,
782 provider=provider_instance_id_or_domain,
783 name=item_id,
784 )
785 ctrl = self.get_controller(media_type)
786 return await ctrl.get(
787 item_id=item_id,
788 provider_instance_id_or_domain=provider_instance_id_or_domain,
789 )
790
791 @api_command("music/get_library_item")
792 async def get_library_item_by_prov_id(
793 self,
794 media_type: MediaType,
795 item_id: str,
796 provider_instance_id_or_domain: str,
797 ) -> MediaItemType | None:
798 """Get single library music item by id and media type."""
799 ctrl = self.get_controller(media_type)
800 return await ctrl.get_library_item_by_prov_id(
801 item_id=item_id,
802 provider_instance_id_or_domain=provider_instance_id_or_domain,
803 )
804
805 @api_command("music/favorites/add_item")
806 async def add_item_to_favorites(
807 self,
808 item: str | MediaItemType | ItemMapping,
809 ) -> None:
810 """Add an item to the favorites."""
811 if isinstance(item, str):
812 item = await self.get_item_by_uri(item)
813 # make sure we have a full library item
814 # a favorite must always be in the library
815 full_item = await self.get_item(
816 item.media_type,
817 item.item_id,
818 item.provider,
819 )
820 if full_item.provider != "library":
821 full_item = await self.add_item_to_library(full_item)
822 # set favorite in library db
823 ctrl = self.get_controller(item.media_type)
824 await ctrl.set_favorite(
825 full_item.item_id,
826 True,
827 )
828 # forward to provider(s) if needed
829 for prov_mapping in full_item.provider_mappings:
830 provider = self.mass.get_provider(prov_mapping.provider_instance)
831 if not provider or not provider.library_favorites_edit_supported(full_item.media_type):
832 continue
833 await provider.set_favorite(prov_mapping.item_id, full_item.media_type, True)
834
835 @api_command("music/favorites/remove_item")
836 async def remove_item_from_favorites(
837 self,
838 media_type: MediaType,
839 library_item_id: str | int,
840 ) -> None:
841 """Remove (library) item from the favorites."""
842 ctrl = self.get_controller(media_type)
843 await ctrl.set_favorite(
844 library_item_id,
845 False,
846 )
847 # forward to provider(s) if needed
848 full_item = await ctrl.get_library_item(library_item_id)
849 for prov_mapping in full_item.provider_mappings:
850 provider = self.mass.get_provider(prov_mapping.provider_instance)
851 if not provider or not provider.library_favorites_edit_supported(full_item.media_type):
852 continue
853 self.mass.create_task(provider.set_favorite(prov_mapping.item_id, media_type, False))
854
855 @api_command("music/library/remove_item")
856 async def remove_item_from_library(
857 self, media_type: MediaType, library_item_id: str | int, recursive: bool = True
858 ) -> None:
859 """
860 Remove item from the library.
861
862 Destructive! Will remove the item and all dependants.
863 """
864 ctrl = self.get_controller(media_type)
865 # remove from provider(s) library
866 full_item = await ctrl.get_library_item(library_item_id)
867 for prov_mapping in full_item.provider_mappings:
868 if not prov_mapping.in_library:
869 continue
870 provider = self.mass.get_provider(prov_mapping.provider_instance)
871 if not provider or not provider.library_edit_supported(full_item.media_type):
872 continue
873 if not provider.library_sync_back_enabled(full_item.media_type):
874 continue
875 prov_mapping.in_library = False
876 self.mass.create_task(provider.library_remove(prov_mapping.item_id, media_type))
877 # remove from library
878 await ctrl.remove_item_from_library(library_item_id, recursive)
879
    @api_command("music/library/add_item")
    async def add_item_to_library(
        self, item: str | MediaItemType | ItemMapping, overwrite_existing: bool = False
    ) -> MediaItemType:
        """Add item (uri or mediaitem) to the library.

        :param item: URI string, full media item or ItemMapping to add.
        :param overwrite_existing: overwrite an already existing library entry.
        :return: the resulting library item.
        """
        if isinstance(item, ItemMapping):
            # handle browse results that are returned as ItemMappings
            item = item.uri
        # ensure we have a full item
        if isinstance(item, str):
            full_item = await self.get_item_by_uri(item)
        # For builtin provider (manual URLs), use the provided item directly
        # to preserve custom modifications (name, images, etc.)
        # For other providers, fetch fresh to ensure data validity
        elif item.provider == "builtin":
            full_item = item
        else:
            full_item = await self.get_item(
                item.media_type,
                item.item_id,
                item.provider,
            )
        # add to provider(s) library first
        for prov_mapping in full_item.provider_mappings:
            # we optimistically set in library to True to prevent items
            # from disappearing when the provider doesn't support library edit
            # or 2-way sync is disabled.
            prov_mapping.in_library = True
            provider = self.mass.get_provider(prov_mapping.provider_instance)
            if not provider or not provider.library_edit_supported(full_item.media_type):
                continue
            if not provider.library_sync_back_enabled(full_item.media_type):
                continue
            # NOTE(review): only library items are deep-copied here; for non-library
            # items the provider/item_id of full_item itself is overwritten below,
            # which also affects the later library add — verify this is intended
            # when an item has multiple provider mappings.
            prov_item = deepcopy(full_item) if full_item.provider == "library" else full_item
            prov_item.provider = prov_mapping.provider_instance
            prov_item.item_id = prov_mapping.item_id
            self.mass.create_task(provider.library_add(prov_item))
        # add (or overwrite) to library
        ctrl = self.get_controller(full_item.media_type)
        library_item = await ctrl.add_item_to_library(full_item, overwrite_existing)
        # perform full metadata scan
        await self.mass.metadata.update_metadata(library_item, overwrite_existing)
        return library_item
923
924 async def refresh_items(self, items: list[MediaItemType]) -> None:
925 """Refresh MediaItems to force retrieval of full info and matches.
926
927 Creates background tasks to process the action.
928 """
929 async with TaskManager(self.mass) as tg:
930 for media_item in items:
931 tg.create_task(self.refresh_item(media_item))
932
    @api_command("music/refresh_item")
    async def refresh_item(  # noqa: PLR0915
        self,
        media_item: str | MediaItemType,
    ) -> MediaItemType | None:
        """
        Try to refresh a mediaitem by requesting its full object or search for substitutes.

        :param media_item: URI string or (partial) media item to refresh.
        :return: the refreshed provider or library item, or None.
        """
        if isinstance(media_item, str):
            # media item uri given
            media_item = await self.get_item_by_uri(media_item)

        media_type = media_item.media_type
        ctrl = self.get_controller(media_type)

        # genres are library-only items with no provider mappings, nothing to refresh
        if media_type == MediaType.GENRE:
            return media_item

        # remember the library id (if any) so we can update the library entry later
        library_id = media_item.item_id if media_item.provider == "library" else None

        # cache in_library state before the provider fetch overwrites media_item
        in_library_cache: dict[tuple[str, str], bool] = {}
        for m in media_item.provider_mappings:
            if m.in_library is not None:
                in_library_cache[(m.provider_instance, m.item_id)] = m.in_library

        # NOTE(review): available_providers is resolved here but not consulted
        # below — availability is checked via mass.get_provider instead; verify
        # whether this lookup is still needed.
        available_providers = get_global_cache_value("available_providers")
        if TYPE_CHECKING:
            available_providers = cast("set[str]", available_providers)

        # fetch the first (available) provider item, highest priority mapping first
        for prov_mapping in sorted(
            media_item.provider_mappings, key=lambda x: x.priority, reverse=True
        ):
            if not self.mass.get_provider(prov_mapping.provider_instance):
                # ignore unavailable providers
                continue
            with suppress(MediaNotFoundError):
                media_item = await ctrl.get_provider_item(
                    prov_mapping.item_id,
                    prov_mapping.provider_instance,
                    force_refresh=True,
                )
                provider = media_item.provider
                item_id = media_item.item_id
                break
        else:
            # no provider mapping could be fetched:
            # try to find a substitute using search
            searchresult = await self.search(media_item.name, [media_item.media_type], 20)
            if media_item.media_type == MediaType.ARTIST:
                result = searchresult.artists
            elif media_item.media_type == MediaType.ALBUM:
                result = searchresult.albums
            elif media_item.media_type == MediaType.TRACK:
                result = searchresult.tracks
            elif media_item.media_type == MediaType.PLAYLIST:
                result = searchresult.playlists
            elif media_item.media_type == MediaType.AUDIOBOOK:
                result = searchresult.audiobooks
            elif media_item.media_type == MediaType.PODCAST:
                result = searchresult.podcasts
            else:
                result = searchresult.radio
            for item in result:
                # skip the item itself and library-only results
                if item == media_item or item.provider == "library":
                    continue
                if item.available:
                    provider = item.provider
                    item_id = item.item_id
                    break
            else:
                # raise if we didn't find a substitute
                raise MediaNotFoundError(f"Could not find a substitute for {media_item.name}")
        # fetch full (provider) item
        media_item = await ctrl.get_provider_item(item_id, provider, force_refresh=True)
        # update library item if needed (including refresh of the metadata etc.)
        if library_id is None:
            return media_item
        # restore in_library state from before the refresh
        for prov_mapping in media_item.provider_mappings:
            key = (prov_mapping.provider_instance, prov_mapping.item_id)
            if prov_mapping.in_library is None and key in in_library_cache:
                prov_mapping.in_library = in_library_cache[key]
        library_item = await ctrl.update_item_in_library(library_id, media_item, overwrite=True)
        if library_item.media_type == MediaType.ALBUM:
            # update (local) album tracks
            for album_track in await self.albums.tracks(
                library_item.item_id, library_item.provider, True
            ):
                for prov_mapping in album_track.provider_mappings:
                    if not (prov := self.mass.get_provider(prov_mapping.provider_instance)):
                        continue
                    if prov.is_streaming_provider:
                        # streaming provider tracks are refreshed through regular sync
                        continue
                    with suppress(MediaNotFoundError):
                        prov_track = await prov.get_track(prov_mapping.item_id)
                        await self.mass.music.tracks.update_item_in_library(
                            album_track.item_id, prov_track
                        )
        await ctrl.match_providers(library_item)
        await self.mass.metadata.update_metadata(library_item, force_refresh=True)
        return library_item
1034
1035 async def set_loudness(
1036 self,
1037 item_id: str,
1038 provider_instance_id_or_domain: str,
1039 loudness: float,
1040 album_loudness: float | None = None,
1041 media_type: MediaType = MediaType.TRACK,
1042 ) -> None:
1043 """Store (EBU-R128) Integrated Loudness Measurement for a mediaitem in db."""
1044 if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
1045 return
1046 if loudness in (None, inf, -inf):
1047 # skip invalid values
1048 return
1049 # prefer domain for streaming providers as the catalog is the same across instances
1050 prov_key = provider.domain if provider.is_streaming_provider else provider.instance_id
1051 values = {
1052 "item_id": item_id,
1053 "media_type": media_type.value,
1054 "provider": prov_key,
1055 "loudness": loudness,
1056 }
1057 if album_loudness not in (None, inf, -inf):
1058 values["loudness_album"] = album_loudness
1059 await self.database.insert_or_replace(DB_TABLE_LOUDNESS_MEASUREMENTS, values)
1060
1061 async def set_smart_fades_analysis(
1062 self,
1063 item_id: str,
1064 provider_instance_id_or_domain: str,
1065 analysis: SmartFadesAnalysis,
1066 ) -> None:
1067 """Store Smart Fades BPM analysis for a track in db."""
1068 if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
1069 return
1070 if (
1071 analysis.duration <= 0.75 * SMART_CROSSFADE_DURATION
1072 or analysis.bpm <= 0
1073 or analysis.confidence < 0
1074 ):
1075 # skip invalid values, we skip analysis that were performed on
1076 # a short amount of audio as those are often unreliable
1077 return
1078 beats_json = await asyncio.to_thread(lambda: json_dumps(analysis.beats.tolist()))
1079 downbeats_json = await asyncio.to_thread(lambda: json_dumps(analysis.downbeats.tolist()))
1080 # prefer domain for streaming providers as the catalog is the same across instances
1081 prov_key = provider.domain if provider.is_streaming_provider else provider.instance_id
1082 values = {
1083 "fragment": analysis.fragment.value,
1084 "item_id": item_id,
1085 "provider": prov_key,
1086 "bpm": analysis.bpm,
1087 "beats": beats_json,
1088 "downbeats": downbeats_json,
1089 "confidence": analysis.confidence,
1090 "duration": analysis.duration,
1091 }
1092 await self.database.insert_or_replace(DB_TABLE_SMART_FADES_ANALYSIS, values)
1093
1094 async def get_smart_fades_analysis(
1095 self,
1096 item_id: str,
1097 provider_instance_id_or_domain: str,
1098 fragment: SmartFadesAnalysisFragment,
1099 ) -> SmartFadesAnalysis | None:
1100 """Get Smart Fades BPM analysis for a track from db."""
1101 if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
1102 return None
1103 # prefer domain for streaming providers as the catalog is the same across instances
1104 prov_key = provider.domain if provider.is_streaming_provider else provider.instance_id
1105 db_row = await self.database.get_row(
1106 DB_TABLE_SMART_FADES_ANALYSIS,
1107 {
1108 "item_id": item_id,
1109 "provider": prov_key,
1110 "fragment": fragment.value,
1111 },
1112 )
1113 if db_row and db_row["bpm"] > 0:
1114 beats = await asyncio.to_thread(lambda: np.array(json_loads(db_row["beats"])))
1115 downbeats = await asyncio.to_thread(lambda: np.array(json_loads(db_row["downbeats"])))
1116 return SmartFadesAnalysis(
1117 fragment=SmartFadesAnalysisFragment(db_row["fragment"]),
1118 bpm=float(db_row["bpm"]),
1119 beats=beats,
1120 downbeats=downbeats,
1121 confidence=float(db_row["confidence"]),
1122 duration=float(db_row["duration"]),
1123 )
1124 return None
1125
1126 async def get_loudness(
1127 self,
1128 item_id: str,
1129 provider_instance_id_or_domain: str,
1130 media_type: MediaType = MediaType.TRACK,
1131 ) -> tuple[float, float | None] | None:
1132 """Get (EBU-R128) Integrated Loudness Measurement for a mediaitem in db."""
1133 if not (provider := self.mass.get_provider(provider_instance_id_or_domain)):
1134 return None
1135 # prefer domain for streaming providers as the catalog is the same across instances
1136 prov_key = provider.domain if provider.is_streaming_provider else provider.instance_id
1137 db_row = await self.database.get_row(
1138 DB_TABLE_LOUDNESS_MEASUREMENTS,
1139 {
1140 "item_id": item_id,
1141 "media_type": media_type.value,
1142 "provider": prov_key,
1143 },
1144 )
1145 if db_row and db_row["loudness"] != inf and db_row["loudness"] != -inf:
1146 loudness = round(db_row["loudness"], 2)
1147 loudness_album = db_row["loudness_album"]
1148 loudness_album = (
1149 None if loudness_album in (None, inf, -inf) else round(loudness_album, 2)
1150 )
1151 return (loudness, loudness_album)
1152
1153 return None
1154
    @api_command("music/mark_played")
    async def mark_item_played(
        self,
        media_item: MediaItemType,
        fully_played: bool = True,
        seconds_played: int | None = None,
        is_playing: bool = False,
        userid: str | None = None,
        queue_id: str | None = None,
        user_initiated: bool = True,
    ) -> None:
        """
        Mark item as played in playlog.

        :param media_item: The media item to mark as played.
        :param fully_played: If True, mark the item as fully played.
        :param seconds_played: The number of seconds played.
        :param is_playing: If True, the item is currently playing.
        :param userid: The user ID to mark the item as played for (instead of the current user).
        :param queue_id: The queue ID where the item was played.
        :param user_initiated: If True, the playback was initiated by the user (e.g. enqueued).
        """
        timestamp = utc_timestamp()
        if (
            media_item.provider.startswith("builtin")
            and media_item.media_type != MediaType.PLAYLIST
        ):
            # we deliberately skip builtin provider items as those are often
            # one-off items like TTS or some sound effect etc.
            return

        # base values for the playlog entry (userid is filled in per user below)
        params = {
            "item_id": media_item.item_id,
            "provider": media_item.provider,
            "media_type": media_item.media_type.value,
            "name": media_item.name,
            "image": serialize_to_json(media_item.image.to_dict()) if media_item.image else None,
            "fully_played": fully_played,
            "seconds_played": seconds_played,
            "timestamp": timestamp,
            "queue_id": queue_id,
            "user_initiated": user_initiated,
        }
        # try to figure out the user that triggered the action
        user: User | None = None
        if userid:
            # userid overridden by parameter
            user = await self.mass.webserver.auth.get_user(userid)
        elif session_user := get_current_user():
            # this is the active session user that triggered the action
            user = session_user
        elif provider_user := await self._get_user_for_provider(media_item.provider_mappings):
            # based on configured provider filter we can try to find a user
            user = provider_user

        # update generic playlog table (when not playing)
        if not is_playing:
            if user:
                user_ids = [user.user_id]
            else:
                # NOTE: if no user was found, we will alter the playlog for all users
                user_ids = [user.user_id for user in await self.mass.webserver.auth.list_users()]
            for user_id in user_ids:
                params["userid"] = user_id
                await self.database.insert(
                    DB_TABLE_PLAYLOG,
                    params,
                    allow_replace=True,
                )

        # forward to provider(s) to sync resume state (e.g. for audiobooks)
        for prov_mapping in media_item.provider_mappings:
            # respect the resolved user's provider filter (if any)
            if (
                user
                and user.provider_filter
                and prov_mapping.provider_instance not in user.provider_filter
            ):
                continue
            if music_prov := self.mass.get_provider(prov_mapping.provider_instance):
                self.mass.create_task(
                    music_prov.on_played(
                        media_type=media_item.media_type,
                        prov_item_id=prov_mapping.item_id,
                        fully_played=fully_played,
                        position=seconds_played,
                        media_item=media_item,
                        is_playing=is_playing,
                    )
                )

        # also update playcount in library table (if fully played)
        if not fully_played or is_playing:
            return
        if not (ctrl := self.get_controller(media_item.media_type)):
            # skip non media items (e.g. plugin source)
            return
        db_item = await ctrl.get_library_item_by_prov_id(media_item.item_id, media_item.provider)
        if db_item:
            # NOTE(review): values are interpolated directly into the SQL string;
            # presumably timestamp and item_id are always numeric — verify.
            await self.database.execute(
                f"UPDATE {ctrl.db_table} SET play_count = play_count + 1, "
                f"last_played = {timestamp} WHERE item_id = {db_item.item_id}"
            )
            await self.database.commit()
1258
    @api_command("music/mark_unplayed")
    async def mark_item_unplayed(
        self,
        media_item: MediaItemType,
        userid: str | None = None,
    ) -> None:
        """
        Mark item as unplayed in playlog.

        :param media_item: The media item to mark as unplayed.
        :param userid: The user ID to mark the item as unplayed for (instead of the current user).
        """
        # match criteria for the playlog row(s) to delete (userid added per user below)
        params = {
            "item_id": media_item.item_id,
            "provider": media_item.provider,
            "media_type": media_item.media_type.value,
        }
        # try to figure out the user that triggered the action
        user: User | None = None
        if userid:
            # userid overridden by parameter
            user = await self.mass.webserver.auth.get_user(userid)
        elif session_user := get_current_user():
            # this is the active session user that triggered the action
            user = session_user
        elif provider_user := await self._get_user_for_provider(media_item.provider_mappings):
            # based on configured provider filter we can try to find a user
            user = provider_user

        if user:
            user_ids = [user.user_id]
        else:
            # NOTE: if no user was found, we will alter the playlog for all users
            user_ids = [user.user_id for user in await self.mass.webserver.auth.list_users()]
        for user_id in user_ids:
            params["userid"] = user_id
            await self.database.delete(DB_TABLE_PLAYLOG, params)

        # forward to provider(s) to sync resume state (e.g. for audiobooks)
        for prov_mapping in media_item.provider_mappings:
            # respect the resolved user's provider filter (if any)
            if (
                user
                and user.provider_filter
                and prov_mapping.provider_instance not in user.provider_filter
            ):
                continue
            if music_prov := self.mass.get_provider(prov_mapping.provider_instance):
                self.mass.create_task(
                    music_prov.on_played(
                        media_type=media_item.media_type,
                        prov_item_id=prov_mapping.item_id,
                        fully_played=False,
                        position=0,
                        media_item=media_item,
                    )
                )
        # also update playcount in library table
        ctrl = self.get_controller(media_item.media_type)
        db_item = await ctrl.get_library_item_by_prov_id(media_item.item_id, media_item.provider)
        if db_item:
            # NOTE(review): play_count is decremented unconditionally and may go
            # negative if the item was never counted as played — verify intended.
            await self.database.execute(
                f"UPDATE {ctrl.db_table} SET play_count = play_count - 1, "
                f"last_played = 0 WHERE item_id = {db_item.item_id}"
            )
            await self.database.commit()
1325
1326 @api_command("music/track_by_name")
1327 async def get_track_by_name(
1328 self,
1329 track_name: str,
1330 artist_name: str | None = None,
1331 album_name: str | None = None,
1332 track_version: str | None = None,
1333 ) -> Track | None:
1334 """Get a track by its name, optionally with artist and album."""
1335 if track_version is None:
1336 track_name, version = parse_title_and_version(track_name)
1337 search_query = f"{artist_name} - {track_name}" if artist_name else track_name
1338 search_result = await self.mass.music.search(
1339 search_query=search_query,
1340 media_types=[MediaType.TRACK],
1341 )
1342 for allow_item_mapping in (False, True):
1343 for search_track in search_result.tracks:
1344 is_track = isinstance(search_track, Track)
1345 if not allow_item_mapping and not is_track:
1346 continue
1347 if not compare_strings(track_name, search_track.name):
1348 continue
1349 if not compare_version(version, search_track.version):
1350 continue
1351 # check optional artist(s)
1352 if artist_name and is_track:
1353 for artist in search_track.artists:
1354 if compare_strings(artist_name, artist.name, False):
1355 break
1356 else:
1357 # no artist match found: abort
1358 continue
1359 # check optional album
1360 if (
1361 album_name
1362 and is_track
1363 and not compare_strings(album_name, search_track.album.name, False)
1364 ):
1365 # no album match found: abort
1366 continue
1367 # if we reach this, we found a match
1368 if not isinstance(search_track, Track):
1369 # ensure we return an actual Track object
1370 return await self.mass.music.tracks.get(
1371 item_id=search_track.item_id,
1372 provider_instance_id_or_domain=search_track.provider,
1373 )
1374 return search_track
1375
1376 # try to handle case where something is appended to the title
1377 for splitter in ("•", "-", "|", "(", "["):
1378 if splitter in track_name:
1379 return await self.get_track_by_name(
1380 track_name=track_name.split(splitter)[0].strip(),
1381 artist_name=artist_name,
1382 album_name=None,
1383 track_version=track_version,
1384 )
1385 # try to handle case where multiple artists are given as single string
1386 if artist_name and (artists := split_artists(artist_name, True)) and len(artists) > 1:
1387 for artist in artists:
1388 return await self.get_track_by_name(
1389 track_name=track_name,
1390 artist_name=artist.split(splitter)[0].strip(),
1391 album_name=None,
1392 track_version=track_version,
1393 )
1394 # allow non-exact album match as fallback
1395 if album_name:
1396 return await self.get_track_by_name(
1397 track_name=track_name,
1398 artist_name=artist_name,
1399 album_name=None,
1400 track_version=track_version,
1401 )
1402 # no match found
1403 return None
1404
    async def get_resume_position(
        self, media_item: Audiobook | PodcastEpisode, userid: str | None = None
    ) -> tuple[bool, int]:
        """
        Get progress (resume point) details for the given audiobook or episode.

        This is a separate call to ensure the resume position is always up-to-date
        and because many providers have this info present on a dedicated endpoint.

        Will be called right before playback starts to ensure the resume position is correct.

        :param media_item: the audiobook or podcast episode to get progress for.
        :param userid: optional user id to scope the playlog lookup to.

        Returns a boolean with the fully_played status
        and an integer with the resume position in ms.
        """
        provider_fully_played = False
        provider_position_ms = 0

        # Try to get position from providers
        for prov_mapping in media_item.provider_mappings:
            if not (
                provider := self.mass.get_provider(
                    prov_mapping.provider_instance, provider_type=MusicProvider
                )
            ):
                continue
            # providers without resume support raise NotImplementedError; try the next
            with suppress(NotImplementedError):
                (
                    provider_fully_played,
                    provider_position_ms,
                ) = await provider.get_resume_position(prov_mapping.item_id, media_item.media_type)
                break  # Use first provider that returns data

        # Get MA's internal position from playlog
        ma_fully_played = False
        ma_position_ms = 0
        params = {
            "media_type": media_item.media_type.value,
            "item_id": media_item.item_id,
            "provider": media_item.provider,
        }
        if userid:
            params["userid"] = userid
        if db_entry := await self.database.get_row(DB_TABLE_PLAYLOG, params):
            # playlog stores seconds; convert to ms (missing/NULL counts as 0)
            ma_position_ms = db_entry["seconds_played"] * 1000 if db_entry["seconds_played"] else 0
            ma_fully_played = parse_optional_bool(db_entry["fully_played"])

        # Return the higher position to ensure users never lose progress
        if ma_position_ms >= provider_position_ms:
            return ma_fully_played, ma_position_ms
        return provider_fully_played, provider_position_ms
1455
1456 def get_controller(
1457 self, media_type: MediaType
1458 ) -> (
1459 ArtistsController
1460 | AlbumsController
1461 | TracksController
1462 | RadioController
1463 | PlaylistController
1464 | AudiobooksController
1465 | PodcastsController
1466 | GenreController
1467 ):
1468 """Return controller for MediaType."""
1469 if media_type == MediaType.ARTIST:
1470 return self.artists
1471 if media_type == MediaType.ALBUM:
1472 return self.albums
1473 if media_type == MediaType.TRACK:
1474 return self.tracks
1475 if media_type == MediaType.RADIO:
1476 return self.radio
1477 if media_type == MediaType.PLAYLIST:
1478 return self.playlists
1479 if media_type == MediaType.AUDIOBOOK:
1480 return self.audiobooks
1481 if media_type == MediaType.PODCAST:
1482 return self.podcasts
1483 if media_type == MediaType.PODCAST_EPISODE:
1484 return self.podcasts
1485 if media_type == MediaType.GENRE:
1486 return self.genres
1487 raise NotImplementedError
1488
1489 def get_provider_instances(
1490 self, domain: str, return_unavailable: bool = False
1491 ) -> list[MusicProvider]:
1492 """
1493 Return all provider instances for a given domain.
1494
1495 Note that this skips user filters so may only be called from internal code.
1496 """
1497 return cast(
1498 "list[MusicProvider]",
1499 self.mass.get_provider_instances(domain, return_unavailable, ProviderType.MUSIC),
1500 )
1501
1502 def get_unique_providers(self) -> list[str]:
1503 """
1504 Return all unique MusicProvider (instance or domain) ids.
1505
1506 This will return a set of provider instance ids but will only return
1507 a single instance_id per streaming provider domain.
1508
1509 Applies user provider filters (for non-admin users).
1510 """
1511 processed_domains: set[str] = set()
1512 # Get user provider filter if set
1513 user = get_current_user()
1514 user_provider_filter = user.provider_filter if user and user.provider_filter else None
1515 result: list[str] = []
1516 for provider in self.providers:
1517 if provider.is_streaming_provider and provider.domain in processed_domains:
1518 continue
1519 if user_provider_filter and provider.instance_id not in user_provider_filter:
1520 continue
1521 result.append(provider.instance_id)
1522 processed_domains.add(provider.domain)
1523 return result
1524
1525 async def cleanup_provider(self, provider_instance: str) -> None:
1526 """Cleanup provider records from the database."""
1527 if provider_instance.startswith(("filesystem", "jellyfin", "plex", "opensubsonic")):
1528 # removal of a local provider can become messy very fast due to the relations
1529 # such as images pointing at the files etc. so we just reset the whole db
1530 # TODO: Handle this more gracefully in the future where we remove the provider
1531 # and traverse the database to also remove all related items.
1532 self.logger.warning(
1533 "Removal of local provider detected, issuing full database reset..."
1534 )
1535 await self._reset_database()
1536 return
1537 deleted_providers = self.mass.config.get_raw_core_config_value(
1538 self.domain, CONF_DELETED_PROVIDERS, []
1539 )
1540 # we add the provider to this hidden config setting just to make sure that
1541 # we can survive this over a restart to make sure that entries are cleaned up
1542 if provider_instance not in deleted_providers:
1543 deleted_providers.append(provider_instance)
1544 self.mass.config.set_raw_core_config_value(
1545 self.domain, CONF_DELETED_PROVIDERS, deleted_providers
1546 )
1547 self.mass.config.save(True)
1548
1549 # always clear cache when a provider is removed
1550 await self.mass.cache.clear()
1551
1552 # cleanup media items from db matched to deleted provider
1553 self.logger.info(
1554 "Removing provider %s from library, this can take a a while...",
1555 provider_instance,
1556 )
1557 errors = 0
1558 for ctrl in (
1559 # order is important here to recursively cleanup bottom up
1560 self.mass.music.radio,
1561 self.mass.music.playlists,
1562 self.mass.music.tracks,
1563 self.mass.music.albums,
1564 self.mass.music.artists,
1565 self.mass.music.podcasts,
1566 self.mass.music.audiobooks,
1567 # run main controllers twice to rule out relations
1568 self.mass.music.tracks,
1569 self.mass.music.albums,
1570 self.mass.music.artists,
1571 ):
1572 query = (
1573 f"SELECT item_id FROM {DB_TABLE_PROVIDER_MAPPINGS} "
1574 f"WHERE media_type = '{ctrl.media_type}' "
1575 f"AND provider_instance = '{provider_instance}'"
1576 )
1577 for db_row in await self.database.get_rows_from_query(query, limit=100000):
1578 try:
1579 await ctrl.remove_provider_mappings(db_row["item_id"], provider_instance)
1580 except Exception as err:
1581 # we dont want the whole removal process to stall on one item
1582 # so in case of an unexpected error, we log and move on.
1583 self.logger.warning(
1584 "Error while removing %s: %s",
1585 db_row["item_id"],
1586 str(err),
1587 exc_info=err if self.logger.isEnabledFor(logging.DEBUG) else None,
1588 )
1589 errors += 1
1590
1591 # remove all orphaned items (not in provider mappings table anymore)
1592 query = (
1593 f"SELECT item_id FROM {DB_TABLE_PROVIDER_MAPPINGS} "
1594 f"WHERE provider_instance = '{provider_instance}'"
1595 )
1596 if remaining_items_count := await self.database.get_count_from_query(query):
1597 errors += remaining_items_count
1598
1599 # cleanup playlog table
1600 await self.mass.music.database.delete(
1601 DB_TABLE_PLAYLOG,
1602 {
1603 "provider": provider_instance,
1604 },
1605 )
1606
1607 if errors == 0:
1608 # cleanup successful, remove from the deleted_providers setting
1609 self.logger.info("Provider %s removed from library", provider_instance)
1610 deleted_providers.remove(provider_instance)
1611 self.mass.config.set_raw_core_config_value(
1612 self.domain, CONF_DELETED_PROVIDERS, deleted_providers
1613 )
1614 else:
1615 self.logger.warning(
1616 "Provider %s was not not fully removed from library", provider_instance
1617 )
1618
1619 async def schedule_provider_sync(self, provider_instance_id: str) -> None:
1620 """Schedule Library sync for given provider."""
1621 if not (provider := self.mass.get_provider(provider_instance_id)):
1622 return
1623 self.unschedule_provider_sync(provider.instance_id)
1624 for media_type in MediaType:
1625 if not provider.library_supported(media_type):
1626 continue
1627 await self._schedule_provider_mediatype_sync(provider, media_type, True)
1628
1629 def unschedule_provider_sync(self, provider_instance_id: str) -> None:
1630 """Unschedule Library sync for given provider."""
1631 # cancel all scheduled sync tasks
1632 for media_type in MediaType:
1633 key = f"sync_{provider_instance_id}_{media_type.value}"
1634 self.mass.cancel_timer(key)
1635 # cancel any running sync tasks
1636 for sync_task in list(self.in_progress_syncs):
1637 if sync_task.provider_instance == provider_instance_id:
1638 sync_task.task.cancel()
1639
1640 def match_provider_instances(
1641 self,
1642 item: MediaItemType,
1643 ) -> bool:
1644 """Match all provider instances for the given item."""
1645 mappings_added = False
1646 for provider_mapping in list(item.provider_mappings):
1647 if provider_mapping.is_unique:
1648 # unique mapping, no need to map
1649 continue
1650 if not (provider := self.mass.get_provider(provider_mapping.provider_instance)):
1651 continue
1652 if not provider.is_streaming_provider:
1653 continue
1654 provider_instances = self.get_provider_instances(
1655 provider.domain, return_unavailable=True
1656 )
1657 if len(provider_instances) <= 1:
1658 # only a single instance, no need to map
1659 continue
1660 for prov_instance in provider_instances:
1661 if prov_instance.instance_id == provider.instance_id:
1662 continue
1663 if any(
1664 pm.provider_instance == prov_instance.instance_id
1665 for pm in item.provider_mappings
1666 ):
1667 # mapping already exists
1668 continue
1669 # create additional mapping for other provider instances of the same provider
1670 item.provider_mappings.add(
1671 ProviderMapping(
1672 item_id=provider_mapping.item_id,
1673 provider_domain=provider.domain,
1674 provider_instance=prov_instance.instance_id,
1675 available=provider_mapping.available,
1676 is_unique=provider_mapping.is_unique,
1677 audio_format=provider_mapping.audio_format,
1678 url=provider_mapping.url,
1679 details=provider_mapping.details,
1680 in_library=None,
1681 )
1682 )
1683 mappings_added = True
1684 return mappings_added
1685
1686 @api_command("music/add_provider_mapping")
1687 async def add_provider_mapping(
1688 self, media_type: MediaType, db_id: str, mapping: ProviderMapping
1689 ) -> None:
1690 """Add provider mapping to the given library item."""
1691 ctrl = self.get_controller(media_type)
1692 await ctrl.add_provider_mappings(db_id, [mapping])
1693
1694 @api_command("music/remove_provider_mapping")
1695 async def remove_provider_mapping(
1696 self, media_type: MediaType, db_id: str, mapping: ProviderMapping
1697 ) -> None:
1698 """Remove provider mapping from the given library item."""
1699 ctrl = self.get_controller(media_type)
1700 await ctrl.remove_provider_mapping(db_id, mapping.provider_instance, mapping.item_id)
1701
1702 @api_command("music/match_providers")
1703 async def match_providers(self, media_type: MediaType, db_id: str) -> None:
1704 """Search for mappings on all providers for the given library item."""
1705 ctrl = self.get_controller(media_type)
1706 db_item = await ctrl.get_library_item(db_id)
1707 await ctrl.match_providers(db_item)
1708
1709 async def update_provider_mapping(
1710 self,
1711 media_type: MediaType,
1712 db_id: str | int,
1713 provider_instance_id: str,
1714 provider_item_id: str,
1715 *,
1716 available: bool | Any = UNSET,
1717 in_library: bool | Any = UNSET,
1718 is_unique: bool | None | Any = UNSET,
1719 url: str | None | Any = UNSET,
1720 details: str | None | Any = UNSET,
1721 audio_format: AudioFormat | Any = UNSET,
1722 ) -> None:
1723 """Update an existing provider mapping for a library item."""
1724 ctrl = self.get_controller(media_type)
1725 await ctrl.update_provider_mapping(
1726 item_id=db_id,
1727 provider_instance_id=provider_instance_id,
1728 provider_item_id=provider_item_id,
1729 available=available,
1730 in_library=in_library,
1731 is_unique=is_unique,
1732 url=url,
1733 details=details,
1734 audio_format=audio_format,
1735 )
1736
1737 async def _get_default_recommendations(self) -> list[RecommendationFolder]:
1738 """Return default recommendations."""
1739 return [
1740 RecommendationFolder(
1741 item_id="in_progress",
1742 provider="library",
1743 name="In progress",
1744 translation_key="in_progress_items",
1745 icon="mdi-motion-play",
1746 items=await self.in_progress_items(limit=10),
1747 ),
1748 RecommendationFolder(
1749 item_id="recently_played",
1750 provider="library",
1751 name="Recently played",
1752 translation_key="recently_played",
1753 icon="mdi-motion-play",
1754 items=await self.recently_played(limit=10, user_initiated_only=True),
1755 ),
1756 RecommendationFolder(
1757 item_id="recently_added_tracks",
1758 provider="library",
1759 name="Recently added tracks",
1760 translation_key="recently_added_tracks",
1761 icon="music-note-plus",
1762 items=await self.tracks.library_items(limit=10, order_by="timestamp_added_desc"),
1763 ),
1764 RecommendationFolder(
1765 item_id="recently_added_albums",
1766 provider="library",
1767 name="Recently added albums",
1768 translation_key="recently_added_albums",
1769 icon="music-note-plus",
1770 items=await self.albums.library_items(limit=10, order_by="timestamp_added_desc"),
1771 ),
1772 RecommendationFolder(
1773 item_id="random_artists",
1774 provider="library",
1775 name="Random artists",
1776 translation_key="random_artists",
1777 icon="mdi-account-music",
1778 items=await self.artists.library_items(limit=10, order_by="random_play_count"),
1779 ),
1780 RecommendationFolder(
1781 item_id="random_albums",
1782 provider="library",
1783 name="Random albums",
1784 translation_key="random_albums",
1785 icon="mdi-album",
1786 items=await self.albums.library_items(limit=10, order_by="random_play_count"),
1787 ),
1788 RecommendationFolder(
1789 item_id="recent_favorite_tracks",
1790 provider="library",
1791 name="Recently favorited tracks",
1792 translation_key="recent_favorite_tracks",
1793 icon="mdi-file-music",
1794 items=await self.tracks.library_items(
1795 favorite=True, limit=10, order_by="timestamp_modified_desc"
1796 ),
1797 ),
1798 RecommendationFolder(
1799 item_id="favorite_playlists",
1800 provider="library",
1801 name="Favorite playlists",
1802 translation_key="favorite_playlists",
1803 icon="mdi-playlist-music",
1804 items=await self.playlists.library_items(
1805 favorite=True, limit=10, order_by="random"
1806 ),
1807 ),
1808 RecommendationFolder(
1809 item_id="favorite_radio",
1810 provider="library",
1811 name="Favorite Radio stations",
1812 translation_key="favorite_radio_stations",
1813 icon="mdi-access-point",
1814 items=await self.radio.library_items(
1815 favorite=True, limit=10, order_by="play_count_desc"
1816 ),
1817 ),
1818 ]
1819
1820 async def _get_provider_recommendations(
1821 self, provider: MusicProvider
1822 ) -> list[RecommendationFolder]:
1823 """Return recommendations from a provider."""
1824 try:
1825 return await provider.recommendations()
1826 except Exception as err:
1827 self.logger.warning(
1828 "Error while fetching recommendations from %s: %s",
1829 provider.name,
1830 str(err),
1831 exc_info=err if self.logger.isEnabledFor(logging.DEBUG) else None,
1832 )
1833 return []
1834
1835 def _start_provider_sync(self, provider: MusicProvider, media_type: MediaType) -> None:
1836 """Start sync task on provider and track progress."""
1837 # check if we're not already running a sync task for this provider/mediatype
1838 for sync_task in list(self.in_progress_syncs):
1839 if sync_task.provider_instance != provider.instance_id:
1840 continue
1841 if sync_task.task.done():
1842 continue
1843 if media_type in sync_task.media_types:
1844 self.logger.debug(
1845 "Skip sync task for %s/%ss because another task is already in progress",
1846 provider.name,
1847 media_type.value,
1848 )
1849 return
1850
1851 async def run_sync() -> None:
1852 # Wrap the provider sync into a lock to prevent
1853 # race conditions when multiple providers are syncing at the same time.
1854 async with self._sync_lock:
1855 await provider.sync_library(media_type)
1856
1857 # we keep track of running sync tasks
1858 task = self.mass.create_task(run_sync())
1859 sync_spec = SyncTask(
1860 provider_domain=provider.domain,
1861 provider_instance=provider.instance_id,
1862 media_types=(media_type,),
1863 task=task,
1864 )
1865 self.in_progress_syncs.append(sync_spec)
1866
1867 self.mass.signal_event(EventType.SYNC_TASKS_UPDATED, data=self.in_progress_syncs)
1868
1869 def on_sync_task_done(task: asyncio.Task) -> None:
1870 self.in_progress_syncs.remove(sync_spec)
1871 if task.cancelled():
1872 return
1873 if task_err := task.exception():
1874 self.logger.warning(
1875 "Sync task for %s/%ss completed with errors",
1876 provider.name,
1877 media_type.value,
1878 exc_info=task_err if self.logger.isEnabledFor(10) else None,
1879 )
1880 else:
1881 self.logger.info("Sync task for %s/%ss completed", provider.name, media_type.value)
1882 self.mass.signal_event(EventType.SYNC_TASKS_UPDATED, data=self.in_progress_syncs)
1883 self.mass.create_task(
1884 self.mass.cache.set(
1885 key=media_type.value,
1886 data=self.mass.loop.time(),
1887 provider=provider.instance_id,
1888 category=CACHE_CATEGORY_LAST_SYNC,
1889 )
1890 )
1891 # schedule db cleanup after sync
1892 if not self.in_progress_syncs:
1893 self.mass.create_task(self._cleanup_database())
1894 # reschedule next execution
1895 self.mass.create_task(self._schedule_provider_mediatype_sync(provider, media_type))
1896
1897 task.add_done_callback(on_sync_task_done)
1898 return
1899
1900 def _sort_search_result(
1901 self,
1902 search_query: str,
1903 items: Sequence[MediaItemType | ItemMapping],
1904 ) -> UniqueList[MediaItemType | ItemMapping]:
1905 """Sort search results on priority/preference."""
1906 scored_items: list[tuple[int, MediaItemType | ItemMapping]] = []
1907 # search results are already sorted by (streaming) providers on relevance
1908 # but we prefer exact name matches and library items so we simply put those
1909 # on top of the list.
1910 safe_title_str = create_safe_string(search_query)
1911 if " - " in search_query:
1912 artist, title_alt = search_query.split(" - ", 1)
1913 safe_title_alt = create_safe_string(title_alt)
1914 safe_artist_str = create_safe_string(artist)
1915 else:
1916 safe_artist_str = None
1917 safe_title_alt = None
1918 for item in items:
1919 score = 0
1920 if create_safe_string(item.name) not in (safe_title_str, safe_title_alt):
1921 # literal name match is mandatory to get a score at all
1922 continue
1923 # bonus point if artist provided and exact match
1924 if safe_artist_str:
1925 artist: Artist | ItemMapping
1926 for artist in getattr(item, "artists", []):
1927 if create_safe_string(artist.name) == safe_artist_str:
1928 score += 1
1929 # bonus point for library items
1930 if item.provider == "library":
1931 score += 1
1932 scored_items.append((score, item))
1933 scored_items.sort(key=lambda x: x[0], reverse=True)
1934 # combine it all with uniquelist, so this will deduplicated by default
1935 # note that streaming provider results are already (most likely) sorted on relevance
1936 # so we add all remaining items in their original order. We just prioritize
1937 # exact name matches and library items.
1938 return UniqueList([*[x[1] for x in scored_items], *items])
1939
1940 async def _schedule_provider_mediatype_sync(
1941 self, provider: MusicProvider, media_type: MediaType, is_initial: bool = False
1942 ) -> None:
1943 """Schedule Library sync for given provider and media type."""
1944 job_key = f"sync_{provider.instance_id}_{media_type.value}"
1945 # cancel any existing timers
1946 self.mass.cancel_timer(job_key)
1947 # handle mediatype specific sync config
1948 conf_key = f"library_sync_{media_type}s"
1949 sync_conf = await self.mass.config.get_provider_config_value(provider.instance_id, conf_key)
1950 if not sync_conf:
1951 return
1952 conf_key = f"provider_sync_interval_{media_type.value}s"
1953 sync_interval = await self.mass.config.get_provider_config_value(
1954 provider.instance_id, conf_key, return_type=int
1955 )
1956 if sync_interval <= 0:
1957 # sync disabled for this media type
1958 return
1959 sync_interval = sync_interval * 60 # config interval is in minutes - convert to seconds
1960
1961 if is_initial:
1962 # schedule the first sync run
1963 initial_interval = 10
1964 if last_sync := await self.mass.cache.get(
1965 key=media_type.value,
1966 provider=provider.instance_id,
1967 category=CACHE_CATEGORY_LAST_SYNC,
1968 ):
1969 initial_interval += max(0, sync_interval - (self.mass.loop.time() - last_sync))
1970 sync_interval = initial_interval
1971
1972 self.mass.call_later(
1973 sync_interval,
1974 self._start_provider_sync,
1975 provider,
1976 media_type,
1977 task_id=job_key,
1978 )
1979
1980 async def _cleanup_database(self) -> None:
1981 """Perform database cleanup/maintenance."""
1982 self.logger.debug("Performing database cleanup...")
1983 # Remove playlog entries older than 90 days
1984 await self.database.delete_where_query(
1985 DB_TABLE_PLAYLOG, f"timestamp < strftime('%s','now') - {3600 * 24 * 90}"
1986 )
1987 # db tables cleanup
1988 for ctrl in (
1989 self.albums,
1990 self.artists,
1991 self.tracks,
1992 self.playlists,
1993 self.radio,
1994 ):
1995 # Provider mappings where the db item is removed
1996 query = (
1997 f"item_id not in (SELECT item_id from {ctrl.db_table}) "
1998 f"AND media_type = '{ctrl.media_type}'"
1999 )
2000 await self.database.delete_where_query(DB_TABLE_PROVIDER_MAPPINGS, query)
2001 # Orphaned db items
2002 query = (
2003 f"item_id not in (SELECT item_id from {DB_TABLE_PROVIDER_MAPPINGS} "
2004 f"WHERE media_type = '{ctrl.media_type}')"
2005 )
2006 await self.database.delete_where_query(ctrl.db_table, query)
2007 # Cleanup removed db items from the playlog
2008 where_clause = (
2009 f"media_type = '{ctrl.media_type}' AND provider = 'library' "
2010 f"AND item_id not in (select item_id from {ctrl.db_table})"
2011 )
2012 await self.mass.music.database.delete_where_query(DB_TABLE_PLAYLOG, where_clause)
2013 self.logger.debug("Database cleanup done")
2014
    async def _setup_database(self) -> None:
        """Initialize database.

        Opens (or creates) the library database, runs a schema migration when
        the stored schema version differs from the current one, and falls back
        to a fresh database (keeping a .backup copy) if the migration fails.
        Finally ensures indexes/triggers exist and compacts the database.
        """
        db_path = os.path.join(self.mass.storage_path, "library.db")
        self._database = DatabaseConnection(db_path)
        await self._database.setup()

        # always create db tables if they don't exist to prevent errors trying to access them later
        await self.__create_database_tables()
        try:
            if db_row := await self._database.get_row(DB_TABLE_SETTINGS, {"key": "version"}):
                prev_version = int(db_row["value"])
            else:
                prev_version = 0
        except (KeyError, ValueError):
            # missing or unparsable version info: treat as a fresh database
            prev_version = 0

        if prev_version not in (0, DB_SCHEMA_VERSION):
            # db version mismatch - we need to do a migration
            # make a backup of db file
            db_path_backup = db_path + ".backup"
            # copy in a worker thread to avoid blocking the event loop
            await asyncio.to_thread(shutil.copyfile, db_path, db_path_backup)

            # handle db migration from previous schema(s) to this one
            try:
                await self.__migrate_database(prev_version)
            except Exception as err:
                # if the migration fails completely we reset the db
                # so the user at least can have a working situation back
                # a backup file is made with the previous version
                self.logger.error(
                    "Database migration failed - starting with a fresh library database, "
                    "a full rescan will be performed, this can take a while!",
                )
                # only dump a full traceback for unexpected error types
                if not isinstance(err, MusicAssistantError):
                    self.logger.exception(err)

                await self._database.close()
                await asyncio.to_thread(os.remove, db_path)
                self._database = DatabaseConnection(db_path)
                await self._database.setup()
                await self.mass.cache.clear()
                await self.__create_database_tables()

        # store current schema version
        await self._database.insert_or_replace(
            DB_TABLE_SETTINGS,
            {"key": "version", "value": str(DB_SCHEMA_VERSION), "type": "str"},
        )
        # create indexes and triggers if needed
        await self.__create_database_indexes()
        await self.__create_database_triggers()
        # compact db
        self.logger.debug("Compacting database...")
        try:
            await self._database.vacuum()
        except Exception as err:
            # vacuum failure is non-fatal; log and continue startup
            self.logger.warning("Database vacuum failed: %s", str(err))
        else:
            self.logger.debug("Compacting database done")
2074
    async def __migrate_database(self, prev_version: int) -> None:  # noqa: PLR0915
        """Perform a database migration.

        Applies all schema migrations from ``prev_version`` up to the current
        ``DB_SCHEMA_VERSION``. Each ``if prev_version <= N`` section upgrades
        the schema/data introduced after version N; sections run cumulatively
        so an old database passes through every intermediate step.

        :param prev_version: the schema version found in the database.
        :raises MusicAssistantError: when the database is too old to migrate.
        """
        self.logger.info(
            "Migrating database from version %s to %s", prev_version, DB_SCHEMA_VERSION
        )

        if prev_version < 15:
            raise MusicAssistantError("Database schema version too old to migrate")

        if prev_version <= 15:
            # add search_name and search_sort_name columns to all tables
            # and populate them with the name and sort_name values
            # this is to allow for local/case independent searches
            for table in (
                DB_TABLE_TRACKS,
                DB_TABLE_ALBUMS,
                DB_TABLE_ARTISTS,
                DB_TABLE_RADIOS,
                DB_TABLE_PLAYLISTS,
                DB_TABLE_AUDIOBOOKS,
                DB_TABLE_PODCASTS,
            ):
                try:
                    await self._database.execute(
                        f"ALTER TABLE {table} ADD COLUMN search_name TEXT DEFAULT '' NOT NULL"
                    )
                    await self._database.execute(
                        f"ALTER TABLE {table} ADD COLUMN search_sort_name TEXT DEFAULT '' NOT NULL"
                    )
                except Exception as err:
                    # column may already exist when the migration is re-run
                    if "duplicate column" not in str(err):
                        raise
                # migrate all existing values
                async for db_row in self._database.iter_items(table):
                    await self._database.update(
                        table,
                        {"item_id": db_row["item_id"]},
                        {
                            "search_name": create_safe_string(db_row["name"], True, True),
                            "search_sort_name": create_safe_string(db_row["sort_name"], True, True),
                        },
                    )

        if prev_version <= 16:
            # cleanup invalid release_date field in metadata
            for table in (
                DB_TABLE_TRACKS,
                DB_TABLE_ALBUMS,
                DB_TABLE_AUDIOBOOKS,
                DB_TABLE_PODCASTS,
            ):
                async for db_row in self._database.iter_items(table):
                    # NOTE(review): substring check assumes the stored JSON has
                    # no space after the colon - confirm this matches the
                    # project's JSON serializer output
                    if '"release_date":null' in db_row["metadata"]:
                        continue
                    metadata = json_loads(db_row["metadata"])
                    try:
                        datetime.fromisoformat(metadata["release_date"])
                    except (KeyError, ValueError):
                        # this is not a valid date, so we set it to None
                        metadata["release_date"] = None
                        await self._database.update(
                            table,
                            {"item_id": db_row["item_id"]},
                            {
                                "metadata": serialize_to_json(metadata),
                            },
                        )

        if prev_version <= 17:
            # migrate triggers to auto update timestamps
            # it had an error in the previous version where it was not created
            for db_table in (
                "artists",
                "albums",
                "tracks",
                "playlists",
                "radios",
                "audiobooks",
                "podcasts",
            ):
                await self._database.execute(f"DROP TRIGGER IF EXISTS update_{db_table}_timestamp;")

        if prev_version <= 18:
            # add in_library column to provider_mappings table
            await self._database.execute(
                f"ALTER TABLE {DB_TABLE_PROVIDER_MAPPINGS} ADD COLUMN in_library "
                "BOOLEAN NOT NULL DEFAULT 0;"
            )
            # migrate existing entries in provider_mappings which are filesystem
            await self._database.execute(
                f"UPDATE {DB_TABLE_PROVIDER_MAPPINGS} SET in_library = 1 "
                "WHERE provider_domain in ('filesystem_local', 'filesystem_smb');"
            )

        if prev_version <= 20:
            # drop column cache_checksum from playlists table
            # this is no longer used and is a leftover from previous designs
            try:
                await self._database.execute(
                    f"ALTER TABLE {DB_TABLE_PLAYLISTS} DROP COLUMN cache_checksum"
                )
            except Exception as err:
                # column may already be gone on a re-run
                if "no such column" not in str(err):
                    raise

        if prev_version <= 21:
            # drop table for smart fades analysis - it will be recreated with needed columns
            await self._database.execute(f"DROP TABLE IF EXISTS {DB_TABLE_SMART_FADES_ANALYSIS}")
            await self.__create_database_tables()

        if prev_version <= 22:
            # add userid column to playlog table
            try:
                await self._database.execute(
                    f"ALTER TABLE {DB_TABLE_PLAYLOG} ADD COLUMN userid TEXT"
                )
            except Exception as err:
                if "duplicate column" not in str(err):
                    raise
            # Note: SQLite doesn't support modifying constraints directly
            # The UNIQUE constraint will be updated when the table is recreated
            # For now, we'll keep the old constraint and add a new one via unique index
            try:
                await self._database.execute(f"DROP INDEX IF EXISTS {DB_TABLE_PLAYLOG}_unique_idx")
                await self._database.execute(
                    f"CREATE UNIQUE INDEX {DB_TABLE_PLAYLOG}_unique_idx "
                    f"ON {DB_TABLE_PLAYLOG}(item_id,provider,media_type,userid)"
                )
            except Exception as err:
                # If we can't create the index due to duplicate entries, log and continue
                self.logger.warning("Could not create unique index on playlog: %s", err)

        if prev_version <= 23:
            # add is_unique column to provider_mappings table
            try:
                await self._database.execute(
                    f"ALTER TABLE {DB_TABLE_PROVIDER_MAPPINGS} ADD COLUMN is_unique BOOLEAN"
                )
            except Exception as err:
                if "duplicate column" not in str(err):
                    raise

        if prev_version <= 24:
            # add queue_id and user_initiated columns to playlog table
            try:
                await self._database.execute(
                    f"ALTER TABLE {DB_TABLE_PLAYLOG} ADD COLUMN queue_id TEXT"
                )
            except Exception as err:
                if "duplicate column" not in str(err):
                    raise
            try:
                await self._database.execute(
                    f"ALTER TABLE {DB_TABLE_PLAYLOG} "
                    "ADD COLUMN user_initiated BOOLEAN NOT NULL DEFAULT 1"
                )
            except Exception as err:
                if "duplicate column" not in str(err):
                    raise

        if prev_version <= 26:
            # force in_library=True for provider mappings from non-streaming providers
            # streaming providers will be automatically added to library when synced
            await self._database.execute(
                f"UPDATE {DB_TABLE_PROVIDER_MAPPINGS} SET in_library = 1 "
                "WHERE provider_domain NOT IN "
                "('spotify', 'deezer', 'tidal', 'qobuz', 'apple_music', 'ytmusic');"
            )
            # also set in_library=True for all radio items
            await self._database.execute(
                f"UPDATE {DB_TABLE_PROVIDER_MAPPINGS} SET in_library = 1 "
                "WHERE media_type = 'radio';"
            )
            # remove invalid playlist provider mappings for playlists which are not in library
            await self._database.execute(
                f"DELETE FROM {DB_TABLE_PROVIDER_MAPPINGS} "
                "WHERE media_type = 'playlist' AND in_library = 0;"
            )

        if prev_version <= 27:
            # set streaming provider mappings to in_library=True, but only for items
            # that do not already have any mapping with in_library=True
            # (to avoid overwriting explicit values in multi-instance setups)
            await self._database.execute(
                f"UPDATE {DB_TABLE_PROVIDER_MAPPINGS} SET in_library = 1 "
                "WHERE provider_domain NOT IN "
                "('filesystem_local', 'builtin', 'test', 'jellyfin', 'emby', "
                "'plex', 'opensubsonic', 'audiobookshelf', 'gpodder', 'podcastfeed') "
                "AND NOT EXISTS ("
                f"SELECT 1 FROM {DB_TABLE_PROVIDER_MAPPINGS} AS pm2 "
                f"WHERE pm2.media_type = {DB_TABLE_PROVIDER_MAPPINGS}.media_type "
                f"AND pm2.item_id = {DB_TABLE_PROVIDER_MAPPINGS}.item_id "
                "AND pm2.in_library = 1)"
            )

        if prev_version <= 28:
            # create genre/alias tables
            await self.__create_database_tables()

            # Use raw aiosqlite connection for bulk operations.
            db = self._database._db

            empty_metadata = serialize_to_json({})
            empty_external_ids = serialize_to_json(set())

            def _normalize_name(raw_name: str) -> tuple[str, str, str, str]:
                # returns (name, sort_name, search_name, search_sort_name)
                name = raw_name.strip()
                sort_name = name
                search_name = create_safe_string(name, True, True)
                search_sort_name = create_safe_string(sort_name or "", True, True)
                return name, sort_name, search_name, search_sort_name

            # normalized search_name -> genre item_id, to avoid repeated lookups
            genre_cache: dict[str, int] = {}

            genre_insert_sql = (
                f"INSERT OR IGNORE INTO {DB_TABLE_GENRES}"
                "(name, sort_name, translation_key, description, favorite, "
                "metadata, external_ids, genre_aliases, play_count, last_played, "
                "search_name, search_sort_name) "
                "VALUES (?, ?, ?, NULL, 0, ?, ?, ?, 0, 0, ?, ?)"
            )
            genre_select_sql = f"SELECT item_id FROM {DB_TABLE_GENRES} WHERE search_name = ?"

            async def _get_or_create_genre(
                raw_name: str,
                aliases: list[str] | None = None,
                translation_key: str | None = None,
            ) -> int:
                # Return the genre's item_id, inserting the genre row when
                # needed; returns 0 when the name normalizes to nothing.
                name, sort_name, search_name, search_sort_name = _normalize_name(raw_name)
                if not search_name:
                    return 0
                if search_name in genre_cache:
                    return genre_cache[search_name]
                aliases_json = serialize_to_json(aliases or [name])
                icon_metadata = GenreController._get_genre_icon_metadata(translation_key)
                metadata_json = (
                    serialize_to_json(icon_metadata.to_dict()) if icon_metadata else empty_metadata
                )
                row_id = await db.execute_insert(
                    genre_insert_sql,
                    (
                        name,
                        sort_name,
                        translation_key,
                        metadata_json,
                        empty_external_ids,
                        aliases_json,
                        search_name,
                        search_sort_name,
                    ),
                )
                if row_id and row_id[0]:
                    genre_cache[search_name] = row_id[0]
                    return row_id[0]
                # INSERT OR IGNORE hit an existing row: look up its item_id
                async with db.execute(genre_select_sql, (search_name,)) as cursor:
                    row = await cursor.fetchone()
                    if row:
                        genre_cache[search_name] = row[0]
                        return row[0]
                return 0

            # Phase 1: Seed DEFAULT_GENRE_MAPPING — create genres with aliases.
            # Build n:n lookup: normalized alias name -> list of genre_ids.
            # One alias can belong to multiple genres (e.g. "funk" is both
            # a standalone genre and an alias of Soul/R&B).
            alias_to_genre: dict[str, list[int]] = {}
            for entry in DEFAULT_GENRE_MAPPING:
                genre_name = entry.get("genre")
                if not genre_name:
                    continue
                all_aliases = [genre_name, *entry.get("aliases", [])]
                genre_id = await _get_or_create_genre(
                    genre_name,
                    aliases=all_aliases,
                    translation_key=entry.get("translation_key"),
                )
                if not genre_id:
                    continue
                for alias in all_aliases:
                    norm = create_safe_string(alias.strip(), True, True)
                    if norm:
                        alias_to_genre.setdefault(norm, [])
                        if genre_id not in alias_to_genre[norm]:
                            alias_to_genre[norm].append(genre_id)
            await db.commit()

            # Phase 2: Discover unique genre names from all media items,
            # create genres for unknown names, then bulk-insert mappings.
            media_tables = (
                (DB_TABLE_TRACKS, MediaType.TRACK),
                (DB_TABLE_ALBUMS, MediaType.ALBUM),
                (DB_TABLE_ARTISTS, MediaType.ARTIST),
                (DB_TABLE_PLAYLISTS, MediaType.PLAYLIST),
                (DB_TABLE_RADIOS, MediaType.RADIO),
                (DB_TABLE_AUDIOBOOKS, MediaType.AUDIOBOOK),
                (DB_TABLE_PODCASTS, MediaType.PODCAST),
            )

            # 2a: Extract all unique raw genre names from metadata
            union_parts = [
                f"SELECT DISTINCT TRIM(g.value) AS raw_name "
                f"FROM {table}, json_each(json_extract({table}.metadata, '$.genres')) AS g "
                f"WHERE json_extract({table}.metadata, '$.genres') IS NOT NULL "
                f"AND json_extract({table}.metadata, '$.genres') != '[]'"
                for table, _ in media_tables
            ]
            unique_names_sql = " UNION ".join(union_parts)
            self.logger.info("Genre migration - unique names query:\n%s", unique_names_sql)
            async with db.execute(unique_names_sql) as cursor:
                unique_raw_names = [row[0] for row in await cursor.fetchall() if row[0]]
            self.logger.info(
                "Genre migration - discovered %d unique genre names", len(unique_raw_names)
            )

            # 2b: Ensure genres exist for all discovered names.
            # Names already covered by Phase 1 aliases just reuse those genre(s).
            # New names get their own genre. One alias can map to multiple genres (n:n).
            raw_name_to_genres: dict[str, list[int]] = {}
            for raw_name in unique_raw_names:
                norm = create_safe_string(raw_name.strip(), True, True)
                if not norm:
                    continue
                if norm in alias_to_genre:
                    raw_name_to_genres[raw_name] = list(alias_to_genre[norm])
                    self.logger.debug(
                        "Genre migration - resolved %r -> genre_ids %s (alias match)",
                        raw_name,
                        alias_to_genre[norm],
                    )
                else:
                    genre_id = await _get_or_create_genre(raw_name)
                    if genre_id:
                        raw_name_to_genres[raw_name] = [genre_id]
                        alias_to_genre[norm] = [genre_id]
                        self.logger.debug(
                            "Genre migration - resolved %r -> genre_id %d (new genre)",
                            raw_name,
                            genre_id,
                        )
            await db.commit()
            self.logger.info(
                "Genre migration - resolved %d unique genre names", len(raw_name_to_genres)
            )

            # 2c: Add discovered raw names as aliases to their resolved genres
            # so that frontend searches by raw name find the parent genre.
            genre_new_aliases: dict[int, list[str]] = {}
            for raw_name, gids in raw_name_to_genres.items():
                for gid in gids:
                    genre_new_aliases.setdefault(gid, []).append(raw_name)
            for gid, new_aliases in genre_new_aliases.items():
                async with db.execute(
                    f"SELECT genre_aliases FROM {DB_TABLE_GENRES} WHERE item_id = :gid",
                    {"gid": gid},
                ) as cursor:
                    row = await cursor.fetchone()
                if not row:
                    continue
                existing = json_loads(row[0]) if row[0] else []
                existing_norms = {create_safe_string(a, True, True) for a in existing}
                # only append aliases not already present (after normalization)
                to_add = [
                    a
                    for a in new_aliases
                    if create_safe_string(a, True, True) not in existing_norms
                ]
                if to_add:
                    merged = existing + to_add
                    await db.execute(
                        f"UPDATE {DB_TABLE_GENRES} SET genre_aliases = :aliases "
                        "WHERE item_id = :gid",
                        {"aliases": json_dumps(merged), "gid": gid},
                    )
            await db.commit()

            # 2d: Build CTE with (raw_name, genre_id) and do one INSERT per
            # media type using json_each to map media items directly to genres.
            # One raw_name can map to multiple genre_ids (n:n).
            if raw_name_to_genres:
                # chr(39) is a single quote: escape quotes for the SQL literal
                cte_values = ", ".join(
                    f"(LOWER('{name.replace(chr(39), chr(39) + chr(39))}'), {gid})"
                    for name, gids in raw_name_to_genres.items()
                    for gid in gids
                )
                cte = f"WITH genre_lookup(raw_name, genre_id) AS (VALUES {cte_values})"

                for table, media_type in media_tables:
                    full_query = (
                        f"{cte} INSERT OR REPLACE INTO {DB_TABLE_GENRE_MEDIA_ITEM_MAPPING}"
                        f"(genre_id, media_id, media_type, alias) "
                        f"SELECT gl.genre_id, {table}.item_id, "
                        f"'{media_type.value}', TRIM(g.value) "
                        f"FROM {table}, "
                        f"json_each(json_extract({table}.metadata, '$.genres')) AS g "
                        f"JOIN genre_lookup gl ON gl.raw_name = LOWER(TRIM(g.value)) "
                        f"WHERE json_extract({table}.metadata, '$.genres') IS NOT NULL "
                        f"AND json_extract({table}.metadata, '$.genres') != '[]'"
                    )
                    self.logger.info(
                        "Genre migration - %s query:\n%s", media_type.value, full_query
                    )
                    await db.execute(full_query)
                await db.commit()

        # save changes
        await self._database.commit()

        # always clear the cache after a db migration
        await self.mass.cache.clear()
2483
2484 async def _reset_database(self) -> None:
2485 """Reset the database."""
2486 await self.close()
2487 db_path = os.path.join(self.mass.storage_path, "library.db")
2488 await asyncio.to_thread(os.remove, db_path)
2489 await self._setup_database()
2490 # initiate full sync
2491 await self.start_sync()
2492
2493 async def __create_database_tables(self) -> None:
2494 """Create database tables."""
2495 await self.database.execute(
2496 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_SETTINGS}(
2497 [key] TEXT PRIMARY KEY,
2498 [value] TEXT,
2499 [type] TEXT
2500 );"""
2501 )
2502 await self.database.execute(
2503 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_PLAYLOG}(
2504 [id] INTEGER PRIMARY KEY AUTOINCREMENT,
2505 [item_id] TEXT NOT NULL,
2506 [provider] TEXT NOT NULL,
2507 [media_type] TEXT NOT NULL,
2508 [name] TEXT NOT NULL,
2509 [image] json,
2510 [timestamp] INTEGER DEFAULT 0,
2511 [fully_played] BOOLEAN,
2512 [seconds_played] INTEGER,
2513 [userid] TEXT NOT NULL,
2514 [queue_id] TEXT,
2515 [user_initiated] BOOLEAN NOT NULL DEFAULT 1,
2516 UNIQUE(item_id, provider, media_type, userid));"""
2517 )
2518 await self.database.execute(
2519 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUMS}(
2520 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2521 [name] TEXT NOT NULL,
2522 [sort_name] TEXT NOT NULL,
2523 [version] TEXT,
2524 [album_type] TEXT NOT NULL,
2525 [year] INTEGER,
2526 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2527 [metadata] json NOT NULL,
2528 [external_ids] json NOT NULL,
2529 [play_count] INTEGER NOT NULL DEFAULT 0,
2530 [last_played] INTEGER NOT NULL DEFAULT 0,
2531 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2532 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2533 [search_name] TEXT NOT NULL,
2534 [search_sort_name] TEXT NOT NULL
2535 );"""
2536 )
2537 await self.database.execute(
2538 f"""
2539 CREATE TABLE IF NOT EXISTS {DB_TABLE_ARTISTS}(
2540 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2541 [name] TEXT NOT NULL,
2542 [sort_name] TEXT NOT NULL,
2543 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2544 [metadata] json NOT NULL,
2545 [external_ids] json NOT NULL,
2546 [play_count] INTEGER DEFAULT 0,
2547 [last_played] INTEGER DEFAULT 0,
2548 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2549 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2550 [search_name] TEXT NOT NULL,
2551 [search_sort_name] TEXT NOT NULL
2552 );"""
2553 )
2554 await self.database.execute(
2555 f"""
2556 CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACKS}(
2557 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2558 [name] TEXT NOT NULL,
2559 [sort_name] TEXT NOT NULL,
2560 [version] TEXT,
2561 [duration] INTEGER,
2562 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2563 [metadata] json NOT NULL,
2564 [external_ids] json NOT NULL,
2565 [play_count] INTEGER DEFAULT 0,
2566 [last_played] INTEGER DEFAULT 0,
2567 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2568 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2569 [search_name] TEXT NOT NULL,
2570 [search_sort_name] TEXT NOT NULL
2571 );"""
2572 )
2573 await self.database.execute(
2574 f"""
2575 CREATE TABLE IF NOT EXISTS {DB_TABLE_PLAYLISTS}(
2576 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2577 [name] TEXT NOT NULL,
2578 [sort_name] TEXT NOT NULL,
2579 [owner] TEXT NOT NULL,
2580 [is_editable] BOOLEAN NOT NULL,
2581 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2582 [metadata] json NOT NULL,
2583 [external_ids] json NOT NULL,
2584 [play_count] INTEGER DEFAULT 0,
2585 [last_played] INTEGER DEFAULT 0,
2586 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2587 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2588 [search_name] TEXT NOT NULL,
2589 [search_sort_name] TEXT NOT NULL
2590 );"""
2591 )
2592 await self.database.execute(
2593 f"""
2594 CREATE TABLE IF NOT EXISTS {DB_TABLE_RADIOS}(
2595 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2596 [name] TEXT NOT NULL,
2597 [sort_name] TEXT NOT NULL,
2598 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2599 [metadata] json NOT NULL,
2600 [external_ids] json NOT NULL,
2601 [play_count] INTEGER DEFAULT 0,
2602 [last_played] INTEGER DEFAULT 0,
2603 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2604 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2605 [search_name] TEXT NOT NULL,
2606 [search_sort_name] TEXT NOT NULL
2607 );"""
2608 )
2609 await self.database.execute(
2610 f"""
2611 CREATE TABLE IF NOT EXISTS {DB_TABLE_AUDIOBOOKS}(
2612 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2613 [name] TEXT NOT NULL,
2614 [sort_name] TEXT NOT NULL,
2615 [version] TEXT,
2616 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2617 [publisher] TEXT,
2618 [authors] json NOT NULL,
2619 [narrators] json NOT NULL,
2620 [metadata] json NOT NULL,
2621 [duration] INTEGER,
2622 [external_ids] json NOT NULL,
2623 [play_count] INTEGER DEFAULT 0,
2624 [last_played] INTEGER DEFAULT 0,
2625 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2626 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2627 [search_name] TEXT NOT NULL,
2628 [search_sort_name] TEXT NOT NULL
2629 );"""
2630 )
2631 await self.database.execute(
2632 f"""
2633 CREATE TABLE IF NOT EXISTS {DB_TABLE_PODCASTS}(
2634 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2635 [name] TEXT NOT NULL,
2636 [sort_name] TEXT NOT NULL,
2637 [version] TEXT,
2638 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2639 [publisher] TEXT,
2640 [total_episodes] INTEGER NOT NULL,
2641 [metadata] json NOT NULL,
2642 [external_ids] json NOT NULL,
2643 [play_count] INTEGER NOT NULL DEFAULT 0,
2644 [last_played] INTEGER NOT NULL DEFAULT 0,
2645 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2646 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2647 [search_name] TEXT NOT NULL,
2648 [search_sort_name] TEXT NOT NULL
2649 );"""
2650 )
2651 await self.database.execute(
2652 f"""
2653 CREATE TABLE IF NOT EXISTS {DB_TABLE_GENRES}(
2654 [item_id] INTEGER PRIMARY KEY AUTOINCREMENT,
2655 [name] TEXT NOT NULL,
2656 [sort_name] TEXT NOT NULL,
2657 [translation_key] TEXT,
2658 [description] TEXT,
2659 [favorite] BOOLEAN NOT NULL DEFAULT 0,
2660 [metadata] json NOT NULL,
2661 [external_ids] json NOT NULL,
2662 [genre_aliases] json NOT NULL DEFAULT '[]',
2663 [play_count] INTEGER NOT NULL DEFAULT 0,
2664 [last_played] INTEGER NOT NULL DEFAULT 0,
2665 [timestamp_added] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2666 [timestamp_modified] INTEGER NOT NULL DEFAULT 0,
2667 [search_name] TEXT NOT NULL,
2668 [search_sort_name] TEXT NOT NULL
2669 );"""
2670 )
2671 await self.database.execute(
2672 f"""
2673 CREATE TABLE IF NOT EXISTS {DB_TABLE_GENRE_MEDIA_ITEM_MAPPING}(
2674 [genre_id] INTEGER NOT NULL,
2675 [media_id] INTEGER NOT NULL,
2676 [media_type] TEXT NOT NULL,
2677 [alias] TEXT NOT NULL,
2678 FOREIGN KEY([genre_id]) REFERENCES [genres]([item_id]),
2679 UNIQUE(genre_id, media_id, media_type)
2680 );"""
2681 )
2682 await self.database.execute(
2683 f"""
2684 CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUM_TRACKS}(
2685 [id] INTEGER PRIMARY KEY AUTOINCREMENT,
2686 [track_id] INTEGER NOT NULL,
2687 [album_id] INTEGER NOT NULL,
2688 [disc_number] INTEGER NOT NULL,
2689 [track_number] INTEGER NOT NULL,
2690 FOREIGN KEY([track_id]) REFERENCES [tracks]([item_id]),
2691 FOREIGN KEY([album_id]) REFERENCES [albums]([item_id]),
2692 UNIQUE(track_id, album_id)
2693 );"""
2694 )
2695 await self.database.execute(
2696 f"""
2697 CREATE TABLE IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}(
2698 [media_type] TEXT NOT NULL,
2699 [item_id] INTEGER NOT NULL,
2700 [provider_domain] TEXT NOT NULL,
2701 [provider_instance] TEXT NOT NULL,
2702 [provider_item_id] TEXT NOT NULL,
2703 [available] BOOLEAN NOT NULL DEFAULT 1,
2704 [in_library] BOOLEAN NOT NULL DEFAULT 0,
2705 [is_unique] BOOLEAN,
2706 [url] text,
2707 [audio_format] json,
2708 [details] TEXT,
2709 UNIQUE(media_type, provider_instance, provider_item_id)
2710 );"""
2711 )
2712 await self.database.execute(
2713 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}(
2714 [track_id] INTEGER NOT NULL,
2715 [artist_id] INTEGER NOT NULL,
2716 FOREIGN KEY([track_id]) REFERENCES [tracks]([item_id]),
2717 FOREIGN KEY([artist_id]) REFERENCES [artists]([item_id]),
2718 UNIQUE(track_id, artist_id)
2719 );"""
2720 )
2721 await self.database.execute(
2722 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}(
2723 [album_id] INTEGER NOT NULL,
2724 [artist_id] INTEGER NOT NULL,
2725 FOREIGN KEY([album_id]) REFERENCES [albums]([item_id]),
2726 FOREIGN KEY([artist_id]) REFERENCES [artists]([item_id]),
2727 UNIQUE(album_id, artist_id)
2728 );"""
2729 )
2730
2731 await self.database.execute(
2732 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_LOUDNESS_MEASUREMENTS}(
2733 [id] INTEGER PRIMARY KEY AUTOINCREMENT,
2734 [media_type] TEXT NOT NULL,
2735 [item_id] TEXT NOT NULL,
2736 [provider] TEXT NOT NULL,
2737 [loudness] REAL,
2738 [loudness_album] REAL,
2739 UNIQUE(media_type,item_id,provider));"""
2740 )
2741
2742 await self.database.execute(
2743 f"""CREATE TABLE IF NOT EXISTS {DB_TABLE_SMART_FADES_ANALYSIS}(
2744 [id] INTEGER PRIMARY KEY AUTOINCREMENT,
2745 [item_id] TEXT NOT NULL,
2746 [provider] TEXT NOT NULL,
2747 [fragment] INTEGER NOT NULL,
2748 [bpm] REAL NOT NULL,
2749 [beats] TEXT NOT NULL,
2750 [downbeats] TEXT NOT NULL,
2751 [confidence] REAL NOT NULL,
2752 [duration] REAL,
2753 [analysis_version] INTEGER DEFAULT 1,
2754 [timestamp_created] INTEGER DEFAULT (cast(strftime('%s','now') as int)),
2755 UNIQUE(item_id,provider,fragment));"""
2756 )
2757
2758 await self.database.commit()
2759
2760 async def __create_database_indexes(self) -> None:
2761 """Create database indexes."""
2762 for db_table in (
2763 DB_TABLE_ARTISTS,
2764 DB_TABLE_ALBUMS,
2765 DB_TABLE_TRACKS,
2766 DB_TABLE_PLAYLISTS,
2767 DB_TABLE_RADIOS,
2768 DB_TABLE_AUDIOBOOKS,
2769 DB_TABLE_PODCASTS,
2770 DB_TABLE_GENRES,
2771 ):
2772 # index on favorite column
2773 await self.database.execute(
2774 f"CREATE INDEX IF NOT EXISTS {db_table}_favorite_idx on {db_table}(favorite);"
2775 )
2776 # index on name
2777 await self.database.execute(
2778 f"CREATE INDEX IF NOT EXISTS {db_table}_name_idx on {db_table}(name);"
2779 )
2780 # index on search_name (=lowercase name without diacritics)
2781 await self.database.execute(
2782 f"CREATE INDEX IF NOT EXISTS {db_table}_name_nocase_idx ON {db_table}(search_name);"
2783 )
2784 # index on sort_name
2785 await self.database.execute(
2786 f"CREATE INDEX IF NOT EXISTS {db_table}_sort_name_idx on {db_table}(sort_name);"
2787 )
2788 # index on search_sort_name (=lowercase sort_name without diacritics)
2789 await self.database.execute(
2790 f"CREATE INDEX IF NOT EXISTS {db_table}_search_sort_name_idx "
2791 f"ON {db_table}(search_sort_name);"
2792 )
2793 # index on external_ids
2794 await self.database.execute(
2795 f"CREATE INDEX IF NOT EXISTS {db_table}_external_ids_idx "
2796 f"ON {db_table}(external_ids);"
2797 )
2798 # index on timestamp_added
2799 await self.database.execute(
2800 f"CREATE INDEX IF NOT EXISTS {db_table}_timestamp_added_idx "
2801 f"on {db_table}(timestamp_added);"
2802 )
2803 # index on play_count
2804 await self.database.execute(
2805 f"CREATE INDEX IF NOT EXISTS {db_table}_play_count_idx on {db_table}(play_count);"
2806 )
2807 # index on last_played
2808 await self.database.execute(
2809 f"CREATE INDEX IF NOT EXISTS {db_table}_last_played_idx on {db_table}(last_played);"
2810 )
2811
2812 # indexes on provider_mappings table
2813 await self.database.execute(
2814 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}_media_type_item_id_idx "
2815 f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,item_id);"
2816 )
2817 await self.database.execute(
2818 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}_provider_domain_idx "
2819 f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_domain,provider_item_id);"
2820 )
2821 await self.database.execute(
2822 f"CREATE UNIQUE INDEX IF NOT EXISTS {DB_TABLE_PROVIDER_MAPPINGS}_provider_instance_idx "
2823 f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_instance,provider_item_id);"
2824 )
2825 await self.database.execute(
2826 "CREATE INDEX IF NOT EXISTS "
2827 f"{DB_TABLE_PROVIDER_MAPPINGS}_media_type_provider_instance_idx "
2828 f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_instance);"
2829 )
2830 await self.database.execute(
2831 "CREATE INDEX IF NOT EXISTS "
2832 f"{DB_TABLE_PROVIDER_MAPPINGS}_media_type_provider_domain_idx "
2833 f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_domain);"
2834 )
2835 await self.database.execute(
2836 "CREATE INDEX IF NOT EXISTS "
2837 f"{DB_TABLE_PROVIDER_MAPPINGS}_media_type_provider_instance_library_idx "
2838 f"on {DB_TABLE_PROVIDER_MAPPINGS}(media_type,provider_instance,in_library);"
2839 )
2840
2841 # indexes on track_artists table
2842 await self.database.execute(
2843 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}_track_id_idx "
2844 f"on {DB_TABLE_TRACK_ARTISTS}(track_id);"
2845 )
2846 await self.database.execute(
2847 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_TRACK_ARTISTS}_artist_id_idx "
2848 f"on {DB_TABLE_TRACK_ARTISTS}(artist_id);"
2849 )
2850 # indexes on album_artists table
2851 await self.database.execute(
2852 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}_album_id_idx "
2853 f"on {DB_TABLE_ALBUM_ARTISTS}(album_id);"
2854 )
2855 await self.database.execute(
2856 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_ALBUM_ARTISTS}_artist_id_idx "
2857 f"on {DB_TABLE_ALBUM_ARTISTS}(artist_id);"
2858 )
2859 # index on loudness measurements table
2860 await self.database.execute(
2861 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_LOUDNESS_MEASUREMENTS}_idx "
2862 f"on {DB_TABLE_LOUDNESS_MEASUREMENTS}(media_type,item_id,provider);"
2863 )
2864 # index on smart fades analysis table
2865 await self.database.execute(
2866 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_SMART_FADES_ANALYSIS}_idx "
2867 f"on {DB_TABLE_SMART_FADES_ANALYSIS}(item_id,provider,fragment);"
2868 )
2869 # indexes on genre_media_item_mapping table
2870 await self.database.execute(
2871 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_GENRE_MEDIA_ITEM_MAPPING}_media_idx "
2872 f"on {DB_TABLE_GENRE_MEDIA_ITEM_MAPPING}(media_id,media_type);"
2873 )
2874 await self.database.execute(
2875 f"CREATE INDEX IF NOT EXISTS {DB_TABLE_GENRE_MEDIA_ITEM_MAPPING}_genre_alias_idx "
2876 f"on {DB_TABLE_GENRE_MEDIA_ITEM_MAPPING}(genre_id,alias);"
2877 )
2878 # unique index on playlog table
2879 await self.database.execute(
2880 f"CREATE UNIQUE INDEX IF NOT EXISTS {DB_TABLE_PLAYLOG}_unique_idx "
2881 f"on {DB_TABLE_PLAYLOG}(item_id,provider,media_type,userid);"
2882 )
2883 await self.database.commit()
2884
    async def __create_database_triggers(self) -> None:
        """Create database triggers.

        Installs an AFTER UPDATE trigger on each library table so the
        [timestamp_modified] column is refreshed automatically on every row
        update, without individual writers having to set it themselves.
        """
        # NOTE(review): table names are hardcoded literals here rather than the
        # DB_TABLE_* constants used elsewhere — keep this list in sync with the
        # tables created in __create_database_tables.
        for db_table in (
            "artists",
            "albums",
            "tracks",
            "playlists",
            "radios",
            "audiobooks",
            "podcasts",
            "genres",
        ):
            # trigger to auto update the timestamp_modified column
            await self.database.execute(
                f"""
                CREATE TRIGGER IF NOT EXISTS update_{db_table}_timestamp
                AFTER UPDATE ON {db_table}
                BEGIN
                    UPDATE {db_table} SET timestamp_modified=cast(strftime('%s','now') as int)
                    WHERE rowid = new.rowid;
                END;
                """
            )
        await self.database.commit()
2909
2910 async def correct_multi_instance_provider_mappings(self) -> None:
2911 """Correct provider mappings for multi-instance providers."""
2912 self.logger.debug("Correcting provider mappings for multi-instance providers...")
2913 multi_instance_providers: set[str] = set()
2914 for provider in self.providers:
2915 if len(self.get_provider_instances(provider.domain)) > 1:
2916 multi_instance_providers.add(provider.instance_id)
2917 if not multi_instance_providers:
2918 return # no multi-instance providers found, nothing to do
2919
2920 for ctrl in (
2921 self.albums,
2922 self.artists,
2923 self.tracks,
2924 self.playlists,
2925 self.radio,
2926 self.audiobooks,
2927 self.podcasts,
2928 ):
2929 async for db_item in ctrl.iter_library_items(
2930 provider=list(multi_instance_providers), library_items_only=False
2931 ):
2932 if self.match_provider_instances(db_item):
2933 await ctrl.update_item_in_library(db_item.item_id, db_item)
2934 # prevent overwhelming the event loop
2935 await asyncio.sleep(0.2)
2936 self.mass.config.set_raw_core_config_value(
2937 self.domain, LAST_PROVIDER_INSTANCE_SCAN, int(time.time())
2938 )
2939 self.logger.debug("Provider mappings correction done")
2940
2941 async def _get_user_for_provider(
2942 self, provider_mappings_or_instance_id: Iterable[ProviderMapping] | str
2943 ) -> User | None:
2944 """Try to get the MA User based on provider mappings and provider filter."""
2945 all_users = await self.mass.webserver.auth.list_users()
2946 for mapping_or_instance_id in provider_mappings_or_instance_id:
2947 for user in all_users:
2948 if not user.provider_filter:
2949 continue
2950 if isinstance(mapping_or_instance_id, str):
2951 if provider_mappings_or_instance_id in user.provider_filter:
2952 return user
2953 elif mapping_or_instance_id.provider_instance in user.provider_filter:
2954 return user
2955 return None
2956