1"""Run the Music Assistant Server."""
2
3from __future__ import annotations
4
5import argparse
6import asyncio
7import logging
8import os
9import subprocess
10import sys
11import threading
12import traceback
13from contextlib import suppress
14from logging.handlers import RotatingFileHandler
15from typing import Any, Final
16
17from aiorun import run
18from colorlog import ColoredFormatter
19
20from music_assistant.constants import MASS_LOGGER_NAME, VERBOSE_LOG_LEVEL
21from music_assistant.helpers.json import json_loads
22from music_assistant.helpers.logging import activate_log_queue_handler
23from music_assistant.mass import MusicAssistant
24
25FORMAT_DATE: Final = "%Y-%m-%d"
26FORMAT_TIME: Final = "%H:%M:%S"
27FORMAT_DATETIME: Final = f"{FORMAT_DATE} {FORMAT_TIME}"
28MAX_LOG_FILESIZE = 1000000 * 10 # 10 MB
29ALPINE_RELEASE_FILE = "/etc/alpine-release"
30
31LOGGER = logging.getLogger(MASS_LOGGER_NAME)
32
33
def get_arguments() -> argparse.Namespace:
    """Parse and return the command line arguments."""
    parser = argparse.ArgumentParser(description="MusicAssistant")

    # Resolve the default data directory: honor XDG_DATA_HOME when set,
    # otherwise fall back to a hidden folder in the user's home directory.
    xdg_data_home = os.getenv("XDG_DATA_HOME")
    default_data_dir = (
        os.path.join(xdg_data_home, "music-assistant")
        if xdg_data_home
        else os.path.join(os.path.expanduser("~"), ".musicassistant")
    )
    # Resolve the default cache directory: honor XDG_CACHE_HOME when set,
    # otherwise keep the cache nested inside the data directory.
    xdg_cache_home = os.getenv("XDG_CACHE_HOME")
    default_cache_dir = (
        os.path.join(xdg_cache_home, "music-assistant")
        if xdg_cache_home
        else os.path.join(default_data_dir, ".cache")
    )

    parser.add_argument(
        "--data-dir",
        "-c",
        "--config",
        metavar="path_to_data_dir",
        default=default_data_dir,
        help="Directory that contains MusicAssistant persistent data",
    )
    parser.add_argument(
        "--cache-dir",
        metavar="path_to_cache_dir",
        default=default_cache_dir,
        help="Directory that contains MusicAssistant cache data [optional]",
    )
    parser.add_argument(
        "--log-level",
        type=str,
        default=os.environ.get("LOG_LEVEL", "info"),
        help="Provide logging level. Example --log-level debug, "
        "default=info, possible=(critical, error, warning, info, debug, verbose)",
    )
    parser.add_argument(
        "--safe-mode",
        action=argparse.BooleanOptionalAction,
        help="Start in safe mode (core controllers only, no providers)",
    )

    return parser.parse_args()
77
78
def setup_logger(data_path: str, level: str = "DEBUG") -> logging.Logger:
    """Initialize logger."""
    # shared format template used by both the console and the file handler
    log_fmt = "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"

    # base logging config for the root logger
    logging.basicConfig(level=logging.INFO)
    root_logger = logging.getLogger()

    # swap the default stream handler's formatter for a colorized variant
    root_logger.handlers[0].setFormatter(
        ColoredFormatter(
            f"%(log_color)s{log_fmt}%(reset)s",
            datefmt=FORMAT_DATETIME,
            reset=True,
            log_colors={
                "VERBOSE": "light_black",
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red",
            },
        )
    )

    # Capture warnings.warn(...) and friends messages in logs.
    # The standard destination for them is stderr, which may end up unnoticed.
    # This way they're where other messages are, and can be filtered as usual.
    logging.captureWarnings(True)

    # setup file handler (rotated at each start so old logs are preserved once)
    file_handler = RotatingFileHandler(
        os.path.join(data_path, "musicassistant.log"),
        maxBytes=MAX_LOG_FILESIZE,
        backupCount=1,
    )
    with suppress(OSError):
        file_handler.doRollover()
    file_handler.setFormatter(logging.Formatter(log_fmt, datefmt=FORMAT_DATETIME))
    root_logger.addHandler(file_handler)

    # register the custom VERBOSE level (below DEBUG)
    logging.addLevelName(VERBOSE_LOG_LEVEL, "VERBOSE")

    # apply the configured global log level to the (root) music assistant logger
    logging.getLogger(MASS_LOGGER_NAME).setLevel(level)

    # silence some noisy loggers
    for noisy in (
        "asyncio",
        "aiosqlite",
        "databases",
        "requests",
        "urllib3",
        "aiohttp.access",
        "httpx",
        "charset_normalizer",
        "numba",
    ):
        logging.getLogger(noisy).setLevel(logging.WARNING)
    logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)

    # Add a filter to suppress slow callback warnings from buffered audio streaming
    # These warnings are expected when audio buffers fill up and producers wait for consumers
    class _BufferedGeneratorFilter(logging.Filter):
        """Filter out expected slow callback warnings from buffered audio generators."""

        def filter(self, record: logging.LogRecord) -> bool:
            """Return False to suppress the log record."""
            if record.levelno != logging.WARNING:
                return True
            # Check the formatted message, not the format string
            return "buffered.<locals>.producer()" not in record.getMessage()

    logging.getLogger("asyncio").addFilter(_BufferedGeneratorFilter())

    # route uncaught exceptions (main thread and worker threads) into the log
    def _handle_sys_exception(*exc_info: Any) -> None:
        logging.getLogger(None).exception("Uncaught exception", exc_info=exc_info)

    def _handle_thread_exception(args: threading.ExceptHookArgs) -> None:
        logging.getLogger(None).exception(
            "Uncaught thread exception",
            exc_info=(  # type: ignore[arg-type]
                args.exc_type,
                args.exc_value,
                args.exc_traceback,
            ),
        )

    sys.excepthook = _handle_sys_exception
    threading.excepthook = _handle_thread_exception

    return root_logger
165
166
def _enable_posix_spawn() -> None:
    """Enable posix_spawn on Alpine Linux."""
    # The subprocess module does not know about Alpine Linux/musl
    # and will use fork() instead of posix_spawn() which significantly
    # less efficient. This is a workaround to force posix_spawn()
    # on Alpine Linux which is supported by musl.
    if not subprocess._USE_POSIX_SPAWN:
        subprocess._USE_POSIX_SPAWN = os.path.exists(ALPINE_RELEASE_FILE)  # type: ignore[misc]
177
178
179def _global_loop_exception_handler(_: Any, context: dict[str, Any]) -> None:
180 """Handle all exception inside the core loop."""
181 kwargs = {}
182 if exception := context.get("exception"):
183 kwargs["exc_info"] = (type(exception), exception, exception.__traceback__)
184
185 logger = logging.getLogger(__package__)
186 if source_traceback := context.get("source_traceback"):
187 stack_summary = "".join(traceback.format_list(source_traceback))
188 logger.error(
189 "Error doing job: %s: %s",
190 context["message"],
191 stack_summary,
192 **kwargs, # type: ignore[arg-type]
193 )
194 return
195
196 logger.error(
197 "Error doing task: %s",
198 context["message"],
199 **kwargs, # type: ignore[arg-type]
200 )
201
202
def main() -> None:
    """Start MusicAssistant."""
    # parse command line arguments (also resolves default directories)
    args = get_arguments()

    data_dir = args.data_dir
    cache_dir = args.cache_dir

    # ensure both directories exist before logging/config tries to write to them
    os.makedirs(data_dir, exist_ok=True)
    os.makedirs(cache_dir, exist_ok=True)

    # Override options through the hass add-on config file:
    # presence of options.json means we run as a Home Assistant add-on
    hass_options_file = os.path.join(data_dir, "options.json")
    if os.path.isfile(hass_options_file):
        # we are running as a hass add-on
        with open(hass_options_file, "rb") as _file:
            hass_options = json_loads(_file.read())
    else:
        hass_options = {}

    # prefer value in hass_options over the CLI argument / LOG_LEVEL env var
    log_level = hass_options.get("log_level", args.log_level).upper()
    # PYTHONDEVMODE=1 additionally turns on asyncio debug mode below
    dev_mode = os.environ.get("PYTHONDEVMODE", "0") == "1"
    # safe mode can be requested via CLI flag, add-on option or env var
    safe_mode = bool(
        args.safe_mode or hass_options.get("safe_mode") or os.environ.get("MASS_SAFE_MODE")
    )

    # setup logger (console + rotating file handler in the data dir)
    logger = setup_logger(data_dir, log_level)
    mass = MusicAssistant(data_dir, cache_dir, safe_mode)

    # enable alpine subprocess workaround
    _enable_posix_spawn()

    def on_shutdown(loop: asyncio.AbstractEventLoop) -> None:
        # aiorun shutdown callback: stop the server gracefully before the loop closes
        logger.info("shutdown requested!")
        loop.run_until_complete(mass.stop())

    async def start_mass() -> None:
        loop = asyncio.get_running_loop()
        # presumably hands log records to a queue/background handler so logging
        # does not block the event loop — see helpers.logging for details
        activate_log_queue_handler()
        if dev_mode or log_level == "DEBUG":
            loop.set_debug(True)
        loop.set_exception_handler(_global_loop_exception_handler)
        try:
            await mass.start()
        except Exception:
            # exit immediately if startup fails
            loop.stop()
            raise

    # aiorun owns the event loop lifecycle (signal handling, graceful shutdown)
    run(
        start_mass(),
        shutdown_callback=on_shutdown,
        executor_workers=16,
    )
259
260
# script entry point: only start the server when executed directly
if __name__ == "__main__":
    main()
263