diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 0000000000..732df82b6f --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,5 @@ +coverage: + status: + project: + default: + threshold: 0.3% diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2251936589..ec222e4eb1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,11 @@ name: CI -on: [push, pull_request] +on: + pull_request: + push: + branches: + - develop + - main jobs: main: @@ -8,51 +13,44 @@ jobs: fail-fast: false matrix: include: - - name: "Test: Python 3.7" - python: "3.7" - tox: py37 - - name: "Test: Python 3.8" - python: "3.8" - tox: py38 - - name: "Test: Python 3.9" - python: "3.9" - tox: py39 + - name: "Test: Python 3.11" + python: "3.11" + tox: py311 + - name: "Test: Python 3.12" + python: "3.12" + tox: py312 coverage: true - - name: "Lint: check-manifest" - python: "3.9" - tox: check-manifest - - name: "Lint: flake8" - python: "3.9" - tox: flake8 - - name: "Lint: mypy" - python: "3.9" - tox: mypy + - name: "Lint: pyright" + python: "3.12" + tox: pyright + - name: "Lint: ruff lint" + python: "3.12" + tox: ruff-lint + - name: "Lint: ruff format" + python: "3.12" + tox: ruff-format - name: "Docs" - python: "3.9" + python: "3.12" tox: docs name: ${{ matrix.name }} - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 container: ghcr.io/mopidy/ci:latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python }} + - uses: actions/checkout@v4 - name: Fix home dir permissions to enable pip caching run: chown -R root /github/home - - name: Cache pip - uses: actions/cache@v2 + - uses: actions/setup-python@v5 with: - path: ~/.cache/pip - key: ${{ runner.os }}-${{ matrix.python }}-${{ matrix.tox }}-pip-${{ hashFiles('setup.cfg') }}-${{ hashFiles('tox.ini') }} - restore-keys: | - ${{ runner.os }}-${{ matrix.python }}-${{ matrix.tox }}-pip- - - run: python -m pip install pygobject tox + python-version: ${{ matrix.python }} + cache: pip + - run: python -m pip install tox - run: python -m tox -e ${{ matrix.tox }} if: ${{ ! 
matrix.coverage }} - run: python -m tox -e ${{ matrix.tox }} -- --cov-report=xml if: ${{ matrix.coverage }} - - uses: codecov/codecov-action@v1 + - uses: codecov/codecov-action@v4 if: ${{ matrix.coverage }} + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 4f95e5b37f..0000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: "CodeQL" - -on: - push: - branches: [ develop, master ] - pull_request: - branches: [ develop, master ] - schedule: - - cron: '32 4 * * 4' - -jobs: - analyze: - name: Analyze Python - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v2 - - uses: github/codeql-action/init@v1 - with: - languages: "python" - - uses: github/codeql-action/autobuild@v1 - - uses: github/codeql-action/analyze@v1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 705fb97256..133b557af9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,18 +6,17 @@ on: jobs: release: - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.12" - name: "Install dependencies" run: python3 -m pip install build - name: "Build package" run: python3 -m build - - uses: pypa/gh-action-pypi-publish@v1.4.1 + - uses: pypa/gh-action-pypi-publish@release/v1 with: - user: __token__ password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/ubuntu-devel.yml b/.github/workflows/ubuntu-devel.yml index 9be69e80a3..4b81a6848f 100644 --- a/.github/workflows/ubuntu-devel.yml +++ b/.github/workflows/ubuntu-devel.yml @@ -8,22 +8,26 @@ on: jobs: main: - name: "Test: Python 3.9" - runs-on: ubuntu-20.04 + name: "Test: Python 3.12" + runs-on: ubuntu-22.04 # The container should be automatically updated from time to time. container: ubuntu:devel steps: - - uses: actions/checkout@v2 - name: Install dependencies run: | apt-get update DEBIAN_FRONTEND=noninteractive apt-get install -y \ + git \ + python3 \ + tox \ gstreamer1.0-plugins-bad \ gstreamer1.0-plugins-good \ gstreamer1.0-plugins-ugly \ python3-gst-1.0 \ - python3 \ - tox - - run: tox -e py39 + libcairo2-dev \ + libgirepository1.0-dev \ + libglib2.0-dev + - uses: actions/checkout@v4 + - run: tox -e py312 diff --git a/.gitignore b/.gitignore index 1960d616ba..579dcb6c64 100644 --- a/.gitignore +++ b/.gitignore @@ -6,9 +6,9 @@ .cache/ .coverage .idea -.mypy_cache/ .noseids .pytest_cache/ +.ruff_cache/ .tox MANIFEST build/ diff --git a/.mailmap b/.mailmap index ec45390467..4b3ec3bb6c 100644 --- a/.mailmap +++ b/.mailmap @@ -47,3 +47,5 @@ Hugo van Kemenade Tobias Girstmair Jonathan Jefferies Matthew H. Flamm +Keith Scroggs +Jan Iversen diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..6193b995d8 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,24 @@ +# Run `pre-commit install` to install the pre-commit hooks. +# +# Run `pre-commit autoupdate` to update all the plugins here. +# +# See https://pre-commit.com for more information. 
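# A typical local workflow with this config might look like the following
# usage sketch (the ruff hooks below use `language: system`, so ruff is
# assumed to already be installed in your development environment):
#
#   pip install pre-commit          # or: pipx install pre-commit
#   pre-commit install              # set up the git hook once per clone
#   pre-commit run --all-files      # check every file, not just staged changes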
+ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + - id: check-toml + - repo: local + hooks: + - id: ruff + name: ruff check + entry: ruff + language: system + types: [python] + - id: ruff-format + name: ruff format + entry: ruff format + language: system + types: [python] diff --git a/.readthedocs.yml b/.readthedocs.yml index 30311511de..94c9648131 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,7 +1,17 @@ version: 2 +build: + os: ubuntu-22.04 + tools: + python: "3.11" + apt_packages: + # pyobject dependencies + - libcairo2-dev + - libgirepository1.0-dev + # Diagrams in docs + - graphviz + python: - version: "3.7" install: - method: pip path: . diff --git a/AUTHORS b/AUTHORS index 2f374dd744..114f036bda 100644 --- a/AUTHORS +++ b/AUTHORS @@ -125,6 +125,19 @@ - Asmi Jafar <47150162+asmijafar20@users.noreply.github.com> - Saloni Gupta <60188408+salonigupta1@users.noreply.github.com> - Parth Verma -- Flamm, Matthew H +- Matthew H. Flamm - Matthew Gamble -- very-amused +- Keith Scroggs +- Andrzej Rybczak +- Tim Gates +- Davis Mosenkovs +- Jonathan +- Kunal Attri +- Archish Thakkar +- grdorin <94456679+grdorin@users.noreply.github.com> +- solo +- lmdc45 <47163513+lmdc45@users.noreply.github.com> +- Matthias Meulien +- Jan Iversen +- SandeshPyakurel <85491057+SandeshPyakurel@users.noreply.github.com> +- ayushrakesh <115995339+ayushrakesh@users.noreply.github.com> diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index deac78fe93..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,26 +0,0 @@ -include *.py -include *.rst -include *.toml -include .mailmap -include .readthedocs.yml -include AUTHORS -include AUTHORS.update -include LICENSE -include MANIFEST.in -include mopidy/py.typed -include pyproject.toml -include tox.ini - -recursive-include .github * - -recursive-include docs * -prune docs/_build -prune docs/.doctrees - -recursive-include extra * - -recursive-include mopidy *.conf -recursive-include mopidy/http/data * - -recursive-include tests *.py -recursive-include tests/data * diff --git a/README.rst b/README.rst index edd671f8a2..7dcd029e4a 100644 --- a/README.rst +++ b/README.rst @@ -33,7 +33,7 @@ systems, you can control the music from any phone, tablet, or computer. **Mopidy on Raspberry Pi** -The `Raspberry Pi`_ is an popular device to run Mopidy on, either using +The `Raspberry Pi`_ is a popular device to run Mopidy on, either using Raspbian, Ubuntu, or Arch Linux. Pimoroni recommends Mopidy for use with their `Pirate Audio`_ audio gear for Raspberry Pi. @@ -45,7 +45,7 @@ audio jukebox system for Raspberry Pi. Mopidy's extension support and Python, JSON-RPC, and JavaScript APIs make Mopidy a perfect base for your projects. In one hack, a Raspberry Pi was embedded in an old cassette player. The buttons -and volume control are wired up with GPIO on the Raspberry Pi, and is used to +and volume control are wired up with GPIO on the Raspberry Pi, and are used to control playback through a custom Mopidy extension. The cassettes have NFC tags used to select playlists from Spotify. @@ -63,6 +63,18 @@ To get started with Mopidy, begin by reading the `installation docs `_. +**Contributing** + +Begin by reading the +`contributing `_ +section of our documentation. +If you are a developer, please also read +`Development environment `_ +and/or +`Extension development `_. +We welcome all kinds of help with bug fixing, testing, documentation, and supporting other users. 
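As a rough sketch, the development setup described in the updated
``docs/devenv.rst`` in this same change boils down to the following commands
(paths such as ``~/mopidy-dev/`` are only the examples used in those docs)::

    python3 -m venv ~/mopidy-dev/.venv
    . ~/mopidy-dev/.venv/bin/activate
    cd ~/mopidy-dev/
    git clone https://github.com/mopidy/mopidy.git
    cd ~/mopidy-dev/mopidy/
    pip install --upgrade --editable ".[dev]"
    tox -e ci    # run roughly the same checks as CI before pushing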
+ + **Project resources** - `Documentation `_ @@ -71,19 +83,19 @@ To get started with Mopidy, begin by reading the - `Source code `_ - `Issue tracker `_ -.. image:: https://img.shields.io/pypi/v/Mopidy.svg?style=flat - :target: https://pypi.python.org/pypi/Mopidy/ +.. image:: https://img.shields.io/pypi/v/mopidy + :target: https://pypi.org/project/mopidy/ :alt: Latest PyPI version -.. image:: https://img.shields.io/github/workflow/status/mopidy/mopidy/CI - :target: https://github.com/mopidy/mopidy/actions +.. image:: https://img.shields.io/github/actions/workflow/status/mopidy/mopidy/ci.yml + :target: https://github.com/mopidy/mopidy/actions/workflows/ci.yml :alt: CI build status -.. image:: https://img.shields.io/readthedocs/mopidy.svg +.. image:: https://img.shields.io/readthedocs/mopidy :target: https://docs.mopidy.com/ :alt: Read the Docs build status -.. image:: https://img.shields.io/codecov/c/github/mopidy/mopidy/develop.svg +.. image:: https://img.shields.io/codecov/c/github/mopidy/mopidy :target: https://codecov.io/gh/mopidy/mopidy :alt: Test coverage diff --git a/docs/api/backend.rst b/docs/api/backend.rst index 4f8e709628..27a0a55ca7 100644 --- a/docs/api/backend.rst +++ b/docs/api/backend.rst @@ -27,8 +27,7 @@ GStreamer knows how to play right before playback. For example: - Spotify already has its own URI scheme (``spotify:track:...``, ``spotify:playlist:...``, etc.) used throughout their applications, and thus - Mopidy-Spotify simply uses the same URI scheme. Playback is handled by - pushing raw audio data into a GStreamer ``appsrc`` element. + Mopidy-Spotify simply uses the same URI scheme. - Mopidy-SoundCloud created it's own URI scheme, after the model of Spotify, and uses URIs of the following forms: ``soundcloud:search``, diff --git a/AUTHORS.update b/docs/authors-update.sh similarity index 100% rename from AUTHORS.update rename to docs/authors-update.sh diff --git a/docs/authors.rst b/docs/authors.rst index 4a807d3f5c..6764543572 100644 --- a/docs/authors.rst +++ b/docs/authors.rst @@ -4,7 +4,7 @@ Authors ******* -Mopidy is copyright 2009-2021 Stein Magnus Jodal and contributors. Mopidy is +Mopidy is copyright 2009-2023 Stein Magnus Jodal and contributors. Mopidy is licensed under the `Apache License, Version 2.0 `_. diff --git a/docs/changelog.rst b/docs/changelog.rst index 96b5e7cbec..ca025a0caf 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -9,41 +9,217 @@ This changelog is used to track all major changes to Mopidy. For older releases, see :ref:`history`. -v3.2.1 (UNRELEASED) +v4.0.0 (UNRELEASED) =================== -- HTTP: Fix support for websocket clients connecting/disconnecting - during broadcast. (PR: :issue:`1993`) +Mopidy 4.0 is a backward-incompatible release because we've dropped support for +old versions of our dependencies and a number of deprecated APIs. -- Core: Fix startup crash when loading invalid extensions. (PR: - :issue:`1990`) +Dependencies +------------ -- HTTP: Improve CSRF protection Content-Type check. (PR: :issue:`1997`) +- Python >= 3.11 is now required. Python 3.7-3.10 are no longer supported. -- Core: Fix error-handling when fetching backend support info. (PR: - :issue:`1964`) +- GStreamer >= 1.22.0 is now required. + +- PyGObject >= 3.42 is now an explicit Python dependency, and not something we + assume you'll install together with GStreamer. + +- Pykka >= 4.0 is now required. + +- Requests >= 2.28 is now required. + +- Setuptools >= 66 is now required. + +- Tornado >= 6.2 is now required. 
+ +- Replaced :mod:`pkg_resources` with :mod:`importlib.metadata` from Python's + standard library. + +Core API +-------- + +Changes to the Core API may affect Mopidy clients. + +Some of the changes in the Core API are related to replacing the use of +full ``TlTrack`` objects as API arguments with tracklist IDs, ``tlid``. +This is especially relevant for remote clients, like web clients, which may +pass a lot less data over the network when using tracklist IDs in API calls. + +Root object +^^^^^^^^^^^ + +- The :class:`mopidy.core.Core` class now requires the `config` argument to be + present. As this argument is provided by Mopidy itself at runtime, this + should only affect the setup of extension's test suites. + +Library controller +^^^^^^^^^^^^^^^^^^ + +- No changes so far. + +Playback controller +^^^^^^^^^^^^^^^^^^^ + +- :meth:`mopidy.core.PlaybackController.play` + no longer accepts ``TlTrack`` objects, + which has been deprecated since Mopidy 3.0. + Use tracklist IDs (``tlid``) instead. + (Fixes :issue:`1855`, PR: :issue:`2150`) + +Playlist controller +^^^^^^^^^^^^^^^^^^^ + +- No changes so far. + +Tracklist controller +^^^^^^^^^^^^^^^^^^^^ + +- No changes so far. + +Backend API +----------- + +Changes to the Backend API may affect Mopidy backend extensions. + +- No changes so far. + +Models +------ + +Changes to the data models may affect any Mopidy extension or client. + +- No changes so far. + +Audio API +--------- + +Changes to the Audio API may affect a few Mopidy backend extensions. + +- Removed APIs only used by Mopidy-Spotify's bespoke audio delivery mechanism, + which has not been used since Spotify shut down their libspotify APIs in + May 2022. The removed functions/methods are: + + - :meth:`mopidy.audio.Audio.emit_data` + - :meth:`mopidy.audio.Audio.set_appsrc` + - :meth:`mopidy.audio.Audio.set_metadata` + - :func:`mopidy.audio.calculate_duration` + - :func:`mopidy.audio.create_buffer` + - :func:`mopidy.audio.millisecond_to_clocktime` + +Extension support +----------------- + +- The command :command:`mopidy deps` no longer repeats transitive dependencies + that have already been listed. This reduces the length of the command's output + drastically. (PR: :issue:`2152`) + +Internals +--------- + +- Dropped split between the ``main`` and ``develop`` branches. We now use + ``main`` for all development, and have removed the ``develop`` branch. + +- Added type hints to most of the source code. + +- Switched from mypy to pyright for type checking. + + +v3.4.2 (2023-11-01) +=================== + +- Deps: Python 3.11 and 3.12 are now included in the testing matrix. + +- M3U: Stop following symlinks when :confval:`file/follow_symlinks` is false. + (PR: :issue:`2094`) + +- zeroconf: Fix exception on shutdown if `dbus` is not installed. + +- Docs: Fix crash when building docs on recent Sphinx versions. + +- Dev: Make stacktraces from deprecation warnings include the offending call + site, to help upgrade API usage in extensions. + +- Dev: Upgrade CI workflows to fix Node.js 12 deprecation notices and avoid + Codecov's bash uploader. + +- Dev: Make tests pass on macOS. (PR: :issue:`2092`) + +- Dev: Incease test coverage of Mopidy-File to 100%. (PR: :issue:`2096`) + +- Dev: Added ``"tox -e ci``", to allow easy CI check before ``git push``. + + +v3.4.1 (2022-12-07) +=================== + +- HTTP: Fix non-optional :confval:`http/allowed_origins` config setting. 
(PR: + :issue:`2066`) + + +v3.4.0 (2022-11-28) +=================== + +- Config: Handle DBus "Algorithm plain is not supported" error. (PR: :issue:`2061`) + +- File: Fix uppercase :confval:`file/excluded_file_extensions`. (PR: + :issue:`2063`) + +- Add :meth:`mopidy.backend.PlaybackProvider.on_source_setup` which can be + implemented by Backend playback providers that want to set GStreamer source + properties in the ``source-setup`` callback. (PR: :issue:`2060`) + +- HTTP: Improve handling of :confval:`http/allowed_origins` config setting. (PR: :issue:`2054`) + + +v3.3.0 (2022-04-29) +=================== + +- Core: Fixes invalid verbosity logging levels. (Fixes: :issue:`1947`, + PR: :issue:`2021`) - Core: Fix TypeError exception when playing track with unnamed artists. (Fixes: :issue:`1991`, PR: :issue:`2012`) -- Core: Fixes invalid verbosity logging levels. - (Fixes: :issue:`1947`, PR: :issue:`2021`) +- Core: Fix startup crash when loading invalid extensions. (PR: + :issue:`1990`) -- Audio: Fix TypeError when handling create output pipeline errors. - (Fixes: :issue:`1924`, PR: :issue:`2040`) +- Core: Fix error-handling when fetching backend support info. (PR: + :issue:`1964`) - Core: Align values supported by the ``field`` argument to :meth:`mopidy.core.LibraryController.get_distinct` with Mopidy search query fields, with the exception of 'any'. Deprecated field 'track' with the goal of removing it in the next major release, use 'track_name' instead. Backends should support both `track` and `track_name` until they require - a version of Mopidy where `track` has been removed. + a version of Mopidy where `track` has been removed. (Fixes: :issue:`1900`, PR: :issue:`1899`) - Core: Add ``musicbrainz_albumid``, ``musicbrainz_artistid``, ``musicbrainz_trackid``, and ``disc_no`` to the permitted search query fields. (Fixes: :issue:`1900`, PR: :issue:`1899`) +- Audio: Fix TypeError when handling create output pipeline errors. + (Fixes: :issue:`1924`, PR: :issue:`2040`) + +- Audio: Fix seek when stopped. (Fixes: :issue:`2005`, PR: :issue:`2006`) + +- Config: Fix support for inline comments, a regression introduced during + our Python 3 migration. (Fixes: :issue:`1868`, PR: :issue:`2041`) + +- HTTP: Fix missing CORS headers on RPC response. (Fixes: :issue:`2028`, + PR: :issue:`2029`) + +- HTTP: Improve CSRF protection Content-Type check. (PR: :issue:`1997`) + +- HTTP: Fix support for websocket clients connecting/disconnecting + during broadcast. (PR: :issue:`1993`) + +- Add Python 3.10 to our test matrix. + +- Core: Added and improved configuration parsing code for extension + developers. (PR: :issue:`2010`) + v3.2.0 (2021-07-08) =================== diff --git a/docs/clients.rst b/docs/clients.rst index 3e02268943..d237bf41bf 100644 --- a/docs/clients.rst +++ b/docs/clients.rst @@ -56,11 +56,19 @@ Mopidy extensions: Web-based MPD clients --------------------- -Lastly, there are several web based MPD clients, which doesn't use the +There are several web based MPD clients, which doesn't use the :ref:`ext-http` frontend at all, but connect to Mopidy through the -Mopidy-MPD frontend. For a list of those, see the "Web clients" section of the -`MPD wiki's clients list `_. +Mopidy-MPD frontend. For a list of those, see the "Web clients" +section of the `MPD wiki's clients list +`_. +Standalone applications +----------------------- + +Lastly, there are Mopidy clients implemented as standalone +applications: + +- `Argos `_ .. 
_mpd-clients: diff --git a/docs/codestyle.rst b/docs/codestyle.rst index 53be99fd79..75327909c2 100644 --- a/docs/codestyle.rst +++ b/docs/codestyle.rst @@ -6,19 +6,13 @@ Code style All projects in the Mopidy organization follows the following code style: -- Automatically format all code with `Black `_. - Use Black's string normalization, which prefers ``"`` quotes over ``'``, - unless the string contains ``"``. +- Automatically format all code with `Ruff `_, + using the default configuration. -- Automatically sort imports using `isort `_. - -- Follow :pep:`8`. - Run `flake8 `_ to check your code - against the guidelines. +- Automatically sort imports using Ruff_, using the default configuration. -The strict adherence to Black and flake8 are enforced by our CI setup. -Pull requests that do not pass these checks will not be merged. +- As far as reasonable and possible, comply with the lint warnings produced by + Ruff_. -For more general advise, -take a look at :pep:`20` for a nice peek into a general mindset -useful for Python coding. +The strict adherence to Ruff are enforced by our CI setup. +Pull requests that do not pass these checks will not be merged. diff --git a/docs/conf.py b/docs/conf.py index f17b3c9aef..9bc97104f8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,59 +1,12 @@ """Mopidy documentation build configuration file""" +from importlib.metadata import version -import os -import sys - -sys.path.insert(0, os.path.abspath(os.path.dirname(__file__))) -sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../")) - -from mopidy.internal.versioning import get_version # isort:skip # noqa - - -# -- Workarounds to have autodoc generate API docs ---------------------------- - - -class Mock: - def __init__(self, *args, **kwargs): - pass - - def __call__(self, *args, **kwargs): - return Mock() - - def __or__(self, other): - return Mock() - - def __mro_entries__(self, bases): - return tuple() - - @classmethod - def __getattr__(cls, name): - if name == "get_system_config_dirs": # GLib.get_system_config_dirs() - return list - elif name == "get_user_config_dir": # GLib.get_user_config_dir() - return str - else: - return Mock() - - -MOCK_MODULES = [ - "dbus", - "dbus.mainloop", - "dbus.mainloop.glib", - "dbus.service", - "mopidy.internal.gi", -] -for mod_name in MOCK_MODULES: - sys.modules[mod_name] = Mock() - - -# -- Custom Sphinx object types ----------------------------------------------- +# -- Custom Sphinx setup ------------------------------------------------------ def setup(app): - from sphinx.ext.autodoc import cut_lines - - app.connect("autodoc-process-docstring", cut_lines(4, what=["module"])) + # Add custom Sphinx object type for Mopidy's config values app.add_object_type( "confval", "confval", @@ -64,7 +17,7 @@ def setup(app): # -- General configuration ---------------------------------------------------- -needs_sphinx = "1.3" +needs_sphinx = "5.3" extensions = [ "sphinx.ext.autodoc", @@ -72,6 +25,8 @@ def setup(app): "sphinx.ext.graphviz", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", + "sphinx_autodoc_typehints", + "sphinx_rtd_theme", ] templates_path = ["_templates"] @@ -79,10 +34,10 @@ def setup(app): master_doc = "index" project = "Mopidy" -copyright = "2009-2021, Stein Magnus Jodal and contributors" +copyright = "2009-2023, Stein Magnus Jodal and contributors" # noqa: A001 -release = get_version() +release = version("Mopidy") version = ".".join(release.split(".")[:2]) # To make the build reproducible, avoid using today's date in the manpages @@ -128,17 
+83,32 @@ def setup(app): ] +# -- Options for autodoc extension -------------------------------------------- + +autodoc_mock_imports = [ + "dbus", + "mopidy.internal.gi", +] + +typehints_document_rtype = True +typehints_use_signature = False +typehints_use_signature_return = True + + # -- Options for extlink extension -------------------------------------------- extlinks = { - "issue": ("https://github.com/mopidy/mopidy/issues/%s", "#"), - "commit": ("https://github.com/mopidy/mopidy/commit/%s", "commit "), - "js": ("https://github.com/mopidy/mopidy.js/issues/%s", "mopidy.js#"), + "issue": ("https://github.com/mopidy/mopidy/issues/%s", "#%s"), + "commit": ("https://github.com/mopidy/mopidy/commit/%s", "commit %s"), + "js": ("https://github.com/mopidy/mopidy.js/issues/%s", "mopidy.js#%s"), "mpris": ( "https://github.com/mopidy/mopidy-mpris/issues/%s", - "mopidy-mpris#", + "mopidy-mpris#%s", + ), + "discuss": ( + "https://discourse.mopidy.com/t/%s", + "discourse.mopidy.com/t/%s", ), - "discuss": ("https://discourse.mopidy.com/t/%s", "discourse.mopidy.com/t/"), } diff --git a/docs/config.rst b/docs/config.rst index 1381761ed8..c96e65aae0 100644 --- a/docs/config.rst +++ b/docs/config.rst @@ -63,7 +63,7 @@ below. This is the default configuration for Mopidy itself: -.. literalinclude:: ../mopidy/config/default.conf +.. literalinclude:: ../src/mopidy/config/default.conf :language: ini diff --git a/docs/contributing.rst b/docs/contributing.rst index 2dc21f1c3a..c0d012b05f 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -73,21 +73,13 @@ Pull request guidelines Making sure your ideas and solutions are aligned with other contributors greatly increases the odds of your pull request being quickly accepted. -#. Create a new branch, based on the ``develop`` branch, for every feature or +#. Create a new branch, based on the ``main`` branch, for every feature or bug fix. Keep branches small and on topic, as that makes them far easier to - review. We often use the following naming convention for branches: - - - Features get the prefix ``feature/``, e.g. - ``feature/track-last-modified-as-ms``. - - - Bug fixes get the prefix ``fix/``, e.g. ``fix/902-consume-track-on-next``. - - - Improvements to the documentation get the prefix ``docs/``, e.g. - ``docs/add-ext-mopidy-spotify-tunigo``. + review. #. Follow the :ref:`code style `, especially make sure the - ``flake8`` linter does not complain about anything. Our CI setup will - check that your pull request is "flake8 clean". See :ref:`code-linting`. + ``ruff`` linter does not complain about anything. Our CI setup will + check that your pull request is "ruff clean". See :ref:`code-linting`. #. Include tests for any new feature or substantial bug fix. See :ref:`running-tests`. @@ -120,13 +112,5 @@ Pull request guidelines - `On commit messages `_ -#. Send a pull request to the ``develop`` branch. See the `GitHub pull request +#. Send a pull request to the ``main`` branch. See the `GitHub pull request docs `_ for help. - -.. note:: - - If you are contributing a bug fix for a specific minor version of Mopidy - you should create the branch based on ``release-x.y`` instead of - ``develop``. When the release is done the changes will be merged back into - ``develop`` automatically as part of the normal release process. See - :ref:`creating-releases`. diff --git a/docs/devenv.rst b/docs/devenv.rst index da2a82710b..2490abf24f 100644 --- a/docs/devenv.rst +++ b/docs/devenv.rst @@ -46,32 +46,27 @@ Mopidy and extensions. 
Make a virtualenv ----------------- -Make a Python `virtualenv `_ for Mopidy -development. The virtualenv will wall off Mopidy and its dependencies from the -rest of your system. All development and installation of Python dependencies, -versions of Mopidy, and extensions are done inside the virtualenv. This way -your regular Mopidy install, which you set up in the first step, is unaffected -by your hacking and will always be working. +Make a Python virtualenv for Mopidy development. +The virtualenv will wall off Mopidy and its dependencies from the rest of your system. +All development and installation of Python dependencies, +versions of Mopidy, and extensions are done inside the virtualenv. +This way your regular Mopidy install, +which you set up in the first step, +is unaffected by your hacking and will always be working. -Most of us use the `virtualenvwrapper -`_ to ease working with -virtualenvs, so that's what we'll be using for the examples here. First, -install and setup virtualenvwrapper as described in their docs. +To create a virtualenv in the Mopidy workspace directory, run:: -To create a virtualenv named ``mopidy`` which uses Python 3.7, allows access to -system-wide packages like GStreamer, and uses the Mopidy workspace directory as -the "project path", run:: - - mkvirtualenv -a ~/mopidy-dev --python $(which python3.7) \ - --system-site-packages mopidy + python3 -m venv ~/mopidy-dev/.venv Now, each time you open a terminal and want to activate the ``mopidy`` virtualenv, run:: - workon mopidy + . ~/mopidy-dev/.venv/bin/activate -This will both activate the ``mopidy`` virtualenv, and change the current -working directory to ``~/mopidy-dev``. +There are lots of ways to set up your shell to automatically activate the virtualenv, +e.g. when changing directory into ``~/mopidy-dev/`` or a subdirectory. +As this is just convenience and not strictly required, +it is left as an exercise for the reader. Clone the repo from GitHub @@ -80,13 +75,14 @@ Clone the repo from GitHub Once inside the virtualenv, it's time to clone the ``mopidy/mopidy`` Git repo from GitHub:: + cd ~/mopidy-dev/ git clone https://github.com/mopidy/mopidy.git When you've cloned the ``mopidy`` Git repo, ``cd`` into it:: cd ~/mopidy-dev/mopidy/ -With a fresh clone of the Git repo, you should start out on the ``develop`` +With a fresh clone of the Git repo, you should start out on the ``main`` branch. This is where all features for the next feature release land. To confirm that you're on the right branch, run:: @@ -107,54 +103,40 @@ Git repo in an "editable" form:: pip install --upgrade --editable . -This will not copy the source code into the virtualenv's ``site-packages`` -directory, but instead create a link there pointing to the Git repo. Using -``cdsitepackages`` from virtualenvwrapper, we can quickly show that the -installed :file:`Mopidy.egg-link` file points back to the Git repo:: - - $ cdsitepackages - $ cat Mopidy.egg-link - /home/user/mopidy-dev/mopidy - .% - $ +When using the ``--editable`` flag, the source code is not copied into the +virtualenv's ``site-packages`` directory, but instead creates a link there +pointing to the Git repo. This way, you can change the source code in the Git +repo and the changes will be visible inside the virtualenv without having to +reinstall Mopidy. It will also create a ``mopidy`` executable inside the virtualenv that will -always run the latest code from the Git repo. 
Using another -virtualenvwrapper command, ``cdvirtualenv``, we can show that too:: +always run the latest code from the Git repo:: - $ cdvirtualenv - $ cat bin/mopidy + $ cat ~/mopidy-dev/.venv/bin/mopidy ... -The executable should contain something like this, using :mod:`pkg_resources` -to look up Mopidy's "console script" entry point:: - - #!/home/user/virtualenvs/mopidy/bin/python2 - # EASY-INSTALL-ENTRY-SCRIPT: 'Mopidy==0.19.5','console_scripts','mopidy' - __requires__ = 'Mopidy==0.19.5' - import sys - from pkg_resources import load_entry_point +This file is on the path when the virtualenv is active, so you can run it from +anywhere, simply by running:: - if __name__ == '__main__': - sys.exit( - load_entry_point('Mopidy==0.19.5', 'console_scripts', 'mopidy')() - ) + mopidy .. note:: - It still works to run ``python mopidy`` directly on the - :file:`~/mopidy-dev/mopidy/mopidy/` Python package directory, but if - you don't run the ``pip install`` command above, the extensions bundled - with Mopidy will not be registered with :mod:`pkg_resources`, making Mopidy - quite useless. - -Third, the ``pip install`` command will register the bundled Mopidy -extensions so that Mopidy may find them through :mod:`pkg_resources`. The -result of this can be seen in the Git repo, in a new directory called -:file:`Mopidy.egg-info`, which is ignored by Git. The -:file:`Mopidy.egg-info/entry_points.txt` file is of special interest as it -shows both how the above executable and the bundled extensions are connected to -the Mopidy source code: + It is also possible to run Python apps directly, + e.g. using ``python3 src/mopidy`` directly on the + :file:`~/mopidy-dev/mopidy/src/mopidy/` Python package directory. + However, if you don't run the install command above, + the extensions bundled with Mopidy will not be registered and made available + for use, making Mopidy quite useless. + +Third, the install command will register the bundled Mopidy +extensions so that Mopidy may find them through :mod:`importlib`. +The result of this can be seen in a file named :file:`entry_points.txt` +which can be found inside the virtualenv dir, e.g. +:file:`~/mopidy-dev/.venv/lib/python3.11/site-packages/Mopidy-4.0.0.dist-info/entry_points.txt`. +The :file:`entry_points.txt` file is of special interest as it shows both how +the above executable and the bundled extensions are connected to the Mopidy +source code: .. code-block:: ini @@ -162,28 +144,12 @@ the Mopidy source code: mopidy = mopidy.__main__:main [mopidy.ext] + file = mopidy.file:Extension http = mopidy.http:Extension + m3u = mopidy.m3u:Extension softwaremixer = mopidy.softwaremixer:Extension stream = mopidy.stream:Extension -.. warning:: - - It's not uncommon to clean up in the Git repo now and then, e.g. by running - ``git clean``. - - If you do this, then the :file:`Mopidy.egg-info` directory will be removed, - and :mod:`pkg_resources` will no longer know how to locate the "console - script" entry point or the bundled Mopidy extensions. - - The fix is simply to run the install command again:: - - pip install --editable . - -Finally, we can go back to the workspace, again using a virtualenvwrapper -tool:: - - cdproject - Install development tools ------------------------- @@ -191,12 +157,13 @@ Install development tools Before continuing, you will probably want to install the development tools we use as well. 
These can be installed into the active virtualenv by running:: + cd ~/mopidy-dev/mopidy/ pip install --upgrade --editable ".[dev]" Note that this is the same command as you used to install Mopidy from the Git repo, with the addition of the ``[dev]`` suffix after ``.``. This makes pip install the "dev" set of extra dependencies. Exactly what the "dev" set -includes are defined in ``setup.cfg``. +includes are defined in :file:`pyproject.toml`. To upgrade the development tools in the future, just rerun the exact same command. @@ -252,20 +219,27 @@ is all you need to know. Always run this command before pushing your changes to GitHub. If you take a look at the tox config file, :file:`tox.ini`, you'll see that tox -runs tests in multiple environments, including a ``flake8`` environment that +runs tests in multiple environments, including a ``ruff`` environment that lints the source code for issues and a ``docs`` environment that tests that the documentation can be built. You can also limit tox to just test specific -environments using the ``-e`` option, e.g. to run just unit tests:: +environments using the ``-e`` option, e.g. to run just unit tests on Python 3.11:: - tox -e py37 + tox -e py311 To learn more, see the `tox documentation `_ . +Before submitting a pull request, we recommend running:: + + tox -e ci + +This will locally run similar tests to what we use in our CI runs and help us to +merge high-quality contributions. + Running unit tests ------------------ -Under the hood, ``tox -e py37`` will use `pytest `_ +Under the hood, ``tox -e py311`` will use `pytest `_ as the test runner. We can also use it directly to run all tests:: pytest @@ -278,12 +252,12 @@ We can limit to just tests in a single directory to save time:: pytest tests/http/ -With the help of the pytest-xdist plugin, we can run tests with four Python +With the help of the ``pytest-xdist`` plugin, we can run tests with four Python processes in parallel, which usually cuts the test time in half or more:: pytest -n 4 -Another useful feature from pytest-xdist, is the possibility to stop on the +Another useful feature from ``pytest-xdist``, is the possibility to stop on the first test failure, watch the file system for changes, and then rerun the tests. This makes for a very quick code-test cycle:: @@ -331,26 +305,26 @@ Style checking and linting We're quite pedantic about :ref:`codestyle` and try hard to keep the Mopidy code base a very clean and nice place to work in. -Luckily, you can get very far by using the `flake8 -`_ linter to check your code for issues before -submitting a pull request. Mopidy passes all of flake8's checks, with only a -very few exceptions configured in :file:`setup.cfg`. You can either run the -``flake8`` tox environment, like our CI setup will do on your pull request:: +Luckily, you can get very far by using the `ruff +`_ linter to check your code for issues before +submitting a pull request. Mopidy's ruff rules are configured in :file:`pyproject.toml`. +You can either run the ``ruff`` tox environment, like our CI setup will do on +your pull request:: - tox -e flake8 + tox -e ruff -Or you can run flake8 directly:: +Or you can run ruff directly:: - flake8 + ruff . If successful, the command will not print anything at all. .. note:: - In some rare cases it doesn't make sense to listen to flake8's warnings. In + In some rare cases it doesn't make sense to listen to ruff's warnings. In those cases, ignore the check by appending ``# noqa: `` to the source line that triggers the warning. 
The ``# noqa`` part will make - flake8 skip all checks on the line, while the warning code will help other + ruff skip all checks on the line, while the warning code will help other developers lookup what you are ignoring. @@ -400,8 +374,8 @@ Working on extensions Much of the above also applies to Mopidy extensions, though they're often a bit simpler. They don't have documentation sites and their test suites are either -small and fast, or sadly missing entirely. Most of them use tox and flake8, and -pytest can be used to run their test suites. +small and fast, or sadly missing entirely. Most of them use tox to run various +linters, and pytest can be used to run their test suites. .. contents:: :local: @@ -413,7 +387,7 @@ Installing extensions As always, the ``mopidy`` virtualenv should be active when working on extensions:: - workon mopidy + . ~/mopidy-dev/.venv/bin/activate Just like with non-development Mopidy installations, you can install extensions using pip:: @@ -423,7 +397,7 @@ using pip:: Installing an extension from its Git repo works the same way as with Mopidy itself. First, go to the Mopidy workspace:: - cdproject # or cd ~/mopidy-dev/ + cd ~/mopidy-dev/ Clone the desired Mopidy extension:: @@ -431,17 +405,17 @@ Clone the desired Mopidy extension:: Change to the newly created extension directory:: - cd mopidy-spotify/ + cd ~/mopidy-dev/mopidy-spotify/ Then, install the extension in "editable" mode, so that it can be imported from anywhere inside the virtualenv and the extension is registered and discoverable -through :mod:`pkg_resources`:: +through :mod:`importlib`:: pip install --editable . Every extension will have a ``README.rst`` file. It may contain information about extra dependencies required, development process, etc. Extensions usually -have a changelog in the readme file. +have a changelog in their GitHub relases page. Upgrading extensions @@ -451,10 +425,7 @@ Extensions often have a much quicker life cycle than Mopidy itself, often with daily releases in periods of active development. To find outdated extensions in your virtualenv, you can run:: - pip search mopidy - -This will list all available Mopidy extensions and compare the installed -versions with the latest available ones. + pip list --outdated To upgrade an extension installed with pip, simply use pip:: @@ -530,12 +501,12 @@ Creating a branch Fetch the latest data from all remotes without affecting your working directory:: - git remote update + git remote update --prune Now, we are ready to create and checkout a new branch off of the upstream -``develop`` branch for our work:: +``main`` branch for our work:: - git checkout -b fix/666-crash-on-foo upstream/develop + git checkout -b fix-crash-on-foo upstream/main Do the work, while remembering to adhere to code style, test the changes, make necessary updates to the documentation, and making small commits with good @@ -548,7 +519,7 @@ Creating a pull request When everything is done and committed, push the branch to your fork on GitHub:: - git push myuser fix/666-crash-on-foo + git push myuser fix-crash-on-foo Go to the repository on GitHub where you want the change merged, in this case https://github.com/mopidy/mopidy, and `create a pull request @@ -558,27 +529,18 @@ https://github.com/mopidy/mopidy, and `create a pull request Updating a pull request ----------------------- -When the pull request is created, our CI setup will run all tests on it. If -something fails, you'll get notified by email. 
You might as well just fix the -issues right away, as we won't merge a pull request without all CI builds -being green. See :ref:`running-tests` on how to run the same tests locally as +When the pull request is created, our CI setup will run all tests on it. +If something fails, you'll usually get a notification from GitHub. +You might as well just fix the issues right away, +as we won't merge a pull request without all CI builds being green. +See :ref:`running-tests` on how to run the same tests locally as our CI setup runs on your pull request. When you've fixed the issues, you can update the pull request simply by pushing more commits to the same branch in your fork:: - git push myuser fix/666-crash-on-foo + git push myuser fix-crash-on-foo Likewise, when you get review comments from other developers on your pull request, you're expected to create additional commits which addresses the comments. Push them to your branch so that the pull request is updated. - -.. note:: - - Setup the remote as the default push target for your branch:: - - git branch --set-upstream-to myuser/fix/666-crash-on-foo - - Then you can push more commits without specifying the remote:: - - git push diff --git a/docs/ext/file.rst b/docs/ext/file.rst index 7a540464c2..0281f9729b 100644 --- a/docs/ext/file.rst +++ b/docs/ext/file.rst @@ -5,9 +5,10 @@ Mopidy-File ************ Mopidy-File is an extension for playing music from your local music archive. -It is bundled with Mopidy and enabled by default. +It is bundled with Mopidy and enabled by default. It allows you to browse through your local file system. Only files that are considered playable will be shown. +For large music collections and search functionality consider `Mopidy-Local `_ instead. This backend handles URIs starting with ``file:``. @@ -19,7 +20,7 @@ Configuration See :ref:`config` for general help on configuring Mopidy. -.. literalinclude:: ../../mopidy/file/ext.conf +.. literalinclude:: ../../src/mopidy/file/ext.conf :language: ini .. confval:: file/enabled diff --git a/docs/ext/http.rst b/docs/ext/http.rst index d79c575c49..27f1fd8472 100644 --- a/docs/ext/http.rst +++ b/docs/ext/http.rst @@ -47,7 +47,7 @@ Configuration See :ref:`config` for general help on configuring Mopidy. -.. literalinclude:: ../../mopidy/http/ext.conf +.. literalinclude:: ../../src/mopidy/http/ext.conf :language: ini .. confval:: http/enabled diff --git a/docs/ext/m3u.rst b/docs/ext/m3u.rst index 591e203e84..e68a8a0a17 100644 --- a/docs/ext/m3u.rst +++ b/docs/ext/m3u.rst @@ -33,7 +33,7 @@ Configuration See :ref:`config` for general help on configuring Mopidy. -.. literalinclude:: ../../mopidy/m3u/ext.conf +.. literalinclude:: ../../src/mopidy/m3u/ext.conf :language: ini .. confval:: m3u/enabled diff --git a/docs/ext/softwaremixer.rst b/docs/ext/softwaremixer.rst index 4297870b9b..dae50fd0cc 100644 --- a/docs/ext/softwaremixer.rst +++ b/docs/ext/softwaremixer.rst @@ -25,7 +25,7 @@ used. See :ref:`config` for general help on configuring Mopidy. -.. literalinclude:: ../../mopidy/softwaremixer/ext.conf +.. literalinclude:: ../../src/mopidy/softwaremixer/ext.conf :language: ini .. confval:: softwaremixer/enabled diff --git a/docs/ext/stream.rst b/docs/ext/stream.rst index 088e25310d..4dff90d7e3 100644 --- a/docs/ext/stream.rst +++ b/docs/ext/stream.rst @@ -27,7 +27,7 @@ Configuration See :ref:`config` for general help on configuring Mopidy. -.. literalinclude:: ../../mopidy/stream/ext.conf +.. literalinclude:: ../../src/mopidy/stream/ext.conf :language: ini .. 
confval:: stream/enabled diff --git a/docs/extensiondev.rst b/docs/extensiondev.rst index 1637b948a6..7855d33239 100644 --- a/docs/extensiondev.rst +++ b/docs/extensiondev.rst @@ -495,7 +495,7 @@ an example of how to use it:: ... f'{mopidy_soundspot.Extension.dist_name}/' ... f'{mopidy_soundspot.__version__}' ... ) - 'Mopidy-SoundSpot/2.0.0 Mopidy/3.0.0 Python/3.7.5' + 'Mopidy-SoundSpot/2.0.0 Mopidy/3.0.0 Python/3.9.2' Example using Requests sessions ------------------------------- @@ -779,5 +779,5 @@ the events fire:: For further details and examples, refer to the -`/tests `_ -directory on the Mopidy development branch. +`/tests `_ +directory in the Mopidy repo. diff --git a/docs/history/changelog-0.x.rst b/docs/history/changelog-0.x.rst index de3cf00a4f..435cffa0ac 100644 --- a/docs/history/changelog-0.x.rst +++ b/docs/history/changelog-0.x.rst @@ -573,7 +573,7 @@ guys. Thanks to everyone that has contributed! **Internal changes** - Events from the audio actor, backends, and core actor are now emitted - asyncronously through the GObject event loop. This should resolve the issue + asynchronously through the GObject event loop. This should resolve the issue that has blocked the merge of the EOT-vs-EOS fix for a long time. @@ -1074,7 +1074,7 @@ one new. - Developers running Mopidy from a Git clone now need to run ``python setup.py develop`` to register the bundled extensions. If you don't do this, Mopidy - will not find any frontends or backends. Note that we highly recomend you do + will not find any frontends or backends. Note that we highly recommend you do this in a virtualenv, not system wide. As a bonus, the command also gives you a ``mopidy`` executable in your search path. @@ -1538,7 +1538,7 @@ backends and the future HTTP frontend. dependencies where looked up in Pykka's actor registry. - All properties in the core API now got getters, and setters if setting them - is allowed. They are not explictly listed in the docs as they have the same + is allowed. They are not explicitly listed in the docs as they have the same behavior as the documented properties, but they are available and may be used. This is useful for the future HTTP frontend. @@ -1744,7 +1744,7 @@ resolved a bunch of related issues. - :issue:`150`: Fix bug which caused some clients to block Mopidy completely. The bug was caused by some clients sending ``close`` and then shutting down - the connection right away. This trigged a situation in which the connection + the connection right away. This triggered a situation in which the connection cleanup code would wait for an response that would never come inside the event loop, blocking everything else. @@ -2041,7 +2041,7 @@ minor bugs. The GStreamer `appsrc` bin wasn't being linked due to lack of default caps. (Fixes: :issue:`85`) -- Fix crash in :mod:`mopidy.mixers.nad` that occures at startup when the +- Fix crash in :mod:`mopidy.mixers.nad` that occurs at startup when the :mod:`io` module is available. We used an `eol` keyword argument which is supported by :meth:`serial.FileLike.readline`, but not by :meth:`io.RawBaseIO.readline`. When the :mod:`io` module is available, it is @@ -2266,7 +2266,7 @@ to this problem. - Settings: - - Automatically expand the "~" characted to the user's home directory and + - Automatically expand the "~" character to the user's home directory and make the path absolute for settings with names ending in ``_PATH`` or ``_FILE``. @@ -2545,7 +2545,7 @@ multiple times in a playlist, and support for IPv6. 
We have also fixed the choppy playback on the libspotify backend. For the road ahead of us, we got an updated release roadmap with our goals for the 0.1 to 0.3 releases. -Enjoy the best alpha relase of Mopidy ever :-) +Enjoy the best alpha release of Mopidy ever :-) **Changes** @@ -2677,7 +2677,7 @@ v0.1.0a0 (2010-03-27) Raymond in *The Cathedral and the Bazaar*. Three months of development should be more than enough. We have more to do, but -Mopidy is working and usable. 0.1.0a0 is an alpha release, which basicly means +Mopidy is working and usable. 0.1.0a0 is an alpha release, which basically means we will still change APIs, add features, etc. before the final 0.1.0 release. But the software is usable as is, so we release it. Please give it a try and give us feedback, either at our IRC channel or through the `issue tracker diff --git a/docs/index.rst b/docs/index.rst index 1115fc4883..9813673d06 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -33,7 +33,7 @@ systems, you can control the music from any phone, tablet, or computer. **Mopidy on Raspberry Pi** -The `Raspberry Pi`_ is an popular device to run Mopidy on, either using +The `Raspberry Pi`_ is a popular device to run Mopidy on, either using Raspbian, Ubuntu, or Arch Linux. Pimoroni recommends Mopidy for use with their `Pirate Audio`_ audio gear for Raspberry Pi. @@ -45,7 +45,7 @@ audio jukebox system for Raspberry Pi. Mopidy's extension support and Python, JSON-RPC, and JavaScript APIs make Mopidy a perfect base for your projects. In one hack, a Raspberry Pi was embedded in an old cassette player. The buttons -and volume control are wired up with GPIO on the Raspberry Pi, and is used to +and volume control are wired up with GPIO on the Raspberry Pi, and are used to control playback through a custom Mopidy extension. The cassettes have NFC tags used to select playlists from Spotify. diff --git a/docs/installation/debian.rst b/docs/installation/debian.rst index 4e46ecd923..0373d6010d 100644 --- a/docs/installation/debian.rst +++ b/docs/installation/debian.rst @@ -10,7 +10,7 @@ easiest way to install Mopidy is from the When installing from the APT archive, you will automatically get updates to Mopidy in the same way as you get updates to the rest of your system. -If you're on a Raspberry Pi running Debian or Raspbian, the following +If you're on a Raspberry Pi running Debian or Raspberry Pi OS, the following instructions will work for you as well. If you're setting up a Raspberry Pi from scratch, we have a guide for installing Debian/Raspbian and Mopidy. See :ref:`raspberrypi-installation`. @@ -21,8 +21,8 @@ Distribution and architecture support The packages in the apt.mopidy.com archive are built for: -- **Debian 10 (Buster)**, - which also works for Raspbian Buster and Ubuntu 19.10 and newer. +- **Debian 12 (Bookworm)**, + which also works for Ubuntu 23.10 and Raspberry Pi OS 2023-10-10 or newer. The few packages that are compiled are available for multiple CPU architectures: @@ -42,11 +42,13 @@ Install from apt.mopidy.com #. Add the archive's GPG key:: - wget -q -O - https://apt.mopidy.com/mopidy.gpg | sudo apt-key add - + sudo mkdir -p /etc/apt/keyrings + sudo wget -q -O /etc/apt/keyrings/mopidy-archive-keyring.gpg \ + https://apt.mopidy.com/mopidy.gpg #. Add the APT repo to your package sources:: - sudo wget -q -O /etc/apt/sources.list.d/mopidy.list https://apt.mopidy.com/buster.list + sudo wget -q -O /etc/apt/sources.list.d/mopidy.list https://apt.mopidy.com/bookworm.list #. 
Install Mopidy and all dependencies:: @@ -81,7 +83,11 @@ To install one of the listed packages, e.g. ``mopidy-mpd``, simply run:: sudo apt install mopidy-mpd If you cannot find the extension you want in the APT search result, you can -install it from PyPI using ``pip`` instead. +install it from PyPI using ``pip`` instead. You need to make sure you have +``pip``, the Python package installer installed:: + + sudo apt install python3-pip + Even if Mopidy itself is installed from APT it will correctly detect and use extensions from PyPI installed globally on your system using:: diff --git a/docs/installation/pypi.rst b/docs/installation/pypi.rst index 34804a9153..99970c1497 100644 --- a/docs/installation/pypi.rst +++ b/docs/installation/pypi.rst @@ -12,14 +12,14 @@ you can install Mopidy from PyPI using the ``pip`` installer. If you are looking to contribute or wish to install from source using ``git`` please see :ref:`contributing`. -#. First of all, you need Python 3.7 or newer. Check if you have Python and +#. First of all, you need Python 3.11 or newer. Check if you have Python and what version by running:: python3 --version #. You need to make sure you have ``pip``, the Python package installer. You'll also need a C compiler and the Python development headers to install some - Mopidy extensions, like Mopidy-Spotify. + Mopidy extensions. This is how you install it on Debian/Ubuntu:: @@ -33,22 +33,26 @@ please see :ref:`contributing`. sudo dnf install -y gcc python3-devel python3-pip -#. Then you'll need to install GStreamer >= 1.14.0, with Python bindings. - GStreamer is packaged for most popular Linux distributions. Search for - GStreamer in your package manager, and make sure to install the Python - bindings, and the "good" and "ugly" plugin sets. +#. Then you'll need to install GStreamer >= 1.22.0. + GStreamer is packaged for most popular Linux distributions. + Search for GStreamer in your package manager and make sure to install the + "good" and "ugly" plugin sets, as well as the Python bindings. + To be able to build the Python bindings from source, + also install the development headers for ``libcairo2`` and ``libgirepository1.0``. **Debian/Ubuntu** If you use Debian/Ubuntu you can install GStreamer like this:: sudo apt install \ - python3-gst-1.0 \ - gir1.2-gstreamer-1.0 \ gir1.2-gst-plugins-base-1.0 \ + gir1.2-gstreamer-1.0 \ gstreamer1.0-plugins-good \ gstreamer1.0-plugins-ugly \ - gstreamer1.0-tools + gstreamer1.0-tools \ + libcairo2-dev \ + libgirepository1.0-dev \ + python3-gst-1.0 **Arch Linux** @@ -56,6 +60,8 @@ please see :ref:`contributing`. repository:: sudo pacman -S \ + cairo \ + gobject-introspection \ gst-python \ gst-plugins-good \ gst-plugins-ugly @@ -65,6 +71,8 @@ please see :ref:`contributing`. If you use Fedora you can install GStreamer like this:: sudo dnf install -y \ + cairo-devel \ + gobject-introspection-devel \ python3-gstreamer1 \ gstreamer1-plugins-good \ gstreamer1-plugins-ugly-free @@ -73,7 +81,11 @@ please see :ref:`contributing`. If you use Gentoo you can install GStreamer like this:: - emerge -av gst-python gst-plugins-meta + emerge -av \ + dev-libs/gobject-introspection \ + dev-python/gst-python \ + media-plugins/gst-plugins-meta \ + x11-libs/cairo ``gst-plugins-meta`` is the one that actually pulls in the plugins you want, so pay attention to the USE flags, e.g. ``alsa``, ``mp3``, etc. @@ -83,18 +95,27 @@ please see :ref:`contributing`. 
If you use macOS, you can install GStreamer from Homebrew:: brew install \ + cairo \ + gobject-introspection \ gst-python \ gst-plugins-base \ gst-plugins-good \ gst-plugins-ugly -#. Install the latest release of Mopidy:: +#. You are now ready to install the latest release of Mopidy. + + If you're installing Mopidy inside a Python virtual environment, + activate the virtualenv and run:: + + python3 -m pip install --upgrade mopidy + + If you want to install Mopidy globally on your system, you can run:: - sudo python3 -m pip install --upgrade mopidy + sudo python3 -m pip install --upgrade --break-system-packages mopidy This will use ``pip`` to install the latest release of `Mopidy from PyPI - `_. To upgrade Mopidy to future - releases, just rerun this command. + `_. + To upgrade Mopidy in the future, just rerun the same command. #. Now, you're ready to :ref:`run Mopidy `. @@ -105,14 +126,16 @@ Installing extensions If you want to use any Mopidy extensions, like Spotify support or Last.fm scrobbling, you need to install additional Mopidy extensions. -You can install any Mopidy extension directly from PyPI with ``pip``. To list -all the extensions available from PyPI, run:: +You can install any Mopidy extension directly from PyPI with ``pip``. +Search the PyPI website to find available extensions. +To install one of the listed packages, e.g. ``Mopidy-MPD``, +inside a virtualenv, simply run:: - python3 -m pip search mopidy + python3 -m pip install Mopidy-MPD -To install one of the listed packages, e.g. ``Mopidy-MPD``, simply run:: +To install the same package globally on your system, run:: - sudo python3 -m pip install Mopidy-MPD + sudo python3 -m pip install --break-system-packages Mopidy-MPD Note that extensions installed with ``pip`` will only install Python dependencies. Please refer to the extension's documentation for information diff --git a/docs/installation/raspberrypi.rst b/docs/installation/raspberrypi.rst index 8c7dc118c8..948793b35b 100644 --- a/docs/installation/raspberrypi.rst +++ b/docs/installation/raspberrypi.rst @@ -9,6 +9,10 @@ However, note that the later models are significantly more powerful than the Raspberry Pi 1 and Raspberry Pi Zero; Mopidy will run noticably faster on the later models. +.. warning:: Update needed + + This page is outdated and needs to be updated for Raspberry Pi OS released 2023-10-10. + How to for Raspbian =================== diff --git a/docs/releasing.rst b/docs/releasing.rst index e99cfc46d5..91852b6ff3 100644 --- a/docs/releasing.rst +++ b/docs/releasing.rst @@ -15,7 +15,7 @@ Releasing extensions Extensions that are maintained in the Mopidy organization use a quite stream-lined release procedure. -#. Make sure that everything has been merged into the ``master`` branch on +#. Make sure that everything has been merged into the ``main`` branch on GitHub, and that all CI checks are green. #. Perform any manual tests you feel are required. @@ -27,15 +27,15 @@ stream-lined release procedure. git commit -m "Release v2.0.2" -#. Tag the commit:: +#. Tag the commit with an annotated tag:: - git tag -m "Release v2.0.2" v2.0.2 + git tag -a -m "Release v2.0.2" v2.0.2 It is encouraged to use ``-s`` to sign the tag if you have a GnuPG setup. #. Push to GitHub:: - git push origin master --follow-tags + git push origin main --follow-tags #. Go to the GitHub repository's tags page, e.g. ``https://github.com/mopidy/mopidy-foo/tags``. Find the tag and select @@ -70,7 +70,7 @@ To setup this on your own repo, you must: #. 
Copy ``.github/workflows/release.yml`` from the Mopidy `cookiecutter project - `_. + `_. #. Create an API token in your account settings at PyPI with scope to access the extension's PyPI package. @@ -86,51 +86,53 @@ Releasing Mopidy itself ======================= Mopidy itself is a bit more complicated than extensions because the changelog -is maintained in the Git repo, and because we maintain multiple branches to be -able to work towards the next bugfix release and the next feature release at -the same time. +is maintained in the Git repo. Preparations ------------ -#. Update the changelog. Commit and push it. - -#. Make sure that everything has been merged into the ``develop`` branch on +#. Make sure that everything has been merged into the ``main`` branch on GitHub, and that all CI checks are green. +#. Make sure the changelog in the ``docs/changelog.rst`` file includes all + significant changes since the last release. Commit and push it. + #. Perform any manual tests you feel are required. Release ------- -#. Bump the version in ``setup.cfg`` in line with :ref:`our strategy `. - For example, to ``3.3.0``, and set the release date in the changelog. +#. Select a version number in line with :ref:`our strategy `, + e.g. ``v3.3.0`` in the following examples. -#. Commit the bumped version and release date:: +#. Update the release in ``docs/changelog.rst`` with the right version number + and release date. - git commit -m "Prepare release of v3.3.0" +#. Commit the final touches to the changelog:: -#. Merge the release branch (``develop`` in the example) into ``master``:: + git commit -m "Release v3.3.0" - git checkout master - git pull - git merge --no-ff -m "Release v3.3.0" develop +#. Tag the commit with an annotated tag:: -#. Tag the commit:: - - git tag -m "Release v3.3.0" v3.3.0 + git tag -a -m "Release v3.3.0" v3.3.0 It is encouraged to use ``-s`` to sign the tag if you have a GnuPG setup. +#. Verify that Mopidy reports the new version number:: + + mopidy --version + + If it doesn't, check that you've properly tagged the release. + #. Push to GitHub:: - git push origin master --follow-tags + git push origin main --follow-tags -#. Go to the GitHub repository's tags page at - ``https://github.com/mopidy/mopidy/tags``. Find the tag and select - "Create release" in the tag's dropdown menu. +#. Go to the GitHub repository's + `tags page `_. + Find the tag and select "Create release" in the tag's dropdown menu. #. Copy the tag, e.g. ``v3.3.0`` into the "title" field. Write a changelog entry in the description field, and hit "Publish release". @@ -141,16 +143,9 @@ Release Post-release ------------ -#. To prepare for further development, merge the ``master`` branch back into - the ``develop`` branch and push it to GitHub:: - - git checkout develop - git merge master - git push origin develop - #. Make sure the new tag is built by `Read the Docs `_, - and that the `"latest" version `_ + and that the `"stable" version `_ shows the newly released version. #. Spread the word through an announcement post on the `Discourse forum @@ -158,6 +153,6 @@ Post-release #. Notify distribution packagers, including but not limited to: - - `Arch Linux `_ + - `Arch Linux `_ - `Debian `_ - `Homebrew `_ diff --git a/docs/sponsors.rst b/docs/sponsors.rst index dc302789ca..a5b31ee2a1 100644 --- a/docs/sponsors.rst +++ b/docs/sponsors.rst @@ -13,3 +13,10 @@ Discourse `Discourse `_ sponsors Mopidy with free hosting of our discussion forum at https://discourse.mopidy.com. 
+ + +Zulip +===== + +`Zulip `_ sponsors Mopidy with free hosting of our chat app +at https://mopidy.zulipchat.com. diff --git a/mopidy/audio/__init__.py b/mopidy/audio/__init__.py deleted file mode 100644 index 8858a0bb7b..0000000000 --- a/mopidy/audio/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# flake8: noqa -from .actor import Audio -from .constants import PlaybackState -from .listener import AudioListener -from .utils import ( - calculate_duration, - create_buffer, - millisecond_to_clocktime, - supported_uri_schemes, -) diff --git a/mopidy/config/types.py b/mopidy/config/types.py deleted file mode 100644 index ced3a0a524..0000000000 --- a/mopidy/config/types.py +++ /dev/null @@ -1,323 +0,0 @@ -import logging -import re -import socket - -from mopidy.config import validators -from mopidy.internal import log, path - - -def decode(value): - if isinstance(value, bytes): - value = value.decode(errors="surrogateescape") - - for char in ("\\", "\n", "\t"): - value = value.replace( - char.encode(encoding="unicode-escape").decode(), char - ) - - return value - - -def encode(value): - if isinstance(value, bytes): - value = value.decode(errors="surrogateescape") - - for char in ("\\", "\n", "\t"): - value = value.replace( - char, char.encode(encoding="unicode-escape").decode() - ) - - return value - - -class DeprecatedValue: - pass - - -class ConfigValue: - """Represents a config key's value and how to handle it. - - Normally you will only be interacting with sub-classes for config values - that encode either deserialization behavior and/or validation. - - Each config value should be used for the following actions: - - 1. Deserializing from a raw string and validating, raising ValueError on - failure. - 2. Serializing a value back to a string that can be stored in a config. - 3. Formatting a value to a printable form (useful for masking secrets). - - :class:`None` values should not be deserialized, serialized or formatted, - the code interacting with the config should simply skip None config values. - """ - - def deserialize(self, value): - """Cast raw string to appropriate type.""" - return decode(value) - - def serialize(self, value, display=False): - """Convert value back to string for saving.""" - if value is None: - return "" - return str(value) - - -class Deprecated(ConfigValue): - """Deprecated value. - - Used for ignoring old config values that are no longer in use, but should - not cause the config parser to crash. - """ - - def deserialize(self, value): - return DeprecatedValue() - - def serialize(self, value, display=False): - return DeprecatedValue() - - -class String(ConfigValue): - """String value. - - Is decoded as utf-8 and \\n \\t escapes should work and be preserved. - """ - - def __init__(self, optional=False, choices=None): - self._required = not optional - self._choices = choices - - def deserialize(self, value): - value = decode(value).strip() - validators.validate_required(value, self._required) - if not value: - return None - validators.validate_choice(value, self._choices) - return value - - def serialize(self, value, display=False): - if value is None: - return "" - return encode(value) - - -class Secret(String): - """Secret string value. - - Is decoded as utf-8 and \\n \\t escapes should work and be preserved. - - Should be used for passwords, auth tokens etc. Will mask value when being - displayed. 
- """ - - def __init__(self, optional=False, choices=None): - self._required = not optional - self._choices = None # Choices doesn't make sense for secrets - - def serialize(self, value, display=False): - if value is not None and display: - return "********" - return super().serialize(value, display) - - -class Integer(ConfigValue): - """Integer value.""" - - def __init__( - self, minimum=None, maximum=None, choices=None, optional=False - ): - self._required = not optional - self._minimum = minimum - self._maximum = maximum - self._choices = choices - - def deserialize(self, value): - value = decode(value) - validators.validate_required(value, self._required) - if not value: - return None - value = int(value) - validators.validate_choice(value, self._choices) - validators.validate_minimum(value, self._minimum) - validators.validate_maximum(value, self._maximum) - return value - - -class Boolean(ConfigValue): - """Boolean value. - - Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as - :class:`True`. - - Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as - :class:`False`. - """ - - true_values = ("1", "yes", "true", "on") - false_values = ("0", "no", "false", "off") - - def __init__(self, optional=False): - self._required = not optional - - def deserialize(self, value): - value = decode(value) - validators.validate_required(value, self._required) - if not value: - return None - if value.lower() in self.true_values: - return True - elif value.lower() in self.false_values: - return False - raise ValueError(f"invalid value for boolean: {value!r}") - - def serialize(self, value, display=False): - if value is True: - return "true" - elif value in (False, None): - return "false" - else: - raise ValueError(f"{value!r} is not a boolean") - - -class List(ConfigValue): - """List value. - - Supports elements split by commas or newlines. Newlines take presedence and - empty list items will be filtered out. - """ - - def __init__(self, optional=False): - self._required = not optional - - def deserialize(self, value): - value = decode(value) - if "\n" in value: - values = re.split(r"\s*\n\s*", value) - else: - values = re.split(r"\s*,\s*", value) - values = tuple(v.strip() for v in values if v.strip()) - validators.validate_required(values, self._required) - return tuple(values) - - def serialize(self, value, display=False): - if not value: - return "" - return "\n " + "\n ".join(encode(v) for v in value if v) - - -class LogColor(ConfigValue): - def deserialize(self, value): - value = decode(value) - validators.validate_choice(value.lower(), log.COLORS) - return value.lower() - - def serialize(self, value, display=False): - if value.lower() in log.COLORS: - return encode(value.lower()) - return "" - - -class LogLevel(ConfigValue): - """Log level value. - - Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``, - ``trace``, or ``all``, with any casing. 
- """ - - levels = { - "critical": logging.CRITICAL, - "error": logging.ERROR, - "warning": logging.WARNING, - "info": logging.INFO, - "debug": logging.DEBUG, - "trace": log.TRACE_LOG_LEVEL, - "all": logging.NOTSET, - } - - def deserialize(self, value): - value = decode(value) - validators.validate_choice(value.lower(), self.levels.keys()) - return self.levels.get(value.lower()) - - def serialize(self, value, display=False): - lookup = {v: k for k, v in self.levels.items()} - if value in lookup: - return encode(lookup[value]) - return "" - - -class Hostname(ConfigValue): - """Network hostname value.""" - - def __init__(self, optional=False): - self._required = not optional - - def deserialize(self, value, display=False): - value = decode(value).strip() - validators.validate_required(value, self._required) - if not value: - return None - - socket_path = path.get_unix_socket_path(value) - if socket_path is not None: - path_str = Path(not self._required).deserialize(socket_path) - return f"unix:{path_str}" - - try: - socket.getaddrinfo(value, None) - except OSError: - raise ValueError("must be a resolveable hostname or valid IP") - - return value - - -class Port(Integer): - """Network port value. - - Expects integer in the range 0-65535, zero tells the kernel to simply - allocate a port for us. - """ - - def __init__(self, choices=None, optional=False): - super().__init__( - minimum=0, maximum=2 ** 16 - 1, choices=choices, optional=optional - ) - - -class _ExpandedPath(str): - def __new__(cls, original, expanded): - return super().__new__(cls, expanded) - - def __init__(self, original, expanded): - self.original = original - - -class Path(ConfigValue): - """File system path. - - The following expansions of the path will be done: - - - ``~`` to the current user's home directory - - ``$XDG_CACHE_DIR`` according to the XDG spec - - ``$XDG_CONFIG_DIR`` according to the XDG spec - - ``$XDG_DATA_DIR`` according to the XDG spec - - ``$XDG_MUSIC_DIR`` according to the XDG spec - """ - - def __init__(self, optional=False): - self._required = not optional - - def deserialize(self, value): - value = decode(value).strip() - expanded = path.expand_path(value) - validators.validate_required(value, self._required) - validators.validate_required(expanded, self._required) - if not value or expanded is None: - return None - return _ExpandedPath(value, expanded) - - def serialize(self, value, display=False): - if isinstance(value, _ExpandedPath): - value = value.original - if isinstance(value, bytes): - value = value.decode(errors="surrogateescape") - return value diff --git a/mopidy/core/__init__.py b/mopidy/core/__init__.py deleted file mode 100644 index 4e73d17f44..0000000000 --- a/mopidy/core/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# flake8: noqa -from .actor import Core -from .history import HistoryController -from .library import LibraryController -from .listener import CoreListener -from .mixer import MixerController -from .playback import PlaybackController, PlaybackState -from .playlists import PlaylistsController -from .tracklist import TracklistController diff --git a/mopidy/http/__init__.py b/mopidy/http/__init__.py deleted file mode 100644 index 7a9fa05048..0000000000 --- a/mopidy/http/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -import logging -import os - -import mopidy -from mopidy import config as config_lib -from mopidy import exceptions, ext - -logger = logging.getLogger(__name__) - - -class Extension(ext.Extension): - dist_name = "Mopidy-HTTP" - ext_name = "http" - version = 
mopidy.__version__ - - def get_default_config(self): - conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") - return config_lib.read(conf_file) - - def get_config_schema(self): - schema = super().get_config_schema() - schema["hostname"] = config_lib.Hostname() - schema["port"] = config_lib.Port() - schema["static_dir"] = config_lib.Deprecated() - schema["zeroconf"] = config_lib.String(optional=True) - schema["allowed_origins"] = config_lib.List(optional=True) - schema["csrf_protection"] = config_lib.Boolean(optional=True) - schema["default_app"] = config_lib.String(optional=True) - return schema - - def validate_environment(self): - try: - import tornado.web # noqa - except ImportError as e: - raise exceptions.ExtensionError("tornado library not found", e) - - def setup(self, registry): - from .actor import HttpFrontend - from .handlers import make_mopidy_app_factory - - HttpFrontend.apps = registry["http:app"] - HttpFrontend.statics = registry["http:static"] - - registry.add("frontend", HttpFrontend) - registry.add( - "http:app", - { - "name": "mopidy", - "factory": make_mopidy_app_factory( - registry["http:app"], registry["http:static"] - ), - }, - ) diff --git a/mopidy/internal/deps.py b/mopidy/internal/deps.py deleted file mode 100644 index c8b53a4d5b..0000000000 --- a/mopidy/internal/deps.py +++ /dev/null @@ -1,189 +0,0 @@ -import functools -import os -import platform -import sys - -import pkg_resources - -from mopidy.internal import formatting -from mopidy.internal.gi import Gst, gi - - -def format_dependency_list(adapters=None): - if adapters is None: - dist_names = { - ep.dist.project_name - for ep in pkg_resources.iter_entry_points("mopidy.ext") - if ep.dist.project_name != "Mopidy" - } - dist_infos = [ - functools.partial(pkg_info, dist_name) for dist_name in dist_names - ] - - adapters = ( - [ - executable_info, - platform_info, - python_info, - functools.partial(pkg_info, "Mopidy", True), - ] - + dist_infos - + [gstreamer_info] - ) - - return "\n".join(_format_dependency(a()) for a in adapters) - - -def _format_dependency(dep_info): - lines = [] - - if "version" not in dep_info: - lines.append(f"{dep_info['name']}: not found") - else: - source = f" from {dep_info['path']}" if "path" in dep_info else "" - lines.append(f"{dep_info['name']}: {dep_info['version']}{source}") - - if "other" in dep_info: - details = formatting.indent(dep_info["other"], places=4) - lines.append(f" Detailed information: {details}") - - if dep_info.get("dependencies", []): - for sub_dep_info in dep_info["dependencies"]: - sub_dep_lines = _format_dependency(sub_dep_info) - lines.append( - formatting.indent(sub_dep_lines, places=2, singles=True) - ) - - return "\n".join(lines) - - -def executable_info(): - return { - "name": "Executable", - "version": sys.argv[0], - } - - -def platform_info(): - return { - "name": "Platform", - "version": platform.platform(), - } - - -def python_info(): - return { - "name": "Python", - "version": ( - f"{platform.python_implementation()} {platform.python_version()}" - ), - "path": os.path.dirname(platform.__file__), - } - - -def pkg_info( - project_name=None, include_transitive_deps=True, include_extras=False -): - if project_name is None: - project_name = "Mopidy" - try: - distribution = pkg_resources.get_distribution(project_name) - extras = include_extras and distribution.extras or [] - if include_transitive_deps: - dependencies = [ - pkg_info( - d.project_name, - include_transitive_deps=d.project_name != "Mopidy", - ) - for d in distribution.requires(extras) 
- ] - else: - dependencies = [] - return { - "name": project_name, - "version": distribution.version, - "path": distribution.location, - "dependencies": dependencies, - } - except pkg_resources.ResolutionError: - return { - "name": project_name, - } - - -def gstreamer_info(): - other = [] - other.append(f"Python wrapper: python-gi {gi.__version__}") - - found_elements = [] - missing_elements = [] - for name, status in _gstreamer_check_elements(): - if status: - found_elements.append(name) - else: - missing_elements.append(name) - - other.append("Relevant elements:") - other.append(" Found:") - for element in found_elements: - other.append(f" {element}") - if not found_elements: - other.append(" none") - other.append(" Not found:") - for element in missing_elements: - other.append(f" {element}") - if not missing_elements: - other.append(" none") - - return { - "name": "GStreamer", - "version": ".".join(map(str, Gst.version())), - "path": os.path.dirname(gi.__file__), - "other": "\n".join(other), - } - - -def _gstreamer_check_elements(): - elements_to_check = [ - # Core playback - "uridecodebin", - # External HTTP streams - "souphttpsrc", - # Spotify - "appsrc", - # Audio sinks - "alsasink", - "osssink", - "oss4sink", - "pulsesink", - # MP3 encoding and decoding - # - # One of flump3dec, mad, and mpg123audiodec is required for MP3 - # playback. - "flump3dec", - "id3demux", - "id3v2mux", - "lamemp3enc", - "mad", - "mpegaudioparse", - "mpg123audiodec", - # Ogg Vorbis encoding and decoding - "vorbisdec", - "vorbisenc", - "vorbisparse", - "oggdemux", - "oggmux", - "oggparse", - # Flac decoding - "flacdec", - "flacparse", - # Shoutcast output - "shout2send", - ] - known_elements = [ - factory.get_name() - for factory in Gst.Registry.get().get_feature_list(Gst.ElementFactory) - ] - return [ - (element, element in known_elements) for element in elements_to_check - ] diff --git a/mopidy/internal/validation.py b/mopidy/internal/validation.py deleted file mode 100644 index 27efb02085..0000000000 --- a/mopidy/internal/validation.py +++ /dev/null @@ -1,131 +0,0 @@ -import urllib -from collections.abc import Iterable, Mapping - -from mopidy import exceptions - -PLAYBACK_STATES = {"paused", "stopped", "playing"} - -TRACK_FIELDS_WITH_TYPES = { - "uri": str, - "track_name": str, - "album": str, - "artist": str, - "albumartist": str, - "composer": str, - "performer": str, - "track_no": int, - "genre": str, - "date": str, - "comment": str, - "disc_no": int, - "musicbrainz_albumid": str, - "musicbrainz_artistid": str, - "musicbrainz_trackid": str, -} - -SEARCH_FIELDS = set(TRACK_FIELDS_WITH_TYPES).union({"any"}) - -PLAYLIST_FIELDS = {"uri", "name"} # TODO: add length and last_modified? - -TRACKLIST_FIELDS = { # TODO: add bitrate, length, disc_no, track_no, modified? - "uri", - "name", - "genre", - "date", - "comment", - "musicbrainz_id", -} - -DISTINCT_FIELDS = dict(TRACK_FIELDS_WITH_TYPES) - - -# TODO: _check_iterable(check, msg, **kwargs) + [check(a) for a in arg]? 
-def _check_iterable(arg, msg, **kwargs): - """Ensure we have an iterable which is not a string or an iterator""" - if isinstance(arg, str): - raise exceptions.ValidationError(msg.format(arg=arg, **kwargs)) - elif not isinstance(arg, Iterable): - raise exceptions.ValidationError(msg.format(arg=arg, **kwargs)) - elif iter(arg) is iter(arg): - raise exceptions.ValidationError(msg.format(arg=arg, **kwargs)) - - -def check_choice(arg, choices, msg="Expected one of {choices}, not {arg!r}"): - if arg not in choices: - raise exceptions.ValidationError( - msg.format(arg=arg, choices=tuple(choices)) - ) - - -def check_boolean(arg, msg="Expected a boolean, not {arg!r}"): - check_instance(arg, bool, msg=msg) - - -def check_instance(arg, cls, msg="Expected a {name} instance, not {arg!r}"): - if not isinstance(arg, cls): - raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__)) - - -def check_instances(arg, cls, msg="Expected a list of {name}, not {arg!r}"): - _check_iterable(arg, msg, name=cls.__name__) - if not all(isinstance(instance, cls) for instance in arg): - raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__)) - - -def check_integer(arg, min=None, max=None): - if not isinstance(arg, int): - raise exceptions.ValidationError(f"Expected an integer, not {arg!r}") - elif min is not None and arg < min: - raise exceptions.ValidationError( - f"Expected number larger or equal to {min}, not {arg!r}" - ) - elif max is not None and arg > max: - raise exceptions.ValidationError( - f"Expected number smaller or equal to {max}, not {arg!r}" - ) - - -def check_query(arg, fields=None, list_values=True): - if fields is None: - fields = SEARCH_FIELDS - # TODO: normalize name -> track_name - # TODO: normalize value -> [value] - # TODO: normalize blank -> [] or just remove field? - # TODO: remove list_values? 
- - if not isinstance(arg, Mapping): - raise exceptions.ValidationError( - f"Expected a query dictionary, not {arg!r}" - ) - - for key, value in arg.items(): - check_choice( - key, - fields, - msg="Expected query field to be one of " "{choices}, not {arg!r}", - ) - if list_values: - msg = 'Expected "{key}" to be list of strings, not {arg!r}' - _check_iterable(value, msg, key=key) - [_check_query_value(key, v, msg) for v in value] - else: - _check_query_value( - key, value, 'Expected "{key}" to be a string, not {arg!r}' - ) - - -def _check_query_value(key, arg, msg): - if not isinstance(arg, str) or not arg.strip(): - raise exceptions.ValidationError(msg.format(arg=arg, key=key)) - - -def check_uri(arg, msg="Expected a valid URI, not {arg!r}"): - if not isinstance(arg, str): - raise exceptions.ValidationError(msg.format(arg=arg)) - elif urllib.parse.urlparse(arg).scheme == "": - raise exceptions.ValidationError(msg.format(arg=arg)) - - -def check_uris(arg, msg="Expected a list of URIs, not {arg!r}"): - _check_iterable(arg, msg) - [check_uri(a, msg) for a in arg] diff --git a/mopidy/internal/versioning.py b/mopidy/internal/versioning.py deleted file mode 100644 index c9406ee497..0000000000 --- a/mopidy/internal/versioning.py +++ /dev/null @@ -1,29 +0,0 @@ -import os -import subprocess - -import mopidy - - -def get_version(): - try: - return get_git_version() - except OSError: - return mopidy.__version__ - - -def get_git_version(): - project_dir = os.path.abspath( - os.path.join(os.path.dirname(mopidy.__file__), "..") - ) - process = subprocess.Popen( - ["git", "describe"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=project_dir, - ) - if process.wait() != 0: - raise OSError('Execution of "git describe" failed') - version = process.stdout.read().strip().decode() - if version.startswith("v"): - version = version[1:] - return version diff --git a/mopidy/m3u/backend.py b/mopidy/m3u/backend.py deleted file mode 100644 index 704785247a..0000000000 --- a/mopidy/m3u/backend.py +++ /dev/null @@ -1,13 +0,0 @@ -import pykka - -from mopidy import backend - -from . 
import playlists - - -class M3UBackend(pykka.ThreadingActor, backend.Backend): - uri_schemes = ["m3u"] - - def __init__(self, config, audio): - super().__init__() - self.playlists = playlists.M3UPlaylistsProvider(self, config) diff --git a/pyproject.toml b/pyproject.toml index 1cf1aa3ae1..654d340901 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,17 +1,185 @@ [build-system] -requires = ["setuptools >= 30.3.0", "wheel"] +requires = ["setuptools >= 66", "setuptools-scm >= 7.1"] +build-backend = "setuptools.build_meta" -[tool.black] -target-version = ["py37", "py38"] -line-length = 80 +[project] +name = "Mopidy" +description = "Mopidy is an extensible music server written in Python" +readme = "README.rst" +requires-python = ">= 3.11" +license = { text = "Apache-2.0" } +authors = [{ name = "Stein Magnus Jodal", email = "stein.magnus@jodal.no" }] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: No Input/Output (Daemon)", + "Intended Audience :: End Users/Desktop", + "License :: OSI Approved :: Apache Software License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Multimedia :: Sound/Audio :: Players", +] +dynamic = ["version"] +dependencies = [ + "pygobject >= 3.42", + "pykka >= 4.0", + "requests >= 2.28", + "setuptools >= 66", + "tornado >= 6.2", +] +[project.optional-dependencies] +docs = [ + "pygraphviz >= 0.20", + "sphinx >= 5.3", + "sphinx-autodoc-typehints >= 1.12", + "sphinx-rtd-theme >= 1.2", +] +lint = ["ruff == 0.3.0"] +test = ["pytest >= 7.2", "pytest-cov >= 4.0", "responses >= 0.18"] +typing = [ + "pygobject-stubs", + "pyright == 1.1.350", + "types-requests", + "types-setuptools", +] +dev = ["mopidy[docs,lint,test,typing]", "tox"] -[tool.isort] -multi_line_output = 3 -include_trailing_comma = true -force_grid_wrap = 0 -use_parentheses = true -line_length = 80 -known_tests = "tests" -sections = "FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,TESTS,LOCALFOLDER" +[project.urls] +Homepage = "https://mopidy.com/" +Documentation = "https://docs.mopidy.com/" +"Discourse forum" = "https://discourse.mopidy.com/" +"Zulip chat" = "https://mopidy.zulipchat.com/" +Source = "https://github.com/mopidy/mopidy" +Issues = "https://github.com/mopidy/mopidy/issues" + +[project.scripts] +mopidy = "mopidy.__main__:main" + +[project.entry-points."mopidy.ext"] +file = "mopidy.file:Extension" +http = "mopidy.http:Extension" +m3u = "mopidy.m3u:Extension" +softwaremixer = "mopidy.softwaremixer:Extension" +stream = "mopidy.stream:Extension" + + +[tool.pyright] +pythonVersion = "3.11" +# Use venv from parent directory, to share it with any extensions: +venvPath = "../" +venv = ".venv" +typeCheckingMode = "standard" +# Not all dependencies have type hints: +reportMissingTypeStubs = false +# Already covered by flake8-self: +reportPrivateImportUsage = false + + +[tool.pytest.ini_options] +filterwarnings = [ + "error::DeprecationWarning:mopidy[.*]", + "ignore::PendingDeprecationWarning:mopidy[.*]", + "ignore::DeprecationWarning:mopidy[.*]", +] + + +[tool.ruff] +target-version = "py311" + +[tool.ruff.lint] +select = [ + "A", # flake8-builtins + "ANN", # flake8-annotations + "ARG", # flake8-unused-arguments + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "C90", # mccabe + "D", # pydocstyle + "DTZ", # flake8-datetimez + "E", # pycodestyle + "ERA", # eradicate + "F", # pyflakes + "FBT", # 
flake8-boolean-trap + "I", # isort + "INP", # flake8-no-pep420 + "ISC", # flake8-implicit-str-concat + "N", # pep8-naming + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PLC", # pylint convention + "PLE", # pylint error + "PLR", # pylint refactor + "PLW", # pylint warning + "PT", # flake8-pytest-style + "PTH", # flake8-use-pathlib + "Q", # flake8-quotes + "RET", # flake8-return + "RSE", # flake8-raise + "RUF", # ruff + "SIM", # flake8-simplify + "SLF", # flake8-self + "T20", # flake8-print + "TCH", # flake8-type-checking + "TID", # flake8-tidy-imports + "TRY", # tryceratops + "UP", # pyupgrade + "W", # pycodestyle +] +ignore = [ + "A002", # builtin-argument-shadowing # TODO + "A003", # builtin-attribute-shadowing + "ANN", # flake8-annotations # TODO + "ANN101", # missing-type-self + "ANN102", # missing-type-cls + "ANN401", # any-type + "D100", # undocumented-public-module # TODO + "D101", # undocumented-public-class # TODO + "D102", # undocumented-public-method # TODO + "D103", # undocumented-public-function # TODO + "D104", # undocumented-public-package # TODO + "D105", # undocumented-magic-method + "D107", # undocumented-public-init # TODO + "D203", # one-blank-line-before-class + "D205", # blank-line-after-summary # TODO + "D213", # multi-line-summary-second-line + "D401", # non-imperative-mood # TODO + "FBT001", # boolean-positional-arg-in-function-definition # TODO + "FBT002", # boolean-default-value-in-function-definition # TODO + "FBT003", # boolean-positional-value-in-function-call # TODO + "ISC001", # single-line-implicit-string-concatenation + "PLR2004", # magic-value-comparison + "PLW2901", # redefined-loop-name + "RET504", # unnecessary-assign + "SLF001", # private-member-access # TODO + "TCH003", # typing-only-standard-library-import + "TRY003", # raise-vanilla-args + "TRY400", # error-instead-of-exception +] + +[tool.ruff.lint.per-file-ignores] +"docs/*" = [ + "D", # pydocstyle + "INP001", # flake8-no-pep420 +] +"src/mopidy/internal/*" = [ + "D", # pydocstyle +] +"tests/*" = [ + "ANN", # flake8-annotations + "ARG", # flake8-unused-arguments + "D", # pydocstyle + "FBT", # flake8-boolean-trap + "PLR0913", # too-many-arguments + "PT007", # pytest-parametrize-values-wrong-type # TODO + "PT009", # pytest-unittest-assertion # TODO + "PT011", # pytest-raises-too-broad # TODO + "SLF001", # private-member-access + "TRY002", # raise-vanilla-class +] + + +[tool.setuptools_scm] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 803450ca18..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,146 +0,0 @@ -[metadata] -name = Mopidy -version = 3.2.0 -url = https://mopidy.com/ -project_urls = - Documentation = https://docs.mopidy.com/ - Discourse forum = https://discourse.mopidy.com/ - Zulip chat = https://mopidy.zulipchat.com/ - Source = https://github.com/mopidy/mopidy - Issues = https://github.com/mopidy/mopidy/issues -author = Stein Magnus Jodal -author_email = stein.magnus@jodal.no -license = Apache License, Version 2.0 -license_file = LICENSE -description = Mopidy is an extensible music server written in Python -long_description = file: README.rst -classifiers = - Development Status :: 5 - Production/Stable - Environment :: No Input/Output (Daemon) - Intended Audience :: End Users/Desktop - License :: OSI Approved :: Apache Software License - Operating System :: MacOS :: MacOS X - Operating System :: POSIX :: Linux - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - 
Topic :: Multimedia :: Sound/Audio :: Players - - -[options] -zip_safe = False -include_package_data = True -packages = find: -python_requires = >= 3.7 -install_requires = - Pykka >= 2.0.1 - requests >= 2.0 - setuptools - tornado >= 4.4 - - -[options.extras_require] -docs = - pygraphviz - sphinx - sphinx_rtd_theme -lint = - black - check-manifest - flake8 - flake8-black - flake8-bugbear - flake8-isort - isort - mypy - pep8-naming - types-requests - types-setuptools -test = - pytest - pytest-cov - responses -dev = - %(docs)s - %(lint)s - %(test)s - tox - - -[options.packages.find] -exclude = - tests - tests.* - - -[options.entry_points] -console_scripts = - mopidy = mopidy.__main__:main -mopidy.ext = - http = mopidy.http:Extension - file = mopidy.file:Extension - m3u = mopidy.m3u:Extension - softwaremixer = mopidy.softwaremixer:Extension - stream = mopidy.stream:Extension - - -[tool:pytest] -filterwarnings = - error::DeprecationWarning:mopidy[.*] - ignore::PendingDeprecationWarning:mopidy[.*] - ignore::DeprecationWarning:mopidy[.*] - - -[flake8] -application-import-names = mopidy, tests -max-line-length = 80 -exclude = .git, .tox, build -select = - # Regular flake8 rules - C, E, F, W - # flake8-bugbear rules - B - # B950: line too long (soft speed limit) - B950 - # flake8-isort - I - # pep8-naming rules - N -ignore = - # E203: whitespace before ':' (not PEP8 compliant) - E203 - # E501: line too long (replaced by B950) - E501 - # W503: line break before binary operator (not PEP8 compliant) - W503 - # B305: .next() is not a thing on Python 3 (used by playback controller) - B305 - - -[mypy] -warn_unused_configs = True - -[mypy-dbus.*] -ignore_missing_imports = True - -[mypy-gi.*] -ignore_missing_imports = True - -[mypy-pykka.*] -ignore_missing_imports = True - -[mypy-mopidy.backend.*] -disallow_untyped_defs = True - -[mypy-mopidy.ext.*] -disallow_untyped_defs = True - -[mypy-mopidy.internal.log.*] -disallow_untyped_defs = True - -[mypy-mopidy.internal.network.*] -disallow_untyped_defs = True - -[mypy-mopidy.mixer.*] -disallow_untyped_defs = True diff --git a/setup.py b/setup.py deleted file mode 100644 index 606849326a..0000000000 --- a/setup.py +++ /dev/null @@ -1,3 +0,0 @@ -from setuptools import setup - -setup() diff --git a/mopidy/__init__.py b/src/mopidy/__init__.py similarity index 50% rename from mopidy/__init__.py rename to src/mopidy/__init__.py index ccb8082996..475160a0f1 100644 --- a/mopidy/__init__.py +++ b/src/mopidy/__init__.py @@ -1,15 +1,14 @@ import platform import sys import warnings +from importlib.metadata import version -import pkg_resources - -if not sys.version_info >= (3, 7): +if not sys.version_info >= (3, 11): sys.exit( - f"ERROR: Mopidy requires Python >= 3.7, " + f"ERROR: Mopidy requires Python >= 3.11, " f"but found {platform.python_version()}." 
) warnings.filterwarnings("ignore", "could not open display") -__version__ = pkg_resources.get_distribution("Mopidy").version +__version__ = version("Mopidy") diff --git a/mopidy/__main__.py b/src/mopidy/__main__.py similarity index 65% rename from mopidy/__main__.py rename to src/mopidy/__main__.py index 6ef2d2b58a..1b1dd65b59 100644 --- a/mopidy/__main__.py +++ b/src/mopidy/__main__.py @@ -1,31 +1,46 @@ +from __future__ import annotations + import logging import signal import sys +from pathlib import Path +from typing import TYPE_CHECKING, TypedDict, cast import pykka.debug -from mopidy import commands +import mopidy +from mopidy import commands, ext from mopidy import config as config_lib -from mopidy import ext -from mopidy.internal import log, path, process, versioning -from mopidy.internal.gi import Gst # noqa: F401 +from mopidy.internal import log, path, process +from mopidy.internal.gi import ( + GLib, + Gst, # noqa: F401 (imported to test GStreamer presence) +) try: # Make GLib's mainloop the event loop for python-dbus - import dbus.mainloop.glib + import dbus.mainloop.glib # pyright: ignore[reportMissingImports] dbus.mainloop.glib.threads_init() dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) except ImportError: pass +if TYPE_CHECKING: + + class ExtensionsStatus(TypedDict): + validate: list[ext.Extension] + config: list[ext.Extension] + disabled: list[ext.Extension] + enabled: list[ext.Extension] + logger = logging.getLogger(__name__) -def main(): +def main() -> int: # noqa: C901, PLR0912, PLR0915 log.bootstrap_delayed_logging() - logger.info(f"Starting Mopidy {versioning.get_version()}") + logger.info(f"Starting Mopidy {mopidy.__version__}") signal.signal(signal.SIGTERM, process.sigterm_handler) # Windows does not have signal.SIGUSR1 @@ -52,21 +67,27 @@ def main(): args = root_cmd.parse(sys.argv[1:]) + default_config_files = [ + (Path(base) / "mopidy" / "mopidy.conf").resolve() + for base in [*GLib.get_system_config_dirs(), GLib.get_user_config_dir()] + ] + config_files = [ + Path(f) for f in args.config_files or [] + ] or default_config_files + config, config_errors = config_lib.load( - args.config_files, + config_files, [d.config_schema for d in extensions_data], [d.config_defaults for d in extensions_data], args.config_overrides, ) create_core_dirs(config) - create_initial_config_file(args, extensions_data) + create_initial_config_file(config_files, extensions_data) - log.setup_logging( - config, args.base_verbosity_level, args.verbosity_level - ) + log.setup_logging(config, args.base_verbosity_level, args.verbosity_level) - extensions = { + extensions_status: ExtensionsStatus = { "validate": [], "config": [], "disabled": [], @@ -81,50 +102,57 @@ def main(): config_errors[extension.ext_name] = { "enabled": "extension disabled by self check." } - extensions["validate"].append(extension) + extensions_status["validate"].append(extension) elif not config[extension.ext_name]["enabled"]: config[extension.ext_name] = {"enabled": False} config_errors[extension.ext_name] = { "enabled": "extension disabled by user config." } - extensions["disabled"].append(extension) + extensions_status["disabled"].append(extension) elif config_errors.get(extension.ext_name): config[extension.ext_name]["enabled"] = False - config_errors[extension.ext_name][ - "enabled" - ] = "extension disabled due to config errors." - extensions["config"].append(extension) + config_errors[extension.ext_name]["enabled"] = ( + "extension disabled due to config errors." 
+ ) + extensions_status["config"].append(extension) else: - extensions["enabled"].append(extension) + extensions_status["enabled"].append(extension) log_extension_info( - [d.extension for d in extensions_data], extensions["enabled"] + [d.extension for d in extensions_data], extensions_status["enabled"] ) # Config and deps commands are simply special cased for now. - if args.command == config_cmd: - schemas = [d.config_schema for d in extensions_data] - return args.command.run(config, config_errors, schemas) - elif args.command == deps_cmd: - return args.command.run() + if isinstance(args.command, commands.ConfigCommand): + return args.command.run( + args=args, + config=config, + errors=config_errors, + schemas=[d.config_schema for d in extensions_data], + ) + if isinstance(args.command, commands.DepsCommand): + return args.command.run( + args=args, + config=config, + ) - check_config_errors(config, config_errors, extensions) + check_config_errors(config_errors, extensions_status) - if not extensions["enabled"]: + if not extensions_status["enabled"]: logger.error("No extension enabled, exiting...") sys.exit(1) # Read-only config from here on, please. - proxied_config = config_lib.Proxy(config) + proxied_config = cast(config_lib.Config, config_lib.Proxy(config)) - if args.extension and args.extension not in extensions["enabled"]: + if args.extension and args.extension not in extensions_status["enabled"]: logger.error( "Unable to run command provided by disabled extension %s", args.extension.ext_name, ) return 1 - for extension in extensions["enabled"]: + for extension in extensions_status["enabled"]: try: extension.setup(registry) except Exception: @@ -140,15 +168,19 @@ def main(): # Anything that wants to exit after this point must use # mopidy.internal.process.exit_process as actors can have been started. 
try: - return args.command.run(args, proxied_config) + assert isinstance(args.command, commands.Command) + return args.command.run( + args=args, + config=proxied_config, + ) except NotImplementedError: - print(root_cmd.format_help()) + print(root_cmd.format_help()) # noqa: T201 return 1 except KeyboardInterrupt: - pass - except Exception as ex: - logger.exception(ex) + return 0 + except Exception: + logger.exception("Unhandled exception") raise @@ -158,10 +190,9 @@ def create_core_dirs(config): path.get_or_create_dir(config["core"]["data_dir"]) -def create_initial_config_file(args, extensions_data): - """Initialize whatever the last config file is with defaults""" - - config_file = path.expand_path(args.config_files[-1]) +def create_initial_config_file(config_files, extensions_data): + """Initialize whatever the last config file is with defaults.""" + config_file = path.expand_path(config_files[-1]) if config_file.exists(): return @@ -188,13 +219,16 @@ def log_extension_info(all_extensions, enabled_extensions): logger.info("Disabled extensions: %s", ", ".join(disabled_names) or "none") -def check_config_errors(config, errors, extensions): +def check_config_errors( + errors: config_lib.ConfigErrors, + extensions_status: ExtensionsStatus, +) -> None: fatal_errors = [] extension_names = {} all_extension_names = set() - for state in extensions: - extension_names[state] = {e.ext_name for e in extensions[state]} + for state in extensions_status: + extension_names[state] = {e.ext_name for e in extensions_status[state]} all_extension_names.update(extension_names[state]) for section in sorted(errors): @@ -216,7 +250,7 @@ def check_config_errors(config, errors, extensions): for field, msg in errors[section].items(): logger.warning(f" {section}/{field} {msg}") - if extensions["config"]: + if extensions_status["config"]: logger.warning( "Please fix the extension configuration errors or " "disable the extensions to silence these messages." 
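(Editor's aside.) The reworked ``main()`` above derives its default config file locations from GLib's XDG helpers instead of a hard-coded list. A rough sketch of what that expression evaluates to on a typical Linux system, mirroring the code in the hunk above::

    from pathlib import Path

    from gi.repository import GLib

    default_config_files = [
        (Path(base) / "mopidy" / "mopidy.conf").resolve()
        for base in [*GLib.get_system_config_dirs(), GLib.get_user_config_dir()]
    ]
    # Typically something like:
    #   /etc/xdg/mopidy/mopidy.conf
    #   /home/<user>/.config/mopidy/mopidy.conf
    print(default_config_files)

Explicit ``--config`` arguments still take precedence, since ``args.config_files`` is consulted before falling back to these defaults.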
diff --git a/src/mopidy/audio/__init__.py b/src/mopidy/audio/__init__.py new file mode 100644 index 0000000000..61b37093b8 --- /dev/null +++ b/src/mopidy/audio/__init__.py @@ -0,0 +1,12 @@ +from .actor import Audio, AudioProxy +from .constants import PlaybackState +from .listener import AudioListener +from .utils import supported_uri_schemes + +__all__ = [ + "Audio", + "AudioProxy", + "PlaybackState", + "AudioListener", + "supported_uri_schemes", +] diff --git a/mopidy/audio/actor.py b/src/mopidy/audio/actor.py similarity index 61% rename from mopidy/audio/actor.py rename to src/mopidy/audio/actor.py index a74203956c..84062f6b93 100644 --- a/mopidy/audio/actor.py +++ b/src/mopidy/audio/actor.py @@ -1,8 +1,13 @@ +from __future__ import annotations + import logging import os import threading +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, cast import pykka +from pykka.typing import ActorMemberMixin, proxy_field, proxy_method from mopidy import exceptions from mopidy.audio import tags as tags_lib @@ -10,7 +15,13 @@ from mopidy.audio.constants import PlaybackState from mopidy.audio.listener import AudioListener from mopidy.internal import process -from mopidy.internal.gi import GLib, GObject, Gst, GstPbutils +from mopidy.internal.gi import GLib, Gst, GstPbutils +from mopidy.types import DurationMs, Percentage + +if TYPE_CHECKING: + from mopidy.config import Config + from mopidy.mixer import MixerProxy + from mopidy.softwaremixer.mixer import SoftwareMixerProxy logger = logging.getLogger(__name__) @@ -22,188 +33,132 @@ _GST_PLAY_FLAGS_AUDIO = 0x02 _GST_PLAY_FLAGS_DOWNLOAD = 0x80 -_GST_STATE_MAPPING = { +_GST_STATE_MAPPING: dict[Gst.State, PlaybackState] = { Gst.State.PLAYING: PlaybackState.PLAYING, Gst.State.PAUSED: PlaybackState.PAUSED, Gst.State.NULL: PlaybackState.STOPPED, } -# TODO: expose this as a property on audio? -class _Appsrc: - - """Helper class for dealing with appsrc based playback.""" - - def __init__(self): - self._signals = utils.Signals() - self.reset() - - def reset(self): - """Reset the helper. - - Should be called whenever the source changes and we are not setting up - a new appsrc. - """ - self.prepare(None, None, None, None) - - def prepare(self, caps, need_data, enough_data, seek_data): - """Store info we will need when the appsrc element gets installed.""" - self._signals.clear() - self._source = None - self._caps = caps - self._need_data_callback = need_data - self._seek_data_callback = seek_data - self._enough_data_callback = enough_data - - def configure(self, source): - """Configure the supplied source for use. - - Should be called whenever we get a new appsrc. 
- """ - source.set_property("caps", self._caps) - source.set_property("format", "time") - source.set_property("stream-type", "seekable") - source.set_property("max-bytes", 1 << 20) # 1MB - source.set_property("min-percent", 50) - - if self._need_data_callback: - self._signals.connect( - source, "need-data", self._on_signal, self._need_data_callback - ) - if self._seek_data_callback: - self._signals.connect( - source, "seek-data", self._on_signal, self._seek_data_callback - ) - if self._enough_data_callback: - self._signals.connect( - source, - "enough-data", - self._on_signal, - None, - self._enough_data_callback, - ) - - self._source = source - - def push(self, buffer_): - if self._source is None: - return False - - if buffer_ is None: - gst_logger.debug("Sending appsrc end-of-stream event.") - result = self._source.emit("end-of-stream") - return result == Gst.FlowReturn.OK - else: - result = self._source.emit("push-buffer", buffer_) - return result == Gst.FlowReturn.OK - - def _on_signal(self, element, clocktime, func): - # This shim is used to ensure we always return true, and also handles - # that not all the callbacks have a time argument. - if clocktime is None: - func() - else: - func(utils.clocktime_to_millisecond(clocktime)) - return True - - # TODO: expose this as a property on audio when #790 gets further along. class _Outputs(Gst.Bin): def __init__(self): Gst.Bin.__init__(self) # TODO gst1: Set 'outputs' as the Bin name for easier debugging - self._tee = Gst.ElementFactory.make("tee") + tee = Gst.ElementFactory.make("tee") + if tee is None: + raise exceptions.AudioException("Failed to create GStreamer tee.") + self._tee = tee self.add(self._tee) - ghost_pad = Gst.GhostPad.new("sink", self._tee.get_static_pad("sink")) + tee_sink = self._tee.get_static_pad("sink") + if tee_sink is None: + raise exceptions.AudioException("Failed to get sink from GStreamer tee.") + ghost_pad = Gst.GhostPad.new("sink", tee_sink) self.add_pad(ghost_pad) - def add_output(self, description): + def add_output(self, description) -> None: # XXX This only works for pipelines not in use until #790 gets done. 
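        # Editor's note (not part of this change): the `description` argument
        # is the `audio/output` config value, i.e. a GStreamer bin description
        # such as "autoaudiosink" or "alsasink device=hw:1", which
        # Gst.parse_bin_from_description() below turns into an output bin.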
try: output = Gst.parse_bin_from_description( description, ghost_unlinked_pads=True ) - except GLib.GError as ex: - logger.error( - 'Failed to create audio output "%s": %s', description, ex - ) - raise exceptions.AudioException(ex) + except GLib.Error as exc: + logger.error('Failed to create audio output "%s": %s', description, exc) + raise exceptions.AudioException( + f"Failed to create audio output {description!r}" + ) from exc self._add(output) logger.info('Audio output set to "%s"', description) - def _add(self, element): - queue = Gst.ElementFactory.make("queue") + def _add(self, element) -> None: self.add(element) + + queue = Gst.ElementFactory.make("queue") + if queue is None: + raise exceptions.AudioException("Failed to create GStreamer queue.") self.add(queue) + queue.link(element) self._tee.link(queue) -class SoftwareMixer: - def __init__(self, mixer): +class SoftwareMixerAdapter: + _mixer: SoftwareMixerProxy + _element: Gst.Element | None + _last_volume: int | None + _last_mute: bool | None + _signals: utils.Signals + + def __init__(self, mixer: SoftwareMixerProxy) -> None: self._mixer = mixer self._element = None self._last_volume = None self._last_mute = None self._signals = utils.Signals() - def setup(self, element, mixer_ref): + def setup(self, element, mixer_ref) -> None: self._element = element self._mixer.setup(mixer_ref) - def teardown(self): + def teardown(self) -> None: self._signals.clear() self._mixer.teardown() - def get_volume(self): - return int(round(self._element.get_property("volume") * 100)) + def get_volume(self) -> Percentage: + assert self._element + return Percentage(round(self._element.get_property("volume") * 100)) - def set_volume(self, volume): + def set_volume(self, volume: Percentage) -> None: + assert self._element self._element.set_property("volume", volume / 100.0) self._mixer.trigger_volume_changed(self.get_volume()) - def get_mute(self): + def get_mute(self) -> bool: + assert self._element return self._element.get_property("mute") - def set_mute(self, mute): + def set_mute(self, mute: bool) -> None: + assert self._element self._element.set_property("mute", bool(mute)) self._mixer.trigger_mute_changed(self.get_mute()) class _Handler: - def __init__(self, audio): + def __init__(self, audio: Audio) -> None: self._audio = audio self._element = None self._pad = None self._message_handler_id = None self._event_handler_id = None - def setup_message_handling(self, element): + def setup_message_handling(self, element) -> None: self._element = element bus = element.get_bus() bus.add_signal_watch() self._message_handler_id = bus.connect("message", self.on_message) - def setup_event_handling(self, pad): + def setup_event_handling(self, pad) -> None: self._pad = pad self._event_handler_id = pad.add_probe( Gst.PadProbeType.EVENT_BOTH, self.on_pad_event ) - def teardown_message_handling(self): - bus = self._element.get_bus() - bus.remove_signal_watch() - bus.disconnect(self._message_handler_id) + def teardown_message_handling(self) -> None: + if self._element is not None: + bus = self._element.get_bus() + bus.remove_signal_watch() + bus.disconnect(self._message_handler_id) self._message_handler_id = None - def teardown_event_handling(self): - self._pad.remove_probe(self._event_handler_id) + def teardown_event_handling(self) -> None: + if self._pad is not None: + self._pad.remove_probe(self._event_handler_id) self._event_handler_id = None - def on_message(self, bus, msg): + def on_message(self, _bus, msg) -> None: # noqa: C901 if msg.type == 
Gst.MessageType.STATE_CHANGED: if msg.src != self._element: return @@ -230,7 +185,7 @@ def on_message(self, bus, msg): elif msg.type == Gst.MessageType.STREAM_START: self.on_stream_start() - def on_pad_event(self, pad, pad_probe_info): + def on_pad_event(self, _pad, pad_probe_info): event = pad_probe_info.get_event() if event.type == Gst.EventType.SEGMENT: self.on_segment(event.parse_segment()) @@ -286,19 +241,22 @@ def on_playbin_state_changed(self, old_state, new_state, pending_state): AudioListener.send("stream_changed", uri=None) if "GST_DEBUG_DUMP_DOT_DIR" in os.environ: + assert self._audio._playbin Gst.debug_bin_to_dot_file( - self._audio._playbin, Gst.DebugGraphDetails.ALL, "mopidy" + bin=cast(Gst.Bin, self._audio._playbin), + details=Gst.DebugGraphDetails.ALL, + file_name="mopidy", ) def on_buffering(self, percent, structure=None): + assert self._audio._playbin + if self._audio._target_state < Gst.State.PAUSED: gst_logger.debug("Skip buffering during track change.") return if structure is not None and structure.has_field("buffering-mode"): - buffering_mode = structure.get_enum( - "buffering-mode", Gst.BufferingMode - ) + buffering_mode = structure.get_enum("buffering-mode", Gst.BufferingMode) if buffering_mode == Gst.BufferingMode.LIVE: return # Live sources stall in paused. @@ -313,9 +271,7 @@ def on_buffering(self, percent, structure=None): self._audio._playbin.set_state(Gst.State.PLAYING) level = logging.DEBUG - gst_logger.log( - level, "Got BUFFERING bus message: percent=%d%%", percent - ) + gst_logger.log(level, "Got BUFFERING bus message: percent=%d%%", percent) def on_end_of_stream(self): gst_logger.debug("Got EOS (end of stream) bus message.") @@ -325,27 +281,21 @@ def on_end_of_stream(self): def on_error(self, error, debug): gst_logger.error(f"GStreamer error: {error.message}") - gst_logger.debug( - f"Got ERROR bus message: error={error!r} debug={debug!r}" - ) + gst_logger.debug(f"Got ERROR bus message: error={error!r} debug={debug!r}") # TODO: is this needed? self._audio.stop_playback() def on_warning(self, error, debug): gst_logger.warning(f"GStreamer warning: {error.message}") - gst_logger.debug( - f"Got WARNING bus message: error={error!r} debug={debug!r}" - ) + gst_logger.debug(f"Got WARNING bus message: error={error!r} debug={debug!r}") def on_async_done(self): gst_logger.debug("Got ASYNC_DONE bus message.") def on_tag(self, taglist): tags = tags_lib.convert_taglist(taglist) - gst_logger.debug( - f"Got TAG bus message: tags={tags_lib.repr_tags(tags)}" - ) + gst_logger.debug(f"Got TAG bus message: tags={tags_lib.repr_tags(tags)}") # Postpone emitting tags until stream start. if self._audio._pending_tags is not None: @@ -372,8 +322,7 @@ def on_missing_plugin(self, msg): logger.warning("Could not find a %s to handle media.", desc) if GstPbutils.install_plugins_supported(): logger.info( - "You might be able to fix this by running: " - 'gst-installer "%s"', + "You might be able to fix this by running: 'gst-installer \"%s\"'", debug, ) # TODO: store the missing plugins installer info in a file so we can @@ -381,6 +330,8 @@ def on_missing_plugin(self, msg): # required helper installed? 
def on_stream_start(self): + assert self._audio._playbin + gst_logger.debug("Got STREAM_START bus message") uri = self._audio._pending_uri logger.debug("Audio event: stream_changed(uri=%r)", uri) @@ -418,57 +369,61 @@ def on_segment(self, segment): # TODO: create a player class which replaces the actors internals class Audio(pykka.ThreadingActor): - - """ - Audio output through `GStreamer `_. - """ + """Audio output through `GStreamer `_.""" #: The GStreamer state mapped to :class:`mopidy.audio.PlaybackState` - state = PlaybackState.STOPPED + state: PlaybackState = PlaybackState.STOPPED - #: The software mixing interface :class:`mopidy.audio.actor.SoftwareMixer` - mixer = None + #: The software mixing interface :class:`mopidy.audio.actor.SoftwareMixerAdapter` + mixer: SoftwareMixerAdapter | None = None - def __init__(self, config, mixer): + def __init__( + self, + config: Config, + mixer: MixerProxy | None, + ) -> None: super().__init__() self._config = config - self._target_state = Gst.State.NULL - self._buffering = False - self._live_stream = False - self._tags = {} - self._pending_uri = None - self._pending_tags = None + self._target_state: Gst.State = Gst.State.NULL + self._buffering: bool = False + self._live_stream: bool = False + self._tags: dict[str, list[Any]] = {} + self._pending_uri: str | None = None + self._pending_tags: dict[str, list[Any]] | None = None self._pending_metadata = None - self._playbin = None + self._playbin: Gst.Element | None = None self._outputs = None self._queue = None - self._about_to_finish_callback = None + self._about_to_finish_callback: Callable | None = None + self._source_setup_callback: Callable | None = None self._handler = _Handler(self) - self._appsrc = _Appsrc() self._signals = utils.Signals() if mixer and self._config["audio"]["mixer"] == "software": - self.mixer = pykka.traversable(SoftwareMixer(mixer)) + from mopidy.softwaremixer.mixer import SoftwareMixerProxy - def on_start(self): + mixer = cast(SoftwareMixerProxy, mixer) + self.mixer = pykka.traversable(SoftwareMixerAdapter(mixer)) + + def on_start(self) -> None: self._thread = threading.current_thread() try: self._setup_preferences() self._setup_playbin() self._setup_outputs() self._setup_audio_sink() - except GLib.GError as ex: - logger.exception(ex) + except GLib.Error: + logger.exception("Unknown GLib error on audio startup.") process.exit_process() - def on_stop(self): + def on_stop(self) -> None: self._teardown_mixer() self._teardown_playbin() - def _setup_preferences(self): + def _setup_preferences(self) -> None: # TODO: move out of audio actor? # Fix for https://github.com/mopidy/mopidy/issues/604 registry = Gst.Registry.get() @@ -476,8 +431,10 @@ def _setup_preferences(self): if jacksink: jacksink.set_rank(Gst.Rank.SECONDARY) - def _setup_playbin(self): + def _setup_playbin(self) -> None: playbin = Gst.ElementFactory.make("playbin") + if playbin is None: + raise exceptions.AudioException("Failed to create GStreamer playbin.") playbin.set_property("flags", _GST_PLAY_FLAGS_AUDIO) # TODO: turn into config values... 
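(Editor's aside.) The explicit ``None`` checks added throughout this change after each ``Gst.ElementFactory.make()`` call exist because GStreamer signals a missing element by returning ``None`` rather than raising. A minimal illustration, assuming only that PyGObject and GStreamer are installed::

    import gi

    gi.require_version("Gst", "1.0")
    from gi.repository import Gst

    Gst.init(None)
    # make() returns None when no element factory with that name is registered,
    # so callers must check before using the result.
    print(Gst.ElementFactory.make("no-such-element") is None)  # True
    print(Gst.ElementFactory.make("playbin") is None)          # False on a working install

Raising ``exceptions.AudioException`` at that point gives a clearer failure than the ``AttributeError`` that would otherwise follow.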
@@ -485,25 +442,29 @@ def _setup_playbin(self): playbin.set_property("buffer-duration", 5 * Gst.SECOND) self._signals.connect(playbin, "source-setup", self._on_source_setup) - self._signals.connect( - playbin, "about-to-finish", self._on_about_to_finish - ) + self._signals.connect(playbin, "about-to-finish", self._on_about_to_finish) self._playbin = playbin self._handler.setup_message_handling(playbin) - def _teardown_playbin(self): + def _teardown_playbin(self) -> None: self._handler.teardown_message_handling() self._handler.teardown_event_handling() - self._signals.disconnect(self._playbin, "about-to-finish") - self._signals.disconnect(self._playbin, "source-setup") - self._playbin.set_state(Gst.State.NULL) + if self._playbin is not None: + self._signals.disconnect(self._playbin, "about-to-finish") + self._signals.disconnect(self._playbin, "source-setup") + self._playbin.set_state(Gst.State.NULL) - def _setup_outputs(self): + def _setup_outputs(self) -> None: # We don't want to use outputs for regular testing, so just install # an unsynced fakesink when someone asks for a 'testoutput'. if self._config["audio"]["output"] == "testoutput": - self._outputs = Gst.ElementFactory.make("fakesink") + fakesink = Gst.ElementFactory.make("fakesink") + if fakesink is None: + raise exceptions.AudioException( + "Failed to create GStreamer fakesink element." + ) + self._outputs = fakesink else: self._outputs = _Outputs() try: @@ -513,18 +474,36 @@ def _setup_outputs(self): self._handler.setup_event_handling(self._outputs.get_static_pad("sink")) - def _setup_audio_sink(self): + def _setup_audio_sink(self) -> None: + assert self._playbin + + if self._outputs is None: + raise TypeError("Audio outputs must be set up before audio sinks.") + audio_sink = Gst.ElementFactory.make("bin", "audio-sink") + if audio_sink is None: + raise exceptions.AudioException( + "Failed to create GStreamer bin 'audio-sink'." + ) + audio_sink = cast(Gst.Bin, audio_sink) + queue = Gst.ElementFactory.make("queue") + if queue is None: + raise exceptions.AudioException("Failed to create GStreamer queue element.") + volume = Gst.ElementFactory.make("volume") + if volume is None: + raise exceptions.AudioException( + "Failed to create GStreamer volume element." + ) # Queue element to buy us time between the about-to-finish event and # the actual switch, i.e. about to switch can block for longer thanks # to this queue. # TODO: See if settings should be set to minimize latency. Previous - # setting breaks appsrc, and settings before that broke on a few - # systems. So leave the default to play it safe. + # setting breaks appsrc (which we no longer use), and settings before + # that broke on a few systems. So leave the default to play it safe. 
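        # Editor's note (not part of this change): `audio/buffer_time` is
        # configured in milliseconds, while the queue's `max-size-time`
        # property expects nanoseconds, hence the `Gst.MSECOND` factor applied
        # just below.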
buffer_time = self._config["audio"]["buffer_time"] if buffer_time is not None and buffer_time > 0: queue.set_property("max-size-time", buffer_time * Gst.MSECOND) @@ -539,21 +518,22 @@ def _setup_audio_sink(self): if self.mixer: self.mixer.setup(volume, self.actor_ref.proxy().mixer) - ghost_pad = Gst.GhostPad.new("sink", queue.get_static_pad("sink")) + queue_sink = queue.get_static_pad("sink") + if queue_sink is None: + raise exceptions.AudioException("Failed to get sink from GStreamer queue.") + ghost_pad = Gst.GhostPad.new("sink", queue_sink) audio_sink.add_pad(ghost_pad) self._playbin.set_property("audio-sink", audio_sink) self._queue = queue - def _teardown_mixer(self): + def _teardown_mixer(self) -> None: if self.mixer: self.mixer.teardown() - def _on_about_to_finish(self, element): + def _on_about_to_finish(self, _element: Gst.Element) -> None: if self._thread == threading.current_thread(): - logger.error( - "about-to-finish in actor, aborting to avoid deadlock." - ) + logger.error("about-to-finish in actor, aborting to avoid deadlock.") return gst_logger.debug("Got about-to-finish event.") @@ -561,43 +541,53 @@ def _on_about_to_finish(self, element): logger.debug("Running about-to-finish callback.") self._about_to_finish_callback() - def _on_source_setup(self, element, source): + def _on_source_setup( + self, + _element: Gst.Element, + source: Gst.Element, + ) -> None: gst_logger.debug( "Got source-setup signal: element=%s", source.__class__.__name__ ) - if source.get_factory().get_name() == "appsrc": - self._appsrc.configure(source) - else: - self._appsrc.reset() + source_factory = source.get_factory() + if source_factory is None: + raise exceptions.AudioException( + "Failed to get factory from GStreamer source." + ) + + if self._source_setup_callback: + logger.debug("Running source-setup callback") + self._source_setup_callback(source) if self._live_stream and hasattr(source.props, "is_live"): gst_logger.debug("Enabling live stream mode") - source.set_live(True) + # TODO(typing): Once pygobject-stubs includes GstApp, cast to AppSrc: + # source = cast(GstApp.AppSrc, source) # noqa: ERA001 + source.set_live(True) # pyright: ignore[reportAttributeAccessIssue] utils.setup_proxy(source, self._config["proxy"]) - def set_uri(self, uri, live_stream=False, download=False): - """ - Set URI of audio to be played. + def set_uri( + self, + uri: str, + live_stream: bool = False, + download: bool = False, + ) -> None: + """Set URI of audio to be played. You *MUST* call :meth:`prepare_change` before calling this method. :param uri: the URI to play - :type uri: string :param live_stream: disables buffering, reducing latency for stream, and discarding data when paused - :type live_stream: bool :param download: enables "download" buffering mode - :type download: bool """ + assert self._playbin # XXX: Hack to workaround issue on Mac OS X where volume level # does not persist between track changes. mopidy/mopidy#886 - if self.mixer is not None: - current_volume = self.mixer.get_volume() - else: - current_volume = None + current_volume = self.mixer.get_volume() if self.mixer is not None else None flags = _GST_PLAY_FLAGS_AUDIO if download: @@ -619,121 +609,77 @@ def set_uri(self, uri, live_stream=False, download=False): if self.mixer is not None and current_volume is not None: self.mixer.set_volume(current_volume) - def set_appsrc( - self, caps, need_data=None, enough_data=None, seek_data=None - ): - """ - Switch to using appsrc for getting audio to be played. 
- - You *MUST* call :meth:`prepare_change` before calling this method. - - :param caps: GStreamer caps string describing the audio format to - expect - :type caps: string - :param need_data: callback for when appsrc needs data - :type need_data: callable which takes data length hint in ms - :param enough_data: callback for when appsrc has enough data - :type enough_data: callable - :param seek_data: callback for when data from a new position is needed - to continue playback - :type seek_data: callable which takes time position in ms - """ - self._appsrc.prepare( - Gst.Caps.from_string(caps), need_data, enough_data, seek_data - ) - uri = "appsrc://" - self._pending_uri = uri - self._live_stream = False - self._playbin.set_property("flags", _GST_PLAY_FLAGS_AUDIO) - self._playbin.set_property("uri", uri) - - def emit_data(self, buffer_): - """ - Call this to deliver raw audio data to be played. - - If the buffer is :class:`None`, the end-of-stream token is put on the - playbin. We will get a GStreamer message when the stream playback - reaches the token, and can then do any end-of-stream related tasks. - - Note that the URI must be set to ``appsrc://`` for this to work. + def set_source_setup_callback(self, callback: Callable) -> None: + """Configure audio to use a source-setup callback. - Returns :class:`True` if data was delivered. + This should be used to modify source-specific properties such as login + details. - :param buffer_: buffer to pass to appsrc - :type buffer_: :class:`Gst.Buffer` or :class:`None` - :rtype: boolean + :param callable: Callback to run when we setup the source. """ - return self._appsrc.push(buffer_) + self._source_setup_callback = callback - def set_about_to_finish_callback(self, callback): - """ - Configure audio to use an about-to-finish callback. + def set_about_to_finish_callback(self, callback: Callable) -> None: + """Configure audio to use an about-to-finish callback. This should be used to achieve gapless playback. For this to work the callback *MUST* call :meth:`set_uri` with the new URI to play and block until this call has been made. :meth:`prepare_change` is not needed before :meth:`set_uri` in this one special case. - :param callable callback: Callback to run when we need the next URI. + :param callable: Callback to run when we need the next URI. """ self._about_to_finish_callback = callback - def get_position(self): - """ - Get position in milliseconds. + def get_position(self) -> DurationMs: + """Get position in milliseconds.""" + assert self._playbin - :rtype: int - """ success, position = self._playbin.query_position(Gst.Format.TIME) if not success: # TODO: take state into account for this and possibly also return # None as the unknown value instead of zero? logger.debug("Position query failed") - return 0 + return DurationMs(0) return utils.clocktime_to_millisecond(position) - def set_position(self, position): - """ - Set position in milliseconds. + def set_position(self, position: DurationMs) -> bool: + """Set position in milliseconds. :param position: the position in milliseconds - :type position: int - :rtype: :class:`True` if successful, else :class:`False` """ + assert self._queue + # TODO: double check seek flags in use. gst_position = utils.millisecond_to_clocktime(position) gst_logger.debug("Sending flushing seek: position=%r", gst_position) # Send seek event to the queue not the playbin. The default behavior # for bins is to forward this event to all sinks. Which results in - # duplicate seek events making it to appsrc. 
Since elements are not - # allowed to act on the seek event, only modify it, this should be safe - # to do. - result = self._queue.seek_simple( + # duplicate seek events making it to appsrc (which we no longer use). + # Since elements are not allowed to act on the seek event, only modify + # it, this should be safe to do. + return self._queue.seek_simple( Gst.Format.TIME, Gst.SeekFlags.FLUSH, gst_position ) - return result - def start_playback(self): - """ - Notify GStreamer that it should start playback. + def start_playback(self) -> bool: + """Notify GStreamer that it should start playback. - :rtype: :class:`True` if successfull, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self._set_state(Gst.State.PLAYING) - def pause_playback(self): - """ - Notify GStreamer that it should pause playback. + def pause_playback(self) -> bool: + """Notify GStreamer that it should pause playback. - :rtype: :class:`True` if successfull, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self._set_state(Gst.State.PAUSED) - def prepare_change(self): - """ - Notify GStreamer that we are about to change state of playback. + def prepare_change(self) -> bool: + """Notify GStreamer that we are about to change state of playback. This function *MUST* be called before changing URIs or doing changes like updating data that is being pushed. The reason for this @@ -742,37 +688,41 @@ def prepare_change(self): """ return self._set_state(Gst.State.READY) - def stop_playback(self): - """ - Notify GStreamer that is should stop playback. + def stop_playback(self) -> bool: + """Notify GStreamer that it should stop playback. - :rtype: :class:`True` if successfull, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self._set_state(Gst.State.NULL) - def wait_for_state_change(self): + def wait_for_state_change(self) -> None: """Block until any pending state changes are complete. Should only be used by tests. """ + assert self._playbin + self._playbin.get_state(timeout=Gst.CLOCK_TIME_NONE) - def enable_sync_handler(self): + def enable_sync_handler(self) -> None: """Enable manual processing of messages from bus. Should only be used by tests. """ + assert self._playbin def sync_handler(bus, message): self._handler.on_message(bus, message) return Gst.BusSyncReply.DROP bus = self._playbin.get_bus() + if bus is None: + raise exceptions.AudioException("Failed to get bus from GStreamer playbin.") + bus.set_sync_handler(sync_handler) - def _set_state(self, state): - """ - Internal method for setting the raw GStreamer state. + def _set_state(self, state: Gst.State) -> bool: + """Internal method for setting the raw GStreamer state. .. digraph:: gst_state_transitions @@ -786,11 +736,13 @@ def _set_state(self, state): "READY" -> "NULL" "READY" -> "PAUSED" + Returns :class:`True` if successful, else :class:`False`. + :param state: State to set playbin to. One of: `Gst.State.NULL`, `Gst.State.READY`, `Gst.State.PAUSED` and `Gst.State.PLAYING`. 
- :type state: :class:`Gst.State` - :rtype: :class:`True` if successfull, else :class:`False` """ + assert self._playbin + if state < Gst.State.PAUSED: self._buffering = False @@ -803,72 +755,39 @@ def _set_state(self, state): ) if result == Gst.StateChangeReturn.FAILURE: - logger.warning( - "Setting GStreamer state to %s failed", state.value_name - ) + logger.warning("Setting GStreamer state to %s failed", state.value_name) return False # TODO: at this point we could already emit stopped event instead # of faking it in the message handling when result=OK return True - # TODO: bake this into setup appsrc perhaps? - def set_metadata(self, track): - """ - Set track metadata for currently playing song. - - Only needs to be called by sources such as ``appsrc`` which do not - already inject tags in playbin, e.g. when using :meth:`emit_data` to - deliver raw audio data to GStreamer. - - :param track: the current track - :type track: :class:`mopidy.models.Track` - """ - taglist = Gst.TagList.new_empty() - artists = [a for a in (track.artists or []) if a.name] - - def set_value(tag, value): - gobject_value = GObject.Value() - gobject_value.init(GObject.TYPE_STRING) - gobject_value.set_string(value) - taglist.add_value(Gst.TagMergeMode.REPLACE, tag, gobject_value) - - # Default to blank data to trick shoutcast into clearing any previous - # values it might have. - # TODO: Verify if this works at all, likely it doesn't. - set_value(Gst.TAG_ARTIST, " ") - set_value(Gst.TAG_TITLE, " ") - set_value(Gst.TAG_ALBUM, " ") - - if artists: - set_value(Gst.TAG_ARTIST, ", ".join(a.name for a in artists)) - - if track.name: - set_value(Gst.TAG_TITLE, track.name) - - if track.album and track.album.name: - set_value(Gst.TAG_ALBUM, track.album.name) - - gst_logger.debug( - "Sending TAG event for track %r: %r", track.uri, taglist.to_string() - ) - event = Gst.Event.new_tag(taglist) - if self._pending_uri: - self._pending_metadata = event - else: - self._playbin.send_event(event) - - def get_current_tags(self): - """ - Get the currently playing media's tags. + def get_current_tags(self) -> dict[str, list[Any]]: + """Get the currently playing media's tags. If no tags have been found, or nothing is playing this returns an empty dictionary. For each set of tags we collect a tags_changed event is - emitted with the keys of the changes tags. After such calls users may + emitted with the keys of the changed tags. After such calls users may call this function to get the updated values. - - :rtype: {key: [values]} dict for the current media. """ # TODO: should this be a (deep) copy? most likely yes # TODO: should we return None when stopped? # TODO: support only fetching keys we care about? 
return self._tags + + +class AudioProxy(ActorMemberMixin, pykka.ActorProxy[Audio]): + """Audio layer wrapped in a Pykka actor proxy.""" + + state = proxy_field(Audio.state) + set_uri = proxy_method(Audio.set_uri) + set_source_setup_callback = proxy_method(Audio.set_source_setup_callback) + set_about_to_finish_callback = proxy_method(Audio.set_about_to_finish_callback) + get_position = proxy_method(Audio.get_position) + set_position = proxy_method(Audio.set_position) + start_playback = proxy_method(Audio.start_playback) + pause_playback = proxy_method(Audio.pause_playback) + prepare_change = proxy_method(Audio.prepare_change) + stop_playback = proxy_method(Audio.stop_playback) + wait_for_state_change = proxy_method(Audio.wait_for_state_change) + enable_sync_handler = proxy_method(Audio.enable_sync_handler) + get_current_tags = proxy_method(Audio.get_current_tags) diff --git a/mopidy/audio/constants.py b/src/mopidy/audio/constants.py similarity index 70% rename from mopidy/audio/constants.py rename to src/mopidy/audio/constants.py index c1046f45d5..c81efa1b7d 100644 --- a/mopidy/audio/constants.py +++ b/src/mopidy/audio/constants.py @@ -1,8 +1,8 @@ -class PlaybackState: +from enum import Enum - """ - Enum of playback states. - """ + +class PlaybackState(str, Enum): + """Enum of playback states.""" #: Constant representing the paused state. PAUSED = "paused" diff --git a/mopidy/audio/listener.py b/src/mopidy/audio/listener.py similarity index 61% rename from mopidy/audio/listener.py rename to src/mopidy/audio/listener.py index 1b49cf1755..2e7f2e710d 100644 --- a/mopidy/audio/listener.py +++ b/src/mopidy/audio/listener.py @@ -1,10 +1,16 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + from mopidy import listener +if TYPE_CHECKING: + from mopidy.audio import PlaybackState + from mopidy.types import DurationMs, Uri -class AudioListener(listener.Listener): - """ - Marker interface for recipients of events sent by the audio actor. +class AudioListener(listener.Listener): + """Marker interface for recipients of events sent by the audio actor. Any Pykka actor that mixes in this class will receive calls to the methods defined here when the corresponding events happen in the core actor. This @@ -14,41 +20,39 @@ class AudioListener(listener.Listener): """ @staticmethod - def send(event, **kwargs): - """Helper to allow calling of audio listener events""" + def send(event: str, **kwargs: Any) -> None: + """Helper to allow calling of audio listener events.""" listener.send(AudioListener, event, **kwargs) - def reached_end_of_stream(self): - """ - Called whenever the end of the audio stream is reached. + def reached_end_of_stream(self) -> None: + """Called whenever the end of the audio stream is reached. *MAY* be implemented by actor. """ - pass - def stream_changed(self, uri): - """ - Called whenever the audio stream changes. + def stream_changed(self, uri: Uri) -> None: + """Called whenever the audio stream changes. *MAY* be implemented by actor. :param string uri: URI the stream has started playing. """ - pass - def position_changed(self, position): - """ - Called whenever the position of the stream changes. + def position_changed(self, position: DurationMs) -> None: + """Called whenever the position of the stream changes. *MAY* be implemented by actor. :param int position: Position in milliseconds. """ - pass - def state_changed(self, old_state, new_state, target_state): - """ - Called after the playback state have changed. 
+ def state_changed( + self, + old_state: PlaybackState, + new_state: PlaybackState, + target_state: PlaybackState | None, + ) -> None: + """Called after the playback state have changed. Will be called for both immediate and async state changes in GStreamer. @@ -65,19 +69,16 @@ def state_changed(self, old_state, new_state, target_state): *MAY* be implemented by actor. :param old_state: the state before the change - :type old_state: string from :class:`mopidy.core.PlaybackState` field + :type old_state: :class:`mopidy.audio.PlaybackState` :param new_state: the state after the change - :type new_state: A :class:`mopidy.core.PlaybackState` field - :type new_state: string from :class:`mopidy.core.PlaybackState` field + :type new_state: :class:`mopidy.audio.PlaybackState` :param target_state: the intended state - :type target_state: string from :class:`mopidy.core.PlaybackState` - field or :class:`None` if this is a final state. + :type target_state: :class:`mopidy.audio.PlaybackState` + or :class:`None` if this is a final state. """ - pass - def tags_changed(self, tags): - """ - Called whenever the current audio stream's tags change. + def tags_changed(self, tags: set[str]) -> None: + """Called whenever the current audio stream's tags change. This event signals that some track metadata has been updated. This can be metadata such as artists, titles, organization, or details about the @@ -91,4 +92,3 @@ def tags_changed(self, tags): :param tags: The tags that have just been updated. :type tags: :class:`set` of strings """ - pass diff --git a/mopidy/audio/scan.py b/src/mopidy/audio/scan.py similarity index 61% rename from mopidy/audio/scan.py rename to src/mopidy/audio/scan.py index fd9763b4df..d38cc89547 100644 --- a/mopidy/audio/scan.py +++ b/src/mopidy/audio/scan.py @@ -1,6 +1,9 @@ import collections import logging import time +from enum import IntEnum +from pathlib import Path +from typing import Any, cast from mopidy import exceptions from mopidy.audio import tags as tags_lib @@ -8,16 +11,20 @@ from mopidy.internal import log from mopidy.internal.gi import Gst, GstPbutils -# GST_ELEMENT_FACTORY_LIST: -_DECODER = 1 << 0 -_AUDIO = 1 << 50 -_DEMUXER = 1 << 5 -_DEPAYLOADER = 1 << 8 -_PARSER = 1 << 6 -# GST_TYPE_AUTOPLUG_SELECT_RESULT: -_SELECT_TRY = 0 -_SELECT_EXPOSE = 1 +class GstElementFactoryListType(IntEnum): + DECODER = 1 << 0 + AUDIO = 1 << 50 + DEMUXER = 1 << 5 + DEPAYLOADER = 1 << 8 + PARSER = 1 << 6 + + +class GstAutoplugSelectResult(IntEnum): + TRY = 0 + EXPOSE = 1 + SKIP = 2 + _Result = collections.namedtuple( "_Result", ("uri", "tags", "duration", "seekable", "mime", "playable") @@ -32,9 +39,7 @@ def _trace(*args, **kwargs): # TODO: replace with a scan(uri, timeout=1000, proxy_config=None)? class Scanner: - - """ - Helper to get tags and other relevant info from URIs. + """Helper to get tags and other relevant info from URIs. :param timeout: timeout for scanning a URI in ms :param proxy_config: dictionary containing proxy config strings. @@ -46,8 +51,7 @@ def __init__(self, timeout=1000, proxy_config=None): self._proxy_config = proxy_config or {} def scan(self, uri, timeout=None): - """ - Scan the given uri collecting relevant metadata. + """Scan the given uri collecting relevant metadata. :param uri: URI of the resource to scan. :type uri: string @@ -79,7 +83,7 @@ def scan(self, uri, timeout=None): # Turns out it's _much_ faster to just create a new pipeline for every as # decodebins and other elements don't seem to take well to being reused. 
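Reviewer note: the `Scanner` public interface is unchanged by this move and type-annotation work, so existing callers keep working. A minimal usage sketch, mirroring the `__main__` block further down in this file; the file name is a placeholder and GStreamer plugins for the format are assumed to be installed:

```python
# Scan a local file and print the fields of the _Result namedtuple returned by
# Scanner.scan(). Mirrors the module's __main__ block; "example.flac" is a
# placeholder path.
from pathlib import Path

from mopidy.audio.scan import Scanner
from mopidy.internal import path as path_lib

scanner = Scanner(timeout=1000)
uri = path_lib.path_to_uri(Path("example.flac").resolve())
result = scanner.scan(uri)
print(result.uri, result.mime, result.duration, result.playable, result.seekable)
print(result.tags)  # dict of tag name -> list of values
```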
-def _setup_pipeline(uri, proxy_config=None): +def _setup_pipeline(uri: str, proxy_config=None) -> tuple[Gst.Pipeline, utils.Signals]: src = Gst.Element.make_from_uri(Gst.URIType.SRC, uri) if not src: raise exceptions.ScannerError(f"GStreamer can not open: {uri}") @@ -88,7 +92,11 @@ def _setup_pipeline(uri, proxy_config=None): utils.setup_proxy(src, proxy_config) signals = utils.Signals() + pipeline = Gst.ElementFactory.make("pipeline") + if pipeline is None: + raise exceptions.AudioException("Failed to create GStreamer pipeline element.") + pipeline = cast(Gst.Pipeline, pipeline) pipeline.add(src) if _has_src_pads(src): @@ -109,15 +117,22 @@ def _has_src_pads(element): def _has_dynamic_src_pad(element): for template in element.get_pad_template_list(): - if template.direction == Gst.PadDirection.SRC: - if template.presence == Gst.PadPresence.SOMETIMES: - return True + if ( + template.direction == Gst.PadDirection.SRC + and template.presence == Gst.PadPresence.SOMETIMES + ): + return True return False def _setup_decodebin(element, pad, pipeline, signals): typefind = Gst.ElementFactory.make("typefind") + if typefind is None: + raise exceptions.AudioException("Failed to create GStreamer typefind element.") + decodebin = Gst.ElementFactory.make("decodebin") + if decodebin is None: + raise exceptions.AudioException("Failed to create GStreamer decodebin element.") for element in (typefind, decodebin): pipeline.add(element) @@ -131,44 +146,101 @@ def _setup_decodebin(element, pad, pipeline, signals): signals.connect(decodebin, "autoplug-select", _autoplug_select) -def _have_type(element, probability, caps, decodebin): +def _have_type( + element: Gst.Element, + _probability: int, + caps: Gst.Caps, + decodebin: Gst.Bin, +) -> None: decodebin.set_property("sink-caps", caps) struct = Gst.Structure.new_empty("have-type") struct.set_value("caps", caps.get_structure(0)) - element.get_bus().post(Gst.Message.new_application(element, struct)) + + element_bus = element.get_bus() + if element_bus is None: + raise exceptions.AudioException("Failed to get bus of GStreamer element.") + + message = Gst.Message.new_application(element, struct) + if message is None: + raise exceptions.AudioException("Failed to create GStreamer message.") + + element_bus.post(message) -def _pad_added(element, pad, pipeline): - sink = Gst.ElementFactory.make("fakesink") - sink.set_property("sync", False) +def _pad_added( + element: Gst.Element, + pad: Gst.Pad, + pipeline: Gst.Pipeline, +) -> None: + fakesink = Gst.ElementFactory.make("fakesink") + if fakesink is None: + raise exceptions.AudioException("Failed to create GStreamer fakesink element.") - pipeline.add(sink) - sink.sync_state_with_parent() - pad.link(sink.get_static_pad("sink")) + fakesink.set_property("sync", False) - if pad.query_caps().is_subset(Gst.Caps.from_string("audio/x-raw")): + pipeline.add(fakesink) + fakesink.sync_state_with_parent() + fakesink_sink = fakesink.get_static_pad("sink") + if fakesink_sink is None: + raise exceptions.AudioException("Failed to get sink pad of GStreamer fakesink.") + pad.link(fakesink_sink) + + raw_caps = Gst.Caps.from_string("audio/x-raw") + assert raw_caps + + if pad.query_caps().is_subset(raw_caps): # Probably won't happen due to autoplug-select fix, but lets play it # safe until we've tested more. 
struct = Gst.Structure.new_empty("have-audio") - element.get_bus().post(Gst.Message.new_application(element, struct)) + + element_bus = element.get_bus() + if element_bus is None: + raise exceptions.AudioException("Failed to get bus of GStreamer element.") + + message = Gst.Message.new_application(element, struct) + if message is None: + raise exceptions.AudioException("Failed to create GStreamer message.") + + element_bus.post(message) -def _autoplug_select(element, pad, caps, factory): - if factory.list_is_type(_DECODER | _AUDIO): +def _autoplug_select( + element: Gst.Element, + _pad: Gst.Pad, + _caps: Gst.Caps, + factory: Gst.ElementFactory, +) -> GstAutoplugSelectResult: + if factory.list_is_type( + GstElementFactoryListType.DECODER | GstElementFactoryListType.AUDIO + ): struct = Gst.Structure.new_empty("have-audio") - element.get_bus().post(Gst.Message.new_application(element, struct)) - if not factory.list_is_type(_DEMUXER | _DEPAYLOADER | _PARSER): - return _SELECT_EXPOSE - return _SELECT_TRY + element_bus = element.get_bus() + if element_bus is None: + raise exceptions.AudioException("Failed to get bus of GStreamer element.") -def _start_pipeline(pipeline): + message = Gst.Message.new_application(element, struct) + if message is None: + raise exceptions.AudioException("Failed to create GStreamer message.") + + element_bus.post(message) + + if not factory.list_is_type( + GstElementFactoryListType.DEMUXER + | GstElementFactoryListType.DEPAYLOADER + | GstElementFactoryListType.PARSER + ): + return GstAutoplugSelectResult.EXPOSE + return GstAutoplugSelectResult.TRY + + +def _start_pipeline(pipeline: Gst.Pipeline) -> None: result = pipeline.set_state(Gst.State.PAUSED) if result == Gst.StateChangeReturn.NO_PREROLL: pipeline.set_state(Gst.State.PLAYING) -def _query_duration(pipeline): +def _query_duration(pipeline: Gst.Pipeline) -> tuple[bool, int | None]: success, duration = pipeline.query_duration(Gst.Format.TIME) if not success: duration = None # Make sure error case preserves None. @@ -179,16 +251,19 @@ def _query_duration(pipeline): return success, duration -def _query_seekable(pipeline): +def _query_seekable(pipeline: Gst.Pipeline) -> bool: query = Gst.Query.new_seeking(Gst.Format.TIME) pipeline.query(query) return query.parse_seeking()[1] -def _process(pipeline, timeout_ms): +def _process( # noqa: C901, PLR0911, PLR0912, PLR0915 + pipeline: Gst.Pipeline, + timeout_ms: int, +) -> tuple[dict[str, Any], str | None, bool, int | None]: bus = pipeline.get_bus() tags = {} - mime = None + mime: str | None = None have_audio = False missing_message = None duration = None @@ -210,8 +285,10 @@ def _process(pipeline, timeout_ms): if msg is None: break - if logger.isEnabledFor(log.TRACE_LOG_LEVEL) and msg.get_structure(): - debug_text = msg.get_structure().to_string() + structure = msg.get_structure() + + if logger.isEnabledFor(log.TRACE_LOG_LEVEL) and structure: + debug_text = structure.to_string() if len(debug_text) > 77: debug_text = debug_text[:77] + "..." 
_trace("element %s: %s", msg.src.get_name(), debug_text) @@ -220,19 +297,28 @@ def _process(pipeline, timeout_ms): if GstPbutils.is_missing_plugin_message(msg): missing_message = msg elif msg.type == Gst.MessageType.APPLICATION: - if msg.get_structure().get_name() == "have-type": - mime = msg.get_structure().get_value("caps").get_name() - if mime and ( - mime.startswith("text/") or mime == "application/xml" - ): - return tags, mime, have_audio, duration - elif msg.get_structure().get_name() == "have-audio": + if structure and structure.get_name() == "have-type": + caps = cast(Gst.Caps | None, structure.get_value("caps")) + if caps: + mime = cast( + str, + caps.get_name(), # pyright: ignore[reportAttributeAccessIssue] + ) + if mime.startswith("text/") or mime == "application/xml": + return tags, mime, have_audio, duration + elif structure and structure.get_name() == "have-audio": have_audio = True elif msg.type == Gst.MessageType.ERROR: error, _debug = msg.parse_error() - if missing_message and not mime: - caps = missing_message.get_structure().get_value("detail") - mime = caps.get_structure(0).get_name() + if ( + missing_message + and not mime + and ( + (structure := missing_message.get_structure()) + and (caps := structure.get_value("detail")) + and (mime := caps.get_structure(0).get_name()) + ) + ): return tags, mime, have_audio, duration raise exceptions.ScannerError(str(error)) elif msg.type == Gst.MessageType.EOS: @@ -273,7 +359,6 @@ def _process(pipeline, timeout_ms): if __name__ == "__main__": - import os import sys from mopidy.internal import path @@ -286,17 +371,17 @@ def _process(pipeline, timeout_ms): scanner = Scanner(5000) for uri in sys.argv[1:]: if not Gst.uri_is_valid(uri): - uri = path.path_to_uri(os.path.abspath(uri)) + uri = path.path_to_uri(Path(uri).resolve()) try: result = scanner.scan(uri) for key in ("uri", "mime", "duration", "playable", "seekable"): value = getattr(result, key) - print(f"{key:<20} {value}") - print("tags") + print(f"{key:<20} {value}") # noqa: T201 + print("tags") # noqa: T201 for tag, value in result.tags.items(): line = f"{tag:<20} {value}" if len(line) > 77: line = line[:77] + "..." - print(line) + print(line) # noqa: T201 except exceptions.ScannerError as error: - print(f"{uri}: {error}") + print(f"{uri}: {error}") # noqa: T201 diff --git a/mopidy/audio/tags.py b/src/mopidy/audio/tags.py similarity index 96% rename from mopidy/audio/tags.py rename to src/mopidy/audio/tags.py index 3feb3fd8ce..217676c708 100644 --- a/mopidy/audio/tags.py +++ b/src/mopidy/audio/tags.py @@ -25,7 +25,7 @@ def repr_tags(taglist, max_bytes=10): result = dict(taglist) for tag_values in result.values(): for i, val in enumerate(tag_values): - if type(val) is bytes and len(val) > max_bytes: + if isinstance(val, bytes) and len(val) > max_bytes: tag_values[i] = val[:max_bytes] + b"..." 
return repr(result) @@ -76,7 +76,7 @@ def convert_taglist(taglist): result[tag].append(value.to_iso8601_string()) elif isinstance(value, bytes): result[tag].append(value.decode(errors="replace")) - elif isinstance(value, (str, bool, numbers.Number)): + elif isinstance(value, str | bool | numbers.Number): result[tag].append(value) elif isinstance(value, Gst.Sample): data = _extract_sample_data(value) @@ -111,7 +111,7 @@ def _extract_buffer_data(buf): success, info = mem.map(Gst.MapFlags.READ) if not success: return None - if isinstance(info.data, memoryview): + if isinstance(info.data, memoryview): # noqa: SIM108 # We need to copy the data as the memoryview is released # when we call mem.unmap() data = bytes(info.data) @@ -189,9 +189,7 @@ def _artists(tags, artist_name, artist_id=None, artist_sortname=None): return None # One artist name and either id or sortname, include all available fields - if len(tags[artist_name]) == 1 and ( - artist_id in tags or artist_sortname in tags - ): + if len(tags[artist_name]) == 1 and (artist_id in tags or artist_sortname in tags): attrs = {"name": tags[artist_name][0]} if artist_id in tags: attrs["musicbrainz_id"] = tags[artist_id][0] diff --git a/mopidy/audio/utils.py b/src/mopidy/audio/utils.py similarity index 54% rename from mopidy/audio/utils.py rename to src/mopidy/audio/utils.py index 42553eda4e..15865865e9 100644 --- a/mopidy/audio/utils.py +++ b/src/mopidy/audio/utils.py @@ -1,66 +1,51 @@ -from mopidy import httpclient -from mopidy.internal.gi import Gst - +from __future__ import annotations -def calculate_duration(num_samples, sample_rate): - """Determine duration of samples using GStreamer helper for precise - math.""" - return Gst.util_uint64_scale(num_samples, Gst.SECOND, sample_rate) +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, cast +from mopidy import httpclient +from mopidy.internal.gi import Gst +from mopidy.types import DurationMs, UriScheme -def create_buffer(data, timestamp=None, duration=None): - """Create a new GStreamer buffer based on provided data. - - Mainly intended to keep gst imports out of non-audio modules. +if TYPE_CHECKING: + from collections.abc import Iterable - .. versionchanged:: 2.0 - ``capabilites`` argument was removed. - """ - if not data: - raise ValueError("Cannot create buffer without data") - buffer_ = Gst.Buffer.new_wrapped(data) - if timestamp is not None: - buffer_.pts = timestamp - if duration is not None: - buffer_.duration = duration - return buffer_ + from mopidy.config import ProxyConfig -def millisecond_to_clocktime(value): +def millisecond_to_clocktime(value: DurationMs) -> int: """Convert a millisecond time to internal GStreamer time.""" return value * Gst.MSECOND -def clocktime_to_millisecond(value): +def clocktime_to_millisecond(value: int) -> DurationMs: """Convert an internal GStreamer time to millisecond time.""" - return value // Gst.MSECOND + return DurationMs(value // Gst.MSECOND) -def supported_uri_schemes(uri_schemes): +def supported_uri_schemes(uri_schemes: Iterable[UriScheme]) -> set[UriScheme]: """Determine which URIs we can actually support from provided whitelist. :param uri_schemes: list/set of URIs to check support for. - :type uri_schemes: list or set or URI schemes as strings. - :rtype: set of URI schemes we can support via this GStreamer install. 
""" supported_schemes = set() registry = Gst.Registry.get() for factory in registry.get_feature_list(Gst.ElementFactory): - for uri in factory.get_uri_protocols(): - if uri in uri_schemes: - supported_schemes.add(uri) + factory = cast(Gst.ElementFactory, factory) + for uri_protocol in factory.get_uri_protocols(): + uri_scheme = UriScheme(uri_protocol) + if uri_scheme in uri_schemes: + supported_schemes.add(uri_scheme) return supported_schemes -def setup_proxy(element, config): +def setup_proxy(element: Gst.Element, config: ProxyConfig) -> None: """Configure a GStreamer element with proxy settings. :param element: element to setup proxy in. - :type element: :class:`Gst.GstElement` :param config: proxy settings to use. - :type config: :class:`dict` """ if not hasattr(element.props, "proxy") or not config.get("hostname"): return @@ -71,13 +56,18 @@ def setup_proxy(element, config): class Signals: - - """Helper for tracking gobject signal registrations""" - - def __init__(self): - self._ids = {} - - def connect(self, element, event, func, *args): + """Helper for tracking gobject signal registrations.""" + + def __init__(self) -> None: + self._ids: dict[tuple[Gst.Element, str], int] = {} + + def connect( + self, + element: Gst.Element, + event: str, + func: Callable, + *args: Any, + ) -> None: """Connect a function + args to signal event on an element. Each event may only be handled by one callback in this implementation. @@ -86,7 +76,7 @@ def connect(self, element, event, func, *args): raise AssertionError self._ids[(element, event)] = element.connect(event, func, *args) - def disconnect(self, element, event): + def disconnect(self, element: Gst.Element, event: str) -> None: """Disconnect whatever handler we have for an element+event pair. Does nothing it the handler has already been removed. 
@@ -95,7 +85,7 @@ def disconnect(self, element, event): if signal_id is not None: element.disconnect(signal_id) - def clear(self): + def clear(self) -> None: """Clear all registered signal handlers.""" for element, event in list(self._ids): element.disconnect(self._ids.pop((element, event))) diff --git a/mopidy/backend.py b/src/mopidy/backend.py similarity index 59% rename from mopidy/backend.py rename to src/mopidy/backend.py index 29c43bc11e..2f7d3fc1cf 100644 --- a/mopidy/backend.py +++ b/src/mopidy/backend.py @@ -1,56 +1,34 @@ +# ruff: noqa: ARG002 + from __future__ import annotations import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, ClassVar import pykka +from pykka.typing import ActorMemberMixin, proxy_field, proxy_method from mopidy import listener if TYPE_CHECKING: - from typing import Any, Dict, List, Optional, Set, TypeVar, Union - - from typing_extensions import Literal - + from mopidy.audio.actor import AudioProxy + from mopidy.internal.gi import Gst from mopidy.models import Image, Playlist, Ref, SearchResult, Track - - # TODO Fix duplication with mopidy.internal.validation.TRACK_FIELDS_WITH_TYPES - TrackField = Literal[ - "uri", - "track_name", - "album", - "artist", - "albumartist", - "composer", - "performer", - "track_no", - "genre", - "date", - "comment", - "disc_no", - "musicbrainz_albumid", - "musicbrainz_artistid", - "musicbrainz_trackid", - ] - - SearchField = Literal[TrackField, "any"] - - DistinctField = TrackField - - F = TypeVar("F") - QueryValue = Union[str, int] - Query = Dict[F, List[QueryValue]] - - Uri = str - UriScheme = str + from mopidy.types import ( + DistinctField, + DurationMs, + Query, + SearchField, + Uri, + UriScheme, + ) logger = logging.getLogger(__name__) class Backend: - - """Backend API + """Backend API. If the backend has problems during initialization it should raise :exc:`mopidy.exceptions.BackendError` with a descriptive error message. @@ -58,35 +36,32 @@ class Backend: fix the issue. :param config: the entire Mopidy configuration - :type config: dict :param audio: actor proxy for the audio subsystem - :type audio: :class:`pykka.ActorProxy` for :class:`mopidy.audio.Audio` """ #: Actor proxy to an instance of :class:`mopidy.audio.Audio`. #: #: Should be passed to the backend constructor as the kwarg ``audio``, #: which will then set this field. - # TODO(typing) Replace Any with an ActorProxy[Audio] type - audio: Optional[Any] = None + audio: AudioProxy #: The library provider. An instance of #: :class:`~mopidy.backend.LibraryProvider`, or :class:`None` if #: the backend doesn't provide a library. - library: Optional[LibraryProvider] = None + library: LibraryProvider | None = None #: The playback provider. An instance of #: :class:`~mopidy.backend.PlaybackProvider`, or :class:`None` if #: the backend doesn't provide playback. - playback: Optional[PlaybackProvider] = None + playback: PlaybackProvider | None = None #: The playlists provider. An instance of #: :class:`~mopidy.backend.PlaylistsProvider`, or class:`None` if #: the backend doesn't provide playlists. - playlists: Optional[PlaylistsProvider] = None + playlists: PlaylistsProvider | None = None #: List of URI schemes this backend can handle. 
- uri_schemes: List[UriScheme] = [] + uri_schemes: ClassVar[list[UriScheme]] = [] # Because the providers is marked as pykka.traversable(), we can't get() # them from another actor, and need helper methods to check if the @@ -96,9 +71,7 @@ def has_library(self) -> bool: return self.library is not None def has_library_browse(self) -> bool: - return ( - self.library is not None and self.library.root_directory is not None - ) + return self.library is not None and self.library.root_directory is not None def has_playback(self) -> bool: return self.playback is not None @@ -113,13 +86,12 @@ def ping(self) -> bool: @pykka.traversable class LibraryProvider: + """A library provider provides a library of music to Mopidy. - """ :param backend: backend the controller is a part of - :type backend: :class:`mopidy.backend.Backend` """ - root_directory: Optional[Ref] = None + root_directory: Ref | None = None """ :class:`mopidy.models.Ref.directory` instance with a URI and name set representing the root of this library's browse tree. URIs must @@ -132,9 +104,8 @@ class LibraryProvider: def __init__(self, backend: Backend) -> None: self.backend = backend - def browse(self, uri: Uri) -> List[Ref]: - """ - See :meth:`mopidy.core.LibraryController.browse`. + def browse(self, uri: Uri) -> list[Ref]: + """See :meth:`mopidy.core.LibraryController.browse`. If you implement this method, make sure to also set :attr:`root_directory`. @@ -144,10 +115,9 @@ def browse(self, uri: Uri) -> List[Ref]: return [] def get_distinct( - self, field: DistinctField, query: Optional[Query[DistinctField]] = None - ) -> Set[str]: - """ - See :meth:`mopidy.core.LibraryController.get_distinct`. + self, field: DistinctField, query: Query[SearchField] | None = None + ) -> set[str]: + """See :meth:`mopidy.core.LibraryController.get_distinct`. *MAY be implemented by subclass.* @@ -158,9 +128,8 @@ def get_distinct( """ return set() - def get_images(self, uris: List[Uri]) -> Dict[Uri, List[Image]]: - """ - See :meth:`mopidy.core.LibraryController.get_images`. + def get_images(self, uris: list[Uri]) -> dict[Uri, list[Image]]: + """See :meth:`mopidy.core.LibraryController.get_images`. *MAY be implemented by subclass.* @@ -168,77 +137,67 @@ def get_images(self, uris: List[Uri]) -> Dict[Uri, List[Image]]: """ return {} - def lookup(self, uri: Uri) -> Dict[Uri, List[Track]]: - """ - See :meth:`mopidy.core.LibraryController.lookup`. + def lookup(self, uri: Uri) -> list[Track]: + """See :meth:`mopidy.core.LibraryController.lookup`. *MUST be implemented by subclass.* """ raise NotImplementedError - def refresh(self, uri: Optional[Uri] = None) -> None: - """ - See :meth:`mopidy.core.LibraryController.refresh`. + def refresh(self, uri: Uri | None = None) -> None: + """See :meth:`mopidy.core.LibraryController.refresh`. *MAY be implemented by subclass.* """ - pass def search( self, query: Query[SearchField], - uris: Optional[List[Uri]] = None, + uris: list[Uri] | None = None, exact: bool = False, - ) -> List[SearchResult]: - """ - See :meth:`mopidy.core.LibraryController.search`. + ) -> SearchResult | None: + """See :meth:`mopidy.core.LibraryController.search`. *MAY be implemented by subclass.* .. versionadded:: 1.0 The ``exact`` param which replaces the old ``find_exact``. """ - pass + return None @pykka.traversable class PlaybackProvider: + """A playback provider provides audio playback control. 
- """ :param audio: the audio actor - :type audio: actor proxy to an instance of :class:`mopidy.audio.Audio` :param backend: the backend - :type backend: :class:`mopidy.backend.Backend` """ - def __init__(self, audio: Any, backend: Backend) -> None: - # TODO(typing) Replace Any with an ActorProxy[Audio] type + def __init__(self, audio: AudioProxy, backend: Backend) -> None: self.audio = audio self.backend = backend def pause(self) -> bool: - """ - Pause playback. + """Pause playback. *MAY be reimplemented by subclass.* - :rtype: :class:`True` if successful, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self.audio.pause_playback().get() def play(self) -> bool: - """ - Start playback. + """Start playback. *MAY be reimplemented by subclass.* - :rtype: :class:`True` if successful, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self.audio.start_playback().get() def prepare_change(self) -> None: - """ - Indicate that an URI change is about to happen. + """Indicate that an URI change is about to happen. *MAY be reimplemented by subclass.* @@ -248,9 +207,8 @@ def prepare_change(self) -> None: """ self.audio.prepare_change().get() - def translate_uri(self, uri: Uri) -> Optional[Uri]: - """ - Convert custom URI scheme to real playable URI. + def translate_uri(self, uri: Uri) -> Uri | None: + """Convert custom URI scheme to real playable URI. *MAY be reimplemented by subclass.* @@ -260,14 +218,11 @@ def translate_uri(self, uri: Uri) -> Optional[Uri]: return :class:`None`. :param uri: the URI to translate - :type uri: string - :rtype: string or :class:`None` if the URI could not be translated """ return uri def is_live(self, uri: Uri) -> bool: - """ - Decide if the URI should be treated as a live stream or not. + """Decide if the URI should be treated as a live stream or not. *MAY be reimplemented by subclass.* @@ -275,14 +230,11 @@ def is_live(self, uri: Uri) -> bool: latency before playback starts, and discards data when paused. :param uri: the URI - :type uri: string - :rtype: bool """ return False def should_download(self, uri: Uri) -> bool: - """ - Attempt progressive download buffering for the URI or not. + """Attempt progressive download buffering for the URI or not. *MAY be reimplemented by subclass.* @@ -290,14 +242,22 @@ def should_download(self, uri: Uri) -> bool: to improve playback performance. :param uri: the URI - :type uri: string - :rtype: bool """ return False - def change_track(self, track: Track) -> bool: + def on_source_setup(self, source: Gst.Element) -> None: + """Called when a new GStreamer source is created, allowing us to configure + the source. This runs in the audio thread so should not block. + + *MAY be reimplemented by subclass.* + + :param source: the GStreamer source element + + .. versionadded:: 3.4 """ - Switch to provided track. + + def change_track(self, track: Track) -> bool: + """Switch to provided track. *MAY be reimplemented by subclass.* @@ -309,14 +269,13 @@ def change_track(self, track: Track) -> bool: is what you want to implement. 
:param track: the track to play - :type track: :class:`mopidy.models.Track` - :rtype: :class:`True` if successful, else :class:`False` """ uri = self.translate_uri(track.uri) if uri != track.uri: logger.debug("Backend translated URI from %s to %s", track.uri, uri) if not uri: return False + self.audio.set_source_setup_callback(self.on_source_setup).get() self.audio.set_uri( uri, live_stream=self.is_live(uri), @@ -325,83 +284,71 @@ def change_track(self, track: Track) -> bool: return True def resume(self) -> bool: - """ - Resume playback at the same time position playback was paused. + """Resume playback at the same time position playback was paused. *MAY be reimplemented by subclass.* - :rtype: :class:`True` if successful, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self.audio.start_playback().get() - def seek(self, time_position: int) -> bool: - """ - Seek to a given time position. + def seek(self, time_position: DurationMs) -> bool: + """Seek to a given time position. *MAY be reimplemented by subclass.* + Returns :class:`True` if successful, else :class:`False`. + :param time_position: time position in milliseconds - :type time_position: int - :rtype: :class:`True` if successful, else :class:`False` """ return self.audio.set_position(time_position).get() def stop(self) -> bool: - """ - Stop playback. + """Stop playback. *MAY be reimplemented by subclass.* Should not be used for tracking if tracks have been played or when we are done playing them. - :rtype: :class:`True` if successful, else :class:`False` + Returns :class:`True` if successful, else :class:`False`. """ return self.audio.stop_playback().get() - def get_time_position(self) -> int: - """ - Get the current time position in milliseconds. + def get_time_position(self) -> DurationMs: + """Get the current time position in milliseconds. *MAY be reimplemented by subclass.* - - :rtype: int """ return self.audio.get_position().get() @pykka.traversable class PlaylistsProvider: + """A playlist provider exposes a collection of playlists. - """ - A playlist provider exposes a collection of playlists, methods to - create/change/delete playlists in this collection, and lookup of any - playlist the backend knows about. + The methods can create/change/delete playlists in this collection, and + lookup of any playlist the backend knows about. :param backend: backend the controller is a part of - :type backend: :class:`mopidy.backend.Backend` instance """ def __init__(self, backend: Backend) -> None: self.backend = backend - def as_list(self) -> List[Ref]: - """ - Get a list of the currently available playlists. + def as_list(self) -> list[Ref]: + """Get a list of the currently available playlists. Returns a list of :class:`~mopidy.models.Ref` objects referring to the playlists. In other words, no information about the playlists' content is given. - :rtype: list of :class:`mopidy.models.Ref` - .. versionadded:: 1.0 """ raise NotImplementedError - def get_items(self, uri: Uri) -> Optional[List[Ref]]: - """ - Get the items in a playlist specified by ``uri``. + def get_items(self, uri: Uri) -> list[Ref] | None: + """Get the items in a playlist specified by ``uri``. Returns a list of :class:`~mopidy.models.Ref` objects referring to the playlist's items. @@ -409,15 +356,12 @@ def get_items(self, uri: Uri) -> Optional[List[Ref]]: If a playlist with the given ``uri`` doesn't exist, it returns :class:`None`. - :rtype: list of :class:`mopidy.models.Ref`, or :class:`None` - .. 
versionadded:: 1.0 """ raise NotImplementedError - def create(self, name: str) -> Optional[Playlist]: - """ - Create a new empty playlist with the given name. + def create(self, name: str) -> Playlist | None: + """Create a new empty playlist with the given name. Returns a new playlist with the given name and an URI, or :class:`None` on failure. @@ -425,31 +369,25 @@ def create(self, name: str) -> Optional[Playlist]: *MUST be implemented by subclass.* :param name: name of the new playlist - :type name: string - :rtype: :class:`mopidy.models.Playlist` or :class:`None` """ raise NotImplementedError def delete(self, uri: Uri) -> bool: - """ - Delete playlist identified by the URI. + """Delete playlist identified by the URI. Returns :class:`True` if deleted, :class:`False` otherwise. *MUST be implemented by subclass.* :param uri: URI of the playlist to delete - :type uri: string - :rtype: :class:`bool` .. versionchanged:: 2.2 Return type defined. """ raise NotImplementedError - def lookup(self, uri: Uri) -> Optional[Playlist]: - """ - Lookup playlist with given URI in both the set of playlists and in any + def lookup(self, uri: Uri) -> Playlist | None: + """Lookup playlist with given URI in both the set of playlists and in any other playlist source. Returns the playlists or :class:`None` if not found. @@ -457,22 +395,18 @@ def lookup(self, uri: Uri) -> Optional[Playlist]: *MUST be implemented by subclass.* :param uri: playlist URI - :type uri: string - :rtype: :class:`mopidy.models.Playlist` or :class:`None` """ raise NotImplementedError def refresh(self) -> None: - """ - Refresh the playlists in :attr:`playlists`. + """Refresh the playlists in :attr:`playlists`. *MUST be implemented by subclass.* """ raise NotImplementedError - def save(self, playlist: Playlist) -> Optional[Playlist]: - """ - Save the given playlist. + def save(self, playlist: Playlist) -> Playlist | None: + """Save the given playlist. The playlist must have an ``uri`` attribute set. To create a new playlist with an URI, use :meth:`create`. @@ -482,16 +416,12 @@ def save(self, playlist: Playlist) -> Optional[Playlist]: *MUST be implemented by subclass.* :param playlist: the playlist to save - :type playlist: :class:`mopidy.models.Playlist` - :rtype: :class:`mopidy.models.Playlist` or :class:`None` """ raise NotImplementedError class BackendListener(listener.Listener): - - """ - Marker interface for recipients of events sent by the backend actors. + """Marker interface for recipients of events sent by the backend actors. Any Pykka actor that mixes in this class will receive calls to the methods defined here when the corresponding events happen in a backend actor. This @@ -504,13 +434,64 @@ class BackendListener(listener.Listener): @staticmethod def send(event: str, **kwargs: Any) -> None: - """Helper to allow calling of backend listener events""" + """Helper to allow calling of backend listener events.""" listener.send(BackendListener, event, **kwargs) def playlists_loaded(self) -> None: - """ - Called when playlists are loaded or refreshed. + """Called when playlists are loaded or refreshed. *MAY* be implemented by actor. 
""" - pass + + +class BackendActor(pykka.ThreadingActor, Backend): + pass + + +class BackendProxy(ActorMemberMixin, pykka.ActorProxy[BackendActor]): + """Backend wrapped in a Pykka actor proxy.""" + + library: LibraryProviderProxy + playback: PlaybackProviderProxy + playlists: PlaylistsProviderProxy + uri_schemes = proxy_field(BackendActor.uri_schemes) + has_library = proxy_method(BackendActor.has_library) + has_library_browse = proxy_method(BackendActor.has_library_browse) + has_playback = proxy_method(BackendActor.has_playback) + has_playlists = proxy_method(BackendActor.has_playlists) + ping = proxy_method(BackendActor.ping) + + +class LibraryProviderProxy: + root_directory = proxy_field(LibraryProvider.root_directory) + browse = proxy_method(LibraryProvider.browse) + get_distinct = proxy_method(LibraryProvider.get_distinct) + get_images = proxy_method(LibraryProvider.get_images) + lookup = proxy_method(LibraryProvider.lookup) + refresh = proxy_method(LibraryProvider.refresh) + search = proxy_method(LibraryProvider.search) + + +class PlaybackProviderProxy: + pause = proxy_method(PlaybackProvider.pause) + play = proxy_method(PlaybackProvider.play) + prepare_change = proxy_method(PlaybackProvider.prepare_change) + translate_uri = proxy_method(PlaybackProvider.translate_uri) + is_live = proxy_method(PlaybackProvider.is_live) + should_download = proxy_method(PlaybackProvider.should_download) + on_source_setup = proxy_method(PlaybackProvider.on_source_setup) + change_track = proxy_method(PlaybackProvider.change_track) + resume = proxy_method(PlaybackProvider.resume) + seek = proxy_method(PlaybackProvider.seek) + stop = proxy_method(PlaybackProvider.stop) + get_time_position = proxy_method(PlaybackProvider.get_time_position) + + +class PlaylistsProviderProxy: + as_list = proxy_method(PlaylistsProvider.as_list) + get_items = proxy_method(PlaylistsProvider.get_items) + create = proxy_method(PlaylistsProvider.create) + delete = proxy_method(PlaylistsProvider.delete) + lookup = proxy_method(PlaylistsProvider.lookup) + refresh = proxy_method(PlaylistsProvider.refresh) + save = proxy_method(PlaylistsProvider.save) diff --git a/mopidy/commands.py b/src/mopidy/commands.py similarity index 64% rename from mopidy/commands.py rename to src/mopidy/commands.py index 80cd00048b..7601940ab4 100644 --- a/mopidy/commands.py +++ b/src/mopidy/commands.py @@ -4,52 +4,51 @@ import collections import contextlib import logging -import os -import pathlib import signal import sys -from typing import TYPE_CHECKING +from collections.abc import Generator, Iterable, Sequence +from pathlib import Path +from typing import ( + Any, + NoReturn, + cast, +) import pykka +from pykka import ThreadingActor from pykka.messages import ProxyCall +import mopidy from mopidy import config as config_lib from mopidy import exceptions -from mopidy.audio import Audio -from mopidy.core import Core -from mopidy.internal import deps, process, timer, versioning +from mopidy.audio import Audio, AudioProxy +from mopidy.backend import BackendActor, BackendProxy +from mopidy.core import Core, CoreProxy +from mopidy.internal import deps, process, timer from mopidy.internal.gi import GLib - -if TYPE_CHECKING: - from typing import Optional - +from mopidy.mixer import MixerActor, MixerProxy +from mopidy.types import Percentage logger = logging.getLogger(__name__) -_default_config = [ - (pathlib.Path(base) / "mopidy" / "mopidy.conf").resolve() - for base in GLib.get_system_config_dirs() + [GLib.get_user_config_dir()] -] -DEFAULT_CONFIG = 
":".join(map(str, _default_config)) - -def config_files_type(value): +def config_files_type(value: str) -> list[str]: return value.split(":") -def config_override_type(value): +def config_override_type(value: str) -> tuple[str, str, str]: try: section, remainder = value.split("/", 1) key, value = remainder.split("=", 1) return (section.strip(), key.strip(), value.strip()) - except ValueError: + except ValueError as exc: raise argparse.ArgumentTypeError( f"{value} must have the format section/key=value" - ) + ) from exc class _ParserError(Exception): - def __init__(self, message): + def __init__(self, message) -> None: self.message = message @@ -58,12 +57,17 @@ class _HelpError(Exception): class _ArgumentParser(argparse.ArgumentParser): - def error(self, message): + def error(self, message) -> NoReturn: raise _ParserError(message) class _HelpAction(argparse.Action): - def __init__(self, option_strings, dest=None, help=None): + def __init__( + self, + option_strings: Sequence[str], + dest: str | None = None, + help: str | None = None, + ) -> None: super().__init__( option_strings=option_strings, dest=dest or argparse.SUPPRESS, @@ -72,49 +76,55 @@ def __init__(self, option_strings, dest=None, help=None): help=help, ) - def __call__(self, parser, namespace, values, option_string=None): - raise _HelpError() + def __call__( + self, + parser, # noqa: ARG002 + namespace, # noqa: ARG002 + values, # noqa: ARG002 + option_string=None, # noqa: ARG002 + ) -> NoReturn: + raise _HelpError class Command: - """Command parser and runner for building trees of commands. This class provides a wraper around :class:`argparse.ArgumentParser` for handling this type of command line application in a better way than - argprases own sub-parser handling. + argparse's own sub-parser handling. """ - help: Optional[str] = None + help: str | None = None #: Help text to display in help output. - def __init__(self): + _children: dict[str, Command] + _arguments: list[tuple[tuple[Any, ...], dict[str, Any]]] + _overrides: dict[str, Any] + + def __init__(self) -> None: self._children = collections.OrderedDict() self._arguments = [] self._overrides = {} - def _build(self): - actions = [] + def _build(self) -> tuple[_ArgumentParser, list[argparse.Action]]: + actions: list[argparse.Action] = [] parser = _ArgumentParser(add_help=False) parser.register("action", "help", _HelpAction) for args, kwargs in self._arguments: actions.append(parser.add_argument(*args, **kwargs)) - parser.add_argument( - "_args", nargs=argparse.REMAINDER, help=argparse.SUPPRESS - ) + parser.add_argument("_args", nargs=argparse.REMAINDER, help=argparse.SUPPRESS) return parser, actions - def add_child(self, name, command): + def add_child(self, name: str, command: Command) -> None: """Add a child parser to consider using. :param name: name to use for the sub-command that is being added. - :type name: string """ self._children[name] = command - def add_argument(self, *args, **kwargs): + def add_argument(self, *args: Any, **kwargs: Any) -> None: """Add an argument to the parser. 
This method takes all the same arguments as the @@ -122,30 +132,35 @@ def add_argument(self, *args, **kwargs): """ self._arguments.append((args, kwargs)) - def set(self, **kwargs): + def set(self, **kwargs: Any) -> None: """Override a value in the finaly result of parsing.""" self._overrides.update(kwargs) - def exit(self, status_code=0, message=None, usage=None): + def exit( + self, + status_code: int = 0, + message: str | None = None, + usage: str | None = None, + ) -> NoReturn: """Optionally print a message and exit.""" - print("\n\n".join(m for m in (usage, message) if m)) + print("\n\n".join(m for m in (usage, message) if m)) # noqa: T201 sys.exit(status_code) - def format_usage(self, prog=None): + def format_usage(self, prog: str | None = None) -> str: """Format usage for current parser.""" actions = self._build()[1] - prog = prog or os.path.basename(sys.argv[0]) + prog = prog or Path(sys.argv[0]).name return self._usage(actions, prog) + "\n" - def _usage(self, actions, prog): + def _usage(self, actions: Iterable[argparse.Action], prog) -> str: formatter = argparse.HelpFormatter(prog) formatter.add_usage(None, actions, []) return formatter.format_help().strip() - def format_help(self, prog=None): + def format_help(self, prog: str | None = None) -> str: """Format help for current parser and children.""" actions = self._build()[1] - prog = prog or os.path.basename(sys.argv[0]) + prog = prog or Path(sys.argv[0]).name formatter = argparse.HelpFormatter(prog) formatter.add_usage(None, actions, []) @@ -169,7 +184,7 @@ def format_help(self, prog=None): return formatter.format_help() + "\n".join(subhelp) - def _subhelp(self, name, result): + def _subhelp(self, name: str, result: list[str]) -> None: actions = self._build()[1] if self.help or actions: @@ -186,7 +201,7 @@ def _subhelp(self, name, result): for childname, child in self._children.items(): child._subhelp(" ".join((name, childname)), result) - def parse(self, args, prog=None): + def parse(self, args: list[str], prog: str | None = None) -> argparse.Namespace: """Parse command line arguments. Will recursively parse commands until a final parser is found or an @@ -195,20 +210,26 @@ def parse(self, args, prog=None): in the command attribute of the return value. :param args: list of arguments to parse - :type args: list of strings :param prog: name to use for program - :type prog: string - :rtype: :class:`argparse.Namespace` """ - prog = prog or os.path.basename(sys.argv[0]) + prog = prog or Path(sys.argv[0]).name try: return self._parse( - args, argparse.Namespace(), self._overrides.copy(), prog + args, + argparse.Namespace(), + self._overrides.copy(), + prog, ) except _HelpError: self.exit(0, self.format_help(prog)) - def _parse(self, args, namespace, overrides, prog): + def _parse( + self, + args: Sequence[str], + namespace: argparse.Namespace, + overrides: dict[str, Any], + prog: str, + ) -> argparse.Namespace: overrides.update(self._overrides) parser, actions = self._build() @@ -233,7 +254,13 @@ def _parse(self, args, namespace, overrides, prog): result._args, result, overrides, " ".join([prog, child]) ) - def run(self, *args, **kwargs): + def run( + self, + args: argparse.Namespace, + config: config_lib.Config, + *_args: Any, + **_kwargs: Any, + ) -> int: """Run the command. 
Must be implemented by sub-classes that are not simply an intermediate @@ -243,7 +270,7 @@ def run(self, *args, **kwargs): @contextlib.contextmanager -def _actor_error_handling(name): +def _actor_error_handling(name) -> Generator[None, Any, None]: try: yield except exceptions.BackendError as exc: @@ -258,7 +285,7 @@ def _actor_error_handling(name): # TODO: move out of this utility class class RootCommand(Command): - def __init__(self): + def __init__(self) -> None: super().__init__() self.set(base_verbosity_level=0) self.add_argument( @@ -267,7 +294,7 @@ def __init__(self): self.add_argument( "--version", action="version", - version=f"Mopidy {versioning.get_version()}", + version=f"Mopidy {mopidy.__version__}", ) self.add_argument( "-q", @@ -290,7 +317,6 @@ def __init__(self): action="store", dest="config_files", type=config_files_type, - default=DEFAULT_CONFIG, metavar="FILES", help="config files to use, colon seperated, later files override", ) @@ -304,19 +330,24 @@ def __init__(self): help="`section/key=value` values to override config options", ) - def run(self, args, config): - def on_sigterm(loop): + def run( + self, + args: argparse.Namespace, + config: config_lib.Config, + *_args: Any, + **_kwargs: Any, + ) -> int: + def on_sigterm(loop) -> bool: logger.info("GLib mainloop got SIGTERM. Exiting...") loop.quit() + return GLib.SOURCE_REMOVE loop = GLib.MainLoop() - GLib.unix_signal_add( - GLib.PRIORITY_DEFAULT, signal.SIGTERM, on_sigterm, loop - ) + GLib.unix_signal_add(GLib.PRIORITY_DEFAULT, signal.SIGTERM, on_sigterm, loop) mixer_class = self.get_mixer_class(config, args.registry["mixer"]) - backend_classes = args.registry["backend"] - frontend_classes = args.registry["frontend"] + backend_classes: list[type[BackendActor]] = args.registry["backend"] + frontend_classes: list[type[ThreadingActor]] = args.registry["frontend"] core = None exit_status_code = 0 @@ -354,7 +385,11 @@ def on_sigterm(loop): process.stop_remaining_actors() return exit_status_code - def get_mixer_class(self, config, mixer_classes): + def get_mixer_class( + self, + config: config_lib.Config, + mixer_classes: list[type[MixerActor]], + ) -> type[MixerActor] | None: logger.debug( "Available Mopidy mixers: %s", ", ".join(m.__name__ for m in mixer_classes) or "none", @@ -376,30 +411,48 @@ def get_mixer_class(self, config, mixer_classes): process.exit_process() return selected_mixers[0] - def start_mixer(self, config, mixer_class): + def start_mixer( + self, + config: config_lib.Config, + mixer_class: type[MixerActor], + ) -> MixerProxy | None: logger.info("Starting Mopidy mixer: %s", mixer_class.__name__) with _actor_error_handling(mixer_class.__name__): - mixer = mixer_class.start(config=config).proxy() + mixer = cast(MixerProxy, mixer_class.start(config=config).proxy()) try: mixer.ping().get() - return mixer except pykka.ActorDeadError as exc: logger.error("Actor died: %s", exc) + else: + return mixer return None - def configure_mixer(self, config, mixer): + def configure_mixer( + self, + config: config_lib.Config, + mixer: MixerProxy, + ) -> None: volume = config["audio"]["mixer_volume"] if volume is not None: - mixer.set_volume(volume) + mixer.set_volume(Percentage(volume)) logger.info("Mixer volume set to %d", volume) else: logger.debug("Mixer volume left unchanged") - def start_audio(self, config, mixer): + def start_audio( + self, + config: config_lib.Config, + mixer: MixerProxy | None, + ) -> AudioProxy: logger.info("Starting Mopidy audio") - return Audio.start(config=config, mixer=mixer).proxy() - - def 
start_backends(self, config, backend_classes, audio): + return cast(AudioProxy, Audio.start(config=config, mixer=mixer).proxy()) + + def start_backends( + self, + config: config_lib.Config, + backend_classes: list[type[BackendActor]], + audio, + ) -> list[BackendProxy]: logger.info( "Starting Mopidy backends: %s", ", ".join(b.__name__ for b in backend_classes) or "none", @@ -407,12 +460,15 @@ def start_backends(self, config, backend_classes, audio): backends = [] for backend_class in backend_classes: - with _actor_error_handling(backend_class.__name__): - with timer.time_logger(backend_class.__name__): - backend = backend_class.start( - config=config, audio=audio - ).proxy() - backends.append(backend) + with ( + _actor_error_handling(backend_class.__name__), + timer.time_logger(backend_class.__name__), + ): + backend = cast( + BackendProxy, + backend_class.start(config=config, audio=audio).proxy(), + ) + backends.append(backend) # Block until all on_starts have finished, letting them run in parallel for backend in backends[:]: @@ -424,48 +480,64 @@ def start_backends(self, config, backend_classes, audio): return backends - def start_core(self, config, mixer, backends, audio): + def start_core( + self, + config: config_lib.Config, + mixer: MixerProxy | None, + backends: list[BackendProxy], + audio: AudioProxy, + ) -> CoreProxy: logger.info("Starting Mopidy core") - core = Core.start( - config=config, mixer=mixer, backends=backends, audio=audio - ).proxy() - call = ProxyCall(attr_path=["_setup"], args=[], kwargs={}) + core = cast( + CoreProxy, + Core.start( + config=config, mixer=mixer, backends=backends, audio=audio + ).proxy(), + ) + call = ProxyCall(attr_path=("_setup",), args=(), kwargs={}) core.actor_ref.ask(call, block=True) return core - def start_frontends(self, config, frontend_classes, core): + def start_frontends( + self, + config: config_lib.Config, + frontend_classes: list[type[ThreadingActor]], + core: CoreProxy, + ) -> None: logger.info( "Starting Mopidy frontends: %s", ", ".join(f.__name__ for f in frontend_classes) or "none", ) for frontend_class in frontend_classes: - with _actor_error_handling(frontend_class.__name__): - with timer.time_logger(frontend_class.__name__): - frontend_class.start(config=config, core=core) + with ( + _actor_error_handling(frontend_class.__name__), + timer.time_logger(frontend_class.__name__), + ): + frontend_class.start(config=config, core=core) - def stop_frontends(self, frontend_classes): + def stop_frontends(self, frontend_classes: list[type[ThreadingActor]]) -> None: logger.info("Stopping Mopidy frontends") for frontend_class in frontend_classes: process.stop_actors_by_class(frontend_class) - def stop_core(self, core): + def stop_core(self, core: CoreProxy | None) -> None: logger.info("Stopping Mopidy core") - if core: - call = ProxyCall(attr_path=["_teardown"], args=[], kwargs={}) + if core is not None: + call = ProxyCall(attr_path=("_teardown",), args=(), kwargs={}) core.actor_ref.ask(call, block=True) process.stop_actors_by_class(Core) - def stop_backends(self, backend_classes): + def stop_backends(self, backend_classes: list[type[BackendActor]]) -> None: logger.info("Stopping Mopidy backends") for backend_class in backend_classes: process.stop_actors_by_class(backend_class) - def stop_audio(self): + def stop_audio(self) -> None: logger.info("Stopping Mopidy audio") process.stop_actors_by_class(Audio) - def stop_mixer(self, mixer_class): + def stop_mixer(self, mixer_class: type[MixerActor]) -> None: logger.info("Stopping Mopidy mixer") 
process.stop_actors_by_class(mixer_class) @@ -473,27 +545,41 @@ def stop_mixer(self, mixer_class): class ConfigCommand(Command): help = "Show currently active configuration." - def __init__(self): + def __init__(self) -> None: super().__init__() self.set(base_verbosity_level=-1) - def run(self, config, errors, schemas): + def run( + self, + args: argparse.Namespace, # noqa: ARG002 + config: config_lib.Config, + *_args: Any, + errors: config_lib.ConfigErrors, + schemas: config_lib.ConfigSchemas, + **_kwargs: Any, + ) -> int: data = config_lib.format(config, schemas, errors) # Throw away all bytes that are not valid UTF-8 before printing data = data.encode(errors="surrogateescape").decode(errors="replace") - print(data) + print(data) # noqa: T201 return 0 class DepsCommand(Command): help = "Show dependencies and debug information." - def __init__(self): + def __init__(self) -> None: super().__init__() self.set(base_verbosity_level=-1) - def run(self): - print(deps.format_dependency_list()) + def run( + self, + args: argparse.Namespace, # noqa: ARG002 + config: config_lib.Config, # noqa: ARG002 + *_args: Any, + **_kwargs: Any, + ) -> int: + print(deps.format_dependency_list()) # noqa: T201 return 0 diff --git a/mopidy/config/__init__.py b/src/mopidy/config/__init__.py similarity index 70% rename from mopidy/config/__init__.py rename to src/mopidy/config/__init__.py index 51c3ff91d7..7d7f84c9d1 100644 --- a/mopidy/config/__init__.py +++ b/src/mopidy/config/__init__.py @@ -1,11 +1,15 @@ +from __future__ import annotations + import configparser import itertools import logging import os import pathlib import re -from collections.abc import Mapping +from collections.abc import Iterator, Mapping +from typing import TYPE_CHECKING, Any, TypedDict, cast +import mopidy from mopidy.config import keyring from mopidy.config.schemas import ConfigSchema, MapConfigSchema from mopidy.config.types import ( @@ -13,26 +17,81 @@ ConfigValue, Deprecated, DeprecatedValue, + Float, Hostname, Integer, List, LogColor, LogLevel, + Pair, Path, Port, Secret, String, ) -from mopidy.internal import path, versioning +from mopidy.internal import path + +if TYPE_CHECKING: + from typing import TypeAlias + + from mopidy.ext import ExtensionData + from mopidy.internal.log import LogColorName, LogLevelName + + ConfigErrors: TypeAlias = dict[str, dict[str, Any]] + ConfigSchemas: TypeAlias = list[ConfigSchema | MapConfigSchema] + RawConfig: TypeAlias = dict[str, dict[str, Any]] + __all__ = [ # TODO List everything that is reexported, not just the unused parts. 
"ConfigValue", + "Float", "List", + "Pair", ] logger = logging.getLogger(__name__) + +class Config(TypedDict): + core: CoreConfig + logging: LoggingConfig + loglevels: dict[LogLevelName, int] + logcolors: dict[LogLevelName, LogColorName] + audio: AudioConfig + proxy: ProxyConfig + + +class CoreConfig(TypedDict): + cache_dir: pathlib.Path + config_dir: pathlib.Path + data_dir: pathlib.Path + max_tracklist_length: int + restore_state: bool + + +class LoggingConfig(TypedDict): + verbosity: int + format: str + color: bool + config_file: pathlib.Path | None + + +class AudioConfig(TypedDict): + mixer: str + mixer_volume: int | None + output: str + buffer_time: int | None + + +class ProxyConfig(TypedDict): + scheme: str | None + hostname: str | None + port: int | None + username: str | None + password: str | None + + _core_schema = ConfigSchema("core") _core_schema["cache_dir"] = Path() _core_schema["config_dir"] = Path() @@ -63,17 +122,15 @@ _proxy_schema = ConfigSchema("proxy") _proxy_schema["scheme"] = String( - optional=True, choices=["http", "https", "socks4", "socks5"] + optional=True, + choices=("http", "https", "socks4", "socks5"), ) _proxy_schema["hostname"] = Hostname(optional=True) _proxy_schema["port"] = Port(optional=True) _proxy_schema["username"] = String(optional=True) _proxy_schema["password"] = Secret(optional=True) -# NOTE: if multiple outputs ever comes something like LogLevelConfigSchema -# _outputs_schema = config.AudioOutputConfigSchema() - -_schemas = [ +_schemas: ConfigSchemas = [ _core_schema, _logging_schema, _loglevels_schema, @@ -95,12 +152,17 @@ """ -def read(config_file): - """Helper to load config defaults in same way across core and extensions""" +def read(config_file: pathlib.Path) -> str: + """Helper to load config defaults in same way across core and extensions.""" return pathlib.Path(config_file).read_text(errors="surrogateescape") -def load(files, ext_schemas, ext_defaults, overrides): +def load( + files: list[pathlib.Path], + ext_schemas: list[ConfigSchema], + ext_defaults: list[str], + overrides: list[Any], +) -> tuple[Config, ConfigErrors]: config_dir = pathlib.Path(__file__).parent defaults = [read(config_dir / "default.conf")] defaults.extend(ext_defaults) @@ -111,13 +173,18 @@ def load(files, ext_schemas, ext_defaults, overrides): return _validate(raw_config, schemas) -def format(config, ext_schemas, comments=None, display=True): +def format( # noqa: A001 + config: Config, + ext_schemas: ConfigSchemas, + comments: dict | None = None, + display: bool = True, +) -> str: schemas = _schemas[:] schemas.extend(ext_schemas) return _format(config, comments or {}, schemas, display, False) -def format_initial(extensions_data): +def format_initial(extensions_data: list[ExtensionData]) -> str: config_dir = pathlib.Path(__file__).parent defaults = [read(config_dir / "default.conf")] defaults.extend(d.extension.get_default_config() for d in extensions_data) @@ -128,10 +195,8 @@ def format_initial(extensions_data): config, errors = _validate(raw_config, schemas) - versions = [f"Mopidy {versioning.get_version()}"] - extensions_data = sorted( - extensions_data, key=lambda d: d.extension.dist_name - ) + versions = [f"Mopidy {mopidy.__version__}"] + extensions_data = sorted(extensions_data, key=lambda d: d.extension.dist_name) for data in extensions_data: versions.append(f"{data.extension.dist_name} {data.extension.version}") @@ -142,7 +207,11 @@ def format_initial(extensions_data): return header + "\n\n" + formatted_config -def _load(files, defaults, overrides): +def 
_load( + files: list[pathlib.Path], + defaults: list[str], + overrides: list[tuple[str, str, Any]], +) -> RawConfig: parser = configparser.RawConfigParser(inline_comment_prefixes=(";",)) # TODO: simply return path to config file for defaults so we can load it @@ -163,7 +232,7 @@ def _load(files, defaults, overrides): else: _load_file(parser, f.resolve()) - raw_config = {} + raw_config: RawConfig = {} for section in parser.sections(): raw_config[section] = dict(parser.items(section)) @@ -174,11 +243,13 @@ def _load(files, defaults, overrides): return raw_config -def _load_file(parser, file_path): +def _load_file( + parser: configparser.RawConfigParser, + file_path: pathlib.Path, +) -> None: if not file_path.exists(): logger.debug( - f"Loading config from {file_path.as_uri()} failed; " - f"it does not exist" + f"Loading config from {file_path.as_uri()} failed; it does not exist" ) return if not os.access(str(file_path), os.R_OK): @@ -209,10 +280,13 @@ def _load_file(parser, file_path): logger.debug(f"Config file {file_path.as_uri()} not found; skipping") -def _validate(raw_config, schemas): +def _validate( + raw_config: RawConfig, + schemas: ConfigSchemas, +) -> tuple[Config, ConfigErrors]: # Get validated config - config = {} - errors = {} + config: RawConfig = {} + errors: ConfigErrors = {} sections = set(raw_config) for schema in schemas: sections.discard(schema.name) @@ -229,14 +303,21 @@ def _validate(raw_config, schemas): f"because no matching extension was found" ) - return config, errors + return cast(Config, config), errors -def _format(config, comments, schemas, display, disable): +def _format( + config: Config, + comments: dict[str, Any], + schemas: ConfigSchemas, + display: bool, + disable: bool, +) -> str: output = [] for schema in schemas: serialized = schema.serialize( - config.get(schema.name, {}), display=display + config.get(schema.name, {}), + display=display, ) if not serialized: continue @@ -256,7 +337,7 @@ def _format(config, comments, schemas, display, disable): return "\n".join(output).strip() -def _preprocess(config_string): +def _preprocess(config_string: str) -> str: """Convert a raw config into a form that preserves comments etc.""" results = ["[__COMMENTS__]"] counter = itertools.count(0) @@ -266,22 +347,23 @@ def _preprocess(config_string): comment_re = re.compile(r"^(#|;)") inline_comment_re = re.compile(r" ;") - def newlines(match): + def newlines(_match) -> str: return f"__BLANK{next(counter):d}__ =" - def comments(match): - if match.group(1) == "#": - return f"__HASH{next(counter):d}__ =" - elif match.group(1) == ";": - return f"__SEMICOLON{next(counter):d}__ =" + def comments(match) -> str: + match match.group(1): + case "#": + return f"__HASH{next(counter):d}__ =" + case ";": + return f"__SEMICOLON{next(counter):d}__ =" + case _: + raise AssertionError(f"Unexpected comment type: {match.group(1)!r}") - def inlinecomments(match): + def inlinecomments(_match) -> str: return f"\n__INLINE{next(counter):d}__ =" - def sections(match): - return ( - f"{match.group(1)}\n__SECTION{next(counter):d}__ = {match.group(2)}" - ) + def sections(match) -> str: + return f"{match.group(1)}\n__SECTION{next(counter):d}__ = {match.group(2)}" for line in config_string.splitlines(): line = blank_line_re.sub(newlines, line) @@ -292,7 +374,7 @@ def sections(match): return "\n".join(results) -def _postprocess(config_string): +def _postprocess(config_string: str) -> str: """Converts a preprocessed config back to original form.""" flags = re.IGNORECASE | re.MULTILINE result = 
re.sub(r"^\[__COMMENTS__\](\n|$)", "", config_string, flags=flags) @@ -300,25 +382,24 @@ def _postprocess(config_string): result = re.sub(r"^__HASH\d+__ =(.*)$", r"#\g<1>", result, flags=flags) result = re.sub(r"^__SEMICOLON\d+__ =(.*)$", r";\g<1>", result, flags=flags) result = re.sub(r"\n__SECTION\d+__ =(.*)$", r"\g<1>", result, flags=flags) - result = re.sub(r"^__BLANK\d+__ =$", "", result, flags=flags) - return result + return re.sub(r"^__BLANK\d+__ =$", "", result, flags=flags) class Proxy(Mapping): - def __init__(self, data): + def __init__(self, data: Config | dict[str, Any]) -> None: self._data = data - def __getitem__(self, key): + def __getitem__(self, key) -> Any: item = self._data.__getitem__(key) if isinstance(item, dict): return Proxy(item) return item - def __iter__(self): + def __iter__(self) -> Iterator[str]: return self._data.__iter__() - def __len__(self): + def __len__(self) -> int: return self._data.__len__() - def __repr__(self): + def __repr__(self) -> str: return f"Proxy({self._data!r})" diff --git a/mopidy/config/default.conf b/src/mopidy/config/default.conf similarity index 100% rename from mopidy/config/default.conf rename to src/mopidy/config/default.conf diff --git a/mopidy/config/keyring.py b/src/mopidy/config/keyring.py similarity index 86% rename from mopidy/config/keyring.py rename to src/mopidy/config/keyring.py index 5e1b04dd11..f0b7775024 100644 --- a/mopidy/config/keyring.py +++ b/src/mopidy/config/keyring.py @@ -3,7 +3,7 @@ logger = logging.getLogger(__name__) try: - import dbus + import dbus # pyright: ignore[reportMissingImports] except ImportError: dbus = None @@ -11,10 +11,7 @@ # XXX: Hack to workaround introspection bug caused by gnome-keyring, should be # fixed by version 3.5 per: # https://git.gnome.org/browse/gnome-keyring/commit/?id=5dccbe88eb94eea9934e2b7 -if dbus: - EMPTY_STRING = dbus.String("", variant_level=1) -else: - EMPTY_STRING = "" +EMPTY_STRING = dbus.String("", variant_level=1) if dbus else "" FETCH_ERROR = ( @@ -23,7 +20,7 @@ ) -def fetch(): +def fetch() -> list[tuple[str, str, bytes]]: # noqa: PLR0911 if not dbus: logger.debug("%s (dbus not installed)", FETCH_ERROR) return [] @@ -35,13 +32,16 @@ def fetch(): return [] if not bus.name_has_owner("org.freedesktop.secrets"): - logger.debug( - "%s (org.freedesktop.secrets service not running)", FETCH_ERROR - ) + logger.debug("%s (org.freedesktop.secrets service not running)", FETCH_ERROR) return [] service = _service(bus) - session = service.OpenSession("plain", EMPTY_STRING)[1] + try: + session = service.OpenSession("plain", EMPTY_STRING)[1] + except dbus.exceptions.DBusException as e: + logger.debug("%s (%s)", FETCH_ERROR, e) + return [] + items, locked = service.SearchItems({"service": "mopidy"}) if not locked and not items: @@ -64,7 +64,11 @@ def fetch(): return result -def set(section, key, value): +def set( # noqa: A001, PLR0911 + section: str, + key: str, + value: str | bytes, +) -> bool: """Store a secret config value for a given section/key. Indicates if storage failed or succeeded. @@ -120,16 +124,12 @@ def set(section, key, value): return True _prompt(bus, prompt).Dismiss() - logger.debug( - "Saving secret %s/%s failed. (Keyring is locked)", section, key - ) + logger.debug("Saving secret %s/%s failed. 
(Keyring is locked)", section, key) return False def _service(bus): - return _interface( - bus, "/org/freedesktop/secrets", "org.freedesktop.Secret.Service" - ) + return _interface(bus, "/org/freedesktop/secrets", "org.freedesktop.Secret.Service") # NOTE: depending on versions and setup 'default' might not exists, so try and @@ -150,12 +150,14 @@ def _collection(bus): # NOTE: Hack to probe if a given collection actually exists. Needed to work # around an introspection bug in setting passwords for non-existant aliases. def _collection_exists(bus, path): + assert dbus try: item = _interface(bus, path, "org.freedesktop.DBus.Properties") item.Get("org.freedesktop.Secret.Collection", "Label") - return True except dbus.exceptions.DBusException: return False + else: + return True # NOTE: We could call prompt.Prompt('') to unlock the keyring when it is not @@ -173,5 +175,6 @@ def _item_attributes(bus, path): def _interface(bus, path, interface): + assert dbus obj = bus.get_object("org.freedesktop.secrets", path) return dbus.Interface(obj, interface) diff --git a/mopidy/config/schemas.py b/src/mopidy/config/schemas.py similarity index 81% rename from mopidy/config/schemas.py rename to src/mopidy/config/schemas.py index df16588926..149c52e202 100644 --- a/mopidy/config/schemas.py +++ b/src/mopidy/config/schemas.py @@ -1,4 +1,5 @@ import collections +from typing import Any from mopidy.config import types @@ -36,7 +37,6 @@ def _levenshtein(a, b): class ConfigSchema(collections.OrderedDict): - """Logical group of config values that correspond to a config section. Schemas are set up by assigning config keys with config values to @@ -47,17 +47,20 @@ class ConfigSchema(collections.OrderedDict): persistence. """ - def __init__(self, name): + def __init__(self, name: str) -> None: super().__init__() self.name = name - def deserialize(self, values): + def deserialize( + self, + values: dict[str, Any], + ) -> tuple[dict[str, Any], dict[str, Any]]: """Validates the given ``values`` using the config schema. Returns a tuple with cleaned values and errors. """ - errors = {} - result = {} + errors: dict[str, Any] = {} + result: dict[str, Any] = {} for key, value in values.items(): try: @@ -80,13 +83,18 @@ def deserialize(self, values): return result, errors - def serialize(self, values, display=False): + def serialize( + self, + values: dict[str, Any], + display: bool = False, + ) -> dict[str, Any]: """Converts the given ``values`` to a format suitable for persistence. If ``display`` is :class:`True` secret config values, like passwords, will be masked out. - Returns a dict of config keys and values.""" + Returns a dict of config keys and values. + """ result = collections.OrderedDict() for key in self.keys(): if key in values: @@ -95,20 +103,22 @@ def serialize(self, values, display=False): class MapConfigSchema: - """Schema for handling multiple unknown keys with the same type. Does not sub-class :class:`ConfigSchema`, but implements the same serialize/deserialize interface. 
""" - def __init__(self, name, value_type): + def __init__(self, name: str, value_type: types.ConfigValue) -> None: self.name = name self._value_type = value_type - def deserialize(self, values): - errors = {} - result = {} + def deserialize( + self, + values: dict[str, Any], + ) -> tuple[dict[str, Any], dict[str, Any]]: + errors: dict[str, Any] = {} + result: dict[str, Any] = {} for key, value in values.items(): try: @@ -118,7 +128,11 @@ def deserialize(self, values): errors[key] = str(e) return result, errors - def serialize(self, values, display=False): + def serialize( + self, + values: dict[str, Any], + display: bool = False, + ) -> dict[str, Any]: result = collections.OrderedDict() for key in sorted(values.keys()): result[key] = self._value_type.serialize(values[key], display) diff --git a/src/mopidy/config/types.py b/src/mopidy/config/types.py new file mode 100644 index 0000000000..0490139da2 --- /dev/null +++ b/src/mopidy/config/types.py @@ -0,0 +1,511 @@ +# ruff: noqa: ARG002 + +from __future__ import annotations + +import logging +import re +import socket +from abc import ABC, abstractmethod +from collections.abc import Callable +from typing import ( + TYPE_CHECKING, + Any, + AnyStr, + ClassVar, + Generic, + Literal, + TypeVar, + cast, +) + +from mopidy.config import validators +from mopidy.internal import log, path + +if TYPE_CHECKING: + from collections.abc import Iterable + +T = TypeVar("T") +K = TypeVar("K", bound="ConfigValue") +V = TypeVar("V", bound="ConfigValue") + + +def decode(value: AnyStr) -> str: + result = ( + value.decode(errors="surrogateescape") if isinstance(value, bytes) else value + ) + + for char in ("\\", "\n", "\t"): + result = result.replace(char.encode(encoding="unicode-escape").decode(), char) + + return result + + +def encode(value: AnyStr) -> str: + result = ( + value.decode(errors="surrogateescape") if isinstance(value, bytes) else value + ) + + for char in ("\\", "\n", "\t"): + result = result.replace(char, char.encode(encoding="unicode-escape").decode()) + + return result + + +class DeprecatedValue: + pass + + +class _TransformedValue(str): + def __new__(cls, _original, transformed): + return super().__new__(cls, transformed) + + def __init__(self, original, _transformed): + self.original = original + + +class ConfigValue(ABC, Generic[T]): + """Represents a config key's value and how to handle it. + + Normally you will only be interacting with sub-classes for config values + that encode either deserialization behavior and/or validation. + + Each config value should be used for the following actions: + + 1. Deserializing from a raw string and validating, raising ValueError on + failure. + 2. Serializing a value back to a string that can be stored in a config. + 3. Formatting a value to a printable form (useful for masking secrets). + + :class:`None` values should not be deserialized, serialized or formatted, + the code interacting with the config should simply skip None config values. + """ + + @abstractmethod + def deserialize(self, value: AnyStr) -> T | None: + """Cast raw string to appropriate type.""" + raise NotImplementedError + + def serialize(self, value: T, display: bool = False) -> str | DeprecatedValue: + """Convert value back to string for saving.""" + if value is None: + return "" + return str(value) + + +class Deprecated(ConfigValue[Any]): + """Deprecated value. + + Used for ignoring old config values that are no longer in use, but should + not cause the config parser to crash. 
+ """ + + def deserialize(self, value: AnyStr) -> DeprecatedValue: + return DeprecatedValue() + + def serialize(self, value: Any, display: bool = False) -> DeprecatedValue: + return DeprecatedValue() + + +class String(ConfigValue[str]): + r"""String value. + + Is decoded as utf-8, and \n and \t escapes should work and be preserved. + """ + + def __init__( + self, + optional: bool = False, + choices: Iterable[str] | None = None, + transformer: Callable[[str], str] | None = None, + ) -> None: + self._required = not optional + self._choices = choices + self._transformer = transformer + + def deserialize(self, value: AnyStr) -> str | None: + result = decode(value).strip() + validators.validate_required(result, self._required) + if not result: + return None + + # This is necessary for backwards-compatibility, in case subclasses + # aren't calling their parent constructor. + transformer = getattr(self, "_transformer", None) + if transformer: + transformed_value = transformer(result) + result = _TransformedValue(result, transformed_value) + + validators.validate_choice(result, self._choices) + return result + + def serialize(self, value: str, display: bool = False) -> str: + if value is None: + return "" + if isinstance(value, _TransformedValue): + value = value.original + return encode(value) + + +class Secret(String): + r"""Secret string value. + + Is decoded as utf-8, and \n and \t escapes should work and be preserved. + + Should be used for passwords, auth tokens etc. Will mask value when being + displayed. + """ + + def __init__( + self, + optional: bool = False, + choices: None = None, + transformer: Callable[[str], str] | None = None, + ) -> None: + super().__init__( + optional=optional, + choices=None, # Choices doesn't make sense for secrets + transformer=transformer, + ) + + def serialize(self, value: str, display: bool = False) -> str: + if value is not None and display: + return "********" + return super().serialize(value, display) + + +class Integer(ConfigValue[int]): + """Integer value.""" + + def __init__( + self, + minimum: int | None = None, + maximum: int | None = None, + choices: Iterable[int] | None = None, + optional: bool = False, + ) -> None: + self._required = not optional + self._minimum = minimum + self._maximum = maximum + self._choices = choices + + def deserialize(self, value: AnyStr) -> int | None: + result = decode(value) + validators.validate_required(result, self._required) + if not result: + return None + result = int(result) + validators.validate_choice(result, self._choices) + validators.validate_minimum(result, self._minimum) + validators.validate_maximum(result, self._maximum) + return result + + +class Float(ConfigValue[float]): + """Float value.""" + + def __init__( + self, + minimum: float | None = None, + maximum: float | None = None, + optional: bool = False, + ) -> None: + self._required = not optional + self._minimum = minimum + self._maximum = maximum + + def deserialize(self, value: AnyStr) -> float | None: + result = decode(value) + validators.validate_required(result, self._required) + if not value: + return None + result = float(result) + validators.validate_minimum(result, self._minimum) + validators.validate_maximum(result, self._maximum) + return result + + +class Boolean(ConfigValue[bool]): + """Boolean value. + + Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as + :class:`True`. + + Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as + :class:`False`. 
+ """ + + true_values = ("1", "yes", "true", "on") + false_values = ("0", "no", "false", "off") + + def __init__(self, optional: bool = False) -> None: + self._required = not optional + + def deserialize(self, value: AnyStr) -> bool | None: + result = decode(value) + validators.validate_required(result, self._required) + if not result: + return None + if result.lower() in self.true_values: + return True + if result.lower() in self.false_values: + return False + raise ValueError(f"invalid value for boolean: {result!r}") + + def serialize( + self, + value: bool, + display: bool = False, + ) -> Literal["true", "false"]: + if value is True: + return "true" + if value in (False, None): + return "false" + raise ValueError(f"{value!r} is not a boolean") + + +class Pair(ConfigValue[tuple[K, V]]): + """Pair value. + + The value is expected to be a pair of elements, separated by a specified delimiter. + Values can optionally not be a pair, in which case the whole input is provided for + both sides of the value. + """ + + _subtypes: tuple[K, V] + + def __init__( + self, + optional: bool = False, + optional_pair: bool = False, + separator: str = "|", + subtypes: tuple[K, V] = (String(), String()), + ) -> None: + self._required = not optional + self._optional_pair = optional_pair + self._separator = separator + self._subtypes = subtypes + + def deserialize(self, value: AnyStr) -> tuple[K, V] | None: + raw_value = decode(value).strip() + validators.validate_required(raw_value, self._required) + if not raw_value: + return None + + if self._separator in raw_value: + values = raw_value.split(self._separator, 1) + elif self._optional_pair: + values = (raw_value, raw_value) + else: + raise ValueError( + f"Config value must include {self._separator!r} separator: {raw_value}" + ) + + return cast( + tuple[K, V], + ( + self._subtypes[0].deserialize(encode(values[0])), + self._subtypes[1].deserialize(encode(values[1])), + ), + ) + + def serialize( + self, value: tuple[K, V], display: bool = False + ) -> str | DeprecatedValue: + serialized_first_value = self._subtypes[0].serialize(value[0], display=display) + serialized_second_value = self._subtypes[1].serialize(value[1], display=display) + + if isinstance(serialized_first_value, DeprecatedValue) or isinstance( + serialized_second_value, DeprecatedValue + ): + return DeprecatedValue() + + if ( + not display + and self._optional_pair + and serialized_first_value == serialized_second_value + ): + return serialized_first_value + + return f"{serialized_first_value}{self._separator}{serialized_second_value}" + + +class List(ConfigValue[tuple[V, ...] | frozenset[V]]): + """List value. + + Supports elements split by commas or newlines. Newlines take precedence and + empty list items will be filtered out. + + Enforcing unique entries in the list will result in a set data structure + being used. This does not preserve ordering, which could result in the + serialized output being unstable. + """ + + def __init__( + self, + optional: bool = False, + unique: bool = False, + subtype: V = String(), # noqa: B008 + ) -> None: + self._required = not optional + self._unique = unique + self._subtype = subtype + + def deserialize(self, value: AnyStr) -> tuple[V, ...] | frozenset[V]: + raw_value = decode(value) + + strings: list[str] + if "\n" in raw_value: + strings = re.split(r"\s*\n\s*", raw_value) + else: + strings = re.split(r"\s*,\s*", raw_value) + + # This is necessary for backwards-compatibility, in case subclasses + # aren't calling their parent constructor. 
+ subtype: ConfigValue = getattr(self, "_subtype", String()) + + values_iter = (subtype.deserialize(s.strip()) for s in strings if s.strip()) + values = frozenset(values_iter) if self._unique else tuple(values_iter) + + validators.validate_required(values, self._required) + return cast(tuple[V, ...] | frozenset[V], values) + + def serialize( + self, value: tuple[V, ...] | frozenset[V], display: bool = False + ) -> str: + if not value: + return "" + + # This is necessary for backwards-compatibility, in case subclasses + # aren't calling their parent constructor. + subtype: V = getattr(self, "_subtype", String()) # pyright: ignore[reportAssignmentType] + + serialized_values = [] + for item in value: + serialized_value = subtype.serialize(item, display=display) + if serialized_value: + serialized_values.append(serialized_value) + + return "\n " + "\n ".join(serialized_values) + + +class LogColor(ConfigValue[log.LogColorName]): + def deserialize(self, value: AnyStr) -> log.LogColorName: + raw_value = decode(value).lower() + validators.validate_choice(raw_value, log.COLORS) + raw_value = cast(log.LogColorName, raw_value) + return raw_value + + def serialize(self, value: log.LogColorName, display: bool = False) -> str: + if value.lower() in log.COLORS: + return encode(value.lower()) + return "" + + +class LogLevel(ConfigValue[int]): + """Log level value. + + Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``, + ``trace``, or ``all``, with any casing. + """ + + levels: ClassVar[dict[log.LogLevelName, int]] = { + "critical": logging.CRITICAL, + "error": logging.ERROR, + "warning": logging.WARNING, + "info": logging.INFO, + "debug": logging.DEBUG, + "trace": log.TRACE_LOG_LEVEL, + "all": logging.NOTSET, + } + + def deserialize(self, value: AnyStr) -> int | None: + raw_value = decode(value).lower() + validators.validate_choice(raw_value, self.levels.keys()) + raw_value = cast(log.LogLevelName, raw_value) + return self.levels.get(raw_value) + + def serialize(self, value: int, display: bool = False) -> str: + lookup = {v: k for k, v in self.levels.items()} + return encode(lookup.get(value, "")) + + +class Hostname(ConfigValue[str]): + """Network hostname value.""" + + def __init__(self, optional: bool = False) -> None: + self._required = not optional + + def deserialize(self, value: AnyStr, display: bool = False) -> str | None: + raw_value = decode(value).strip() + validators.validate_required(raw_value, self._required) + if not raw_value: + return None + + socket_path = path.get_unix_socket_path(raw_value) + if socket_path is not None: + path_str = Path(not self._required).deserialize(str(socket_path)) + return f"unix:{path_str}" + + try: + socket.getaddrinfo(raw_value, None) + except OSError as exc: + raise ValueError("must be a resolveable hostname or valid IP") from exc + + return raw_value + + +class Port(Integer): + """Network port value. + + Expects integer in the range 0-65535, zero tells the kernel to simply + allocate a port for us. + """ + + def __init__(self, choices=None, optional=False): + super().__init__( + minimum=0, maximum=2**16 - 1, choices=choices, optional=optional + ) + + +# Keep this for backwards compatibility +class _ExpandedPath(_TransformedValue): + pass + + +class Path(ConfigValue[_ExpandedPath]): + """File system path. 
+ + The following expansions of the path will be done: + + - ``~`` to the current user's home directory + - ``$XDG_CACHE_DIR`` according to the XDG spec + - ``$XDG_CONFIG_DIR`` according to the XDG spec + - ``$XDG_DATA_DIR`` according to the XDG spec + - ``$XDG_MUSIC_DIR`` according to the XDG spec + """ + + def __init__(self, optional=False): + self._required = not optional + + def deserialize(self, value: AnyStr) -> _ExpandedPath | None: + raw_value = decode(value).strip() + expanded = path.expand_path(raw_value) + validators.validate_required(raw_value, self._required) + validators.validate_required(expanded, self._required) + if not raw_value or expanded is None: + return None + return _ExpandedPath(raw_value, expanded) + + def serialize( + self, + value: None | (_ExpandedPath | bytes), + display: bool = False, + ) -> str: + if value is None: + return "" + result = value + if isinstance(result, _ExpandedPath): + result = result.original + if isinstance(result, bytes): + result = result.decode(errors="surrogateescape") + return str(result) diff --git a/mopidy/config/validators.py b/src/mopidy/config/validators.py similarity index 52% rename from mopidy/config/validators.py rename to src/mopidy/config/validators.py index 6f69cb569c..ce11f30a11 100644 --- a/mopidy/config/validators.py +++ b/src/mopidy/config/validators.py @@ -1,8 +1,24 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Protocol, TypeVar + +if TYPE_CHECKING: + from abc import abstractmethod + from collections.abc import Iterable + + class Comparable(Protocol): + @abstractmethod + def __lt__(self: CT, other: CT, /) -> bool: ... + + T = TypeVar("T") + CT = TypeVar("CT", bound=Comparable) + + # TODO: add validate regexp? -def validate_required(value, required): - """Validate that ``value`` is set if ``required`` +def validate_required(value: Any, required: bool) -> None: + """Validate that ``value`` is set if ``required``. Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize` on the raw string, _not_ the converted value. @@ -11,8 +27,8 @@ def validate_required(value, required): raise ValueError("must be set.") -def validate_choice(value, choices): - """Validate that ``value`` is one of the ``choices`` +def validate_choice(value: T, choices: Iterable[T] | None) -> None: + """Validate that ``value`` is one of the ``choices``. Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize`. """ @@ -21,8 +37,8 @@ def validate_choice(value, choices): raise ValueError(f"must be one of {names}, not {value}.") -def validate_minimum(value, minimum): - """Validate that ``value`` is at least ``minimum`` +def validate_minimum(value: CT, minimum: CT | None) -> None: + """Validate that ``value`` is at least ``minimum``. Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize`. """ @@ -30,8 +46,8 @@ def validate_minimum(value, minimum): raise ValueError(f"{value!r} must be larger than {minimum!r}.") -def validate_maximum(value, maximum): - """Validate that ``value`` is at most ``maximum`` +def validate_maximum(value: CT, maximum: CT | None) -> None: + """Validate that ``value`` is at most ``maximum``. Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize`. 
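To illustrate the behaviour of the new Pair and List value types added by this patch, a brief sketch with invented values (the keyword arguments are the ones defined above):

from mopidy.config import Integer, List, Pair, String

pair = Pair(optional_pair=True, subtypes=(String(), Integer()))
assert pair.deserialize("flac|9") == ("flac", 9)
# With optional_pair=True, a lone value is reused for both halves of the pair.
assert pair.deserialize("9") == ("9", 9)

numbers = List(unique=True, subtype=Integer(minimum=0))
# Input may be comma- or newline-separated; unique=True yields a frozenset.
assert numbers.deserialize("1, 2, 2, 3") == frozenset({1, 2, 3})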
""" diff --git a/src/mopidy/core/__init__.py b/src/mopidy/core/__init__.py new file mode 100644 index 0000000000..b8fe8e6cfe --- /dev/null +++ b/src/mopidy/core/__init__.py @@ -0,0 +1,27 @@ +from .actor import Core, CoreProxy +from .history import HistoryController, HistoryControllerProxy +from .library import LibraryController, LibraryControllerProxy +from .listener import CoreListener +from .mixer import MixerController, MixerControllerProxy +from .playback import PlaybackController, PlaybackControllerProxy, PlaybackState +from .playlists import PlaylistsController, PlaylistsControllerProxy +from .tracklist import TracklistController, TracklistControllerProxy + +__all__ = [ + "Core", + "CoreProxy", + "CoreListener", + "HistoryController", + "HistoryControllerProxy", + "LibraryController", + "LibraryControllerProxy", + "MixerController", + "MixerControllerProxy", + "PlaybackController", + "PlaybackControllerProxy", + "PlaybackState", + "PlaylistsController", + "PlaylistsControllerProxy", + "TracklistController", + "TracklistControllerProxy", +] diff --git a/mopidy/core/actor.py b/src/mopidy/core/actor.py similarity index 62% rename from mopidy/core/actor.py rename to src/mopidy/core/actor.py index ec44c079e3..643633c340 100644 --- a/mopidy/core/actor.py +++ b/src/mopidy/core/actor.py @@ -1,8 +1,15 @@ -import collections +# ruff: noqa: ARG002 + +from __future__ import annotations + import itertools import logging +from collections.abc import Iterable +from pathlib import Path +from typing import TYPE_CHECKING import pykka +from pykka.typing import ActorMemberMixin, proxy_method import mopidy from mopidy import audio, backend, mixer @@ -14,9 +21,19 @@ from mopidy.core.playback import PlaybackController from mopidy.core.playlists import PlaylistsController from mopidy.core.tracklist import TracklistController -from mopidy.internal import path, storage, validation, versioning +from mopidy.internal import path, storage, validation from mopidy.internal.models import CoreState +if TYPE_CHECKING: + from mopidy.config import Config + from mopidy.core.history import HistoryControllerProxy + from mopidy.core.library import LibraryControllerProxy + from mopidy.core.mixer import MixerControllerProxy + from mopidy.core.playback import PlaybackControllerProxy + from mopidy.core.playlists import PlaylistsControllerProxy + from mopidy.core.tracklist import TracklistControllerProxy + from mopidy.types import Uri + logger = logging.getLogger(__name__) @@ -26,31 +43,37 @@ class Core( backend.BackendListener, mixer.MixerListener, ): - - library = None + library: LibraryController """An instance of :class:`~mopidy.core.LibraryController`""" - history = None + history: HistoryController """An instance of :class:`~mopidy.core.HistoryController`""" - mixer = None + mixer: MixerController """An instance of :class:`~mopidy.core.MixerController`""" - playback = None + playback: PlaybackController """An instance of :class:`~mopidy.core.PlaybackController`""" - playlists = None + playlists: PlaylistsController """An instance of :class:`~mopidy.core.PlaylistsController`""" - tracklist = None + tracklist: TracklistController """An instance of :class:`~mopidy.core.TracklistController`""" - def __init__(self, config=None, mixer=None, backends=None, audio=None): + def __init__( + self, + config: Config, + *, + mixer: mixer.MixerProxy | None = None, + backends: Iterable[backend.BackendProxy], + audio: audio.AudioProxy | None = None, + ) -> None: super().__init__() self._config = config - self.backends = Backends(backends) + 
self.backends = Backends(backends or []) self.library = pykka.traversable( LibraryController(backends=self.backends, core=self) @@ -67,27 +90,32 @@ def __init__(self, config=None, mixer=None, backends=None, audio=None): self.audio = audio - def get_uri_schemes(self): - """Get list of URI schemes we can handle""" + def get_uri_schemes(self) -> list[backend.UriScheme]: + """Get list of URI schemes we can handle.""" futures = [b.uri_schemes for b in self.backends] results = pykka.get_all(futures) uri_schemes = itertools.chain(*results) return sorted(uri_schemes) - def get_version(self): - """Get version of the Mopidy core API""" - return versioning.get_version() + def get_version(self) -> str: + """Get version of the Mopidy core API.""" + return mopidy.__version__ - def reached_end_of_stream(self): + def reached_end_of_stream(self) -> None: self.playback._on_end_of_stream() - def stream_changed(self, uri): + def stream_changed(self, uri: Uri) -> None: self.playback._on_stream_changed(uri) - def position_changed(self, position): + def position_changed(self, position: int) -> None: self.playback._on_position_changed(position) - def state_changed(self, old_state, new_state, target_state): + def state_changed( + self, + old_state: PlaybackState, + new_state: PlaybackState, + target_state: PlaybackState | None, + ) -> None: # XXX: This is a temporary fix for issue #232 while we wait for a more # permanent solution with the implementation of issue #234. When the # Spotify play token is lost, the Spotify backend pauses audio @@ -105,79 +133,80 @@ def state_changed(self, old_state, new_state, target_state): self.playback.set_state(new_state) self.playback._trigger_track_playback_paused() - def playlists_loaded(self): + def playlists_loaded(self) -> None: # Forward event from backend to frontends CoreListener.send("playlists_loaded") - def volume_changed(self, volume): + def volume_changed(self, volume: int) -> None: # Forward event from mixer to frontends CoreListener.send("volume_changed", volume=volume) - def mute_changed(self, mute): + def mute_changed(self, mute: bool) -> None: # Forward event from mixer to frontends CoreListener.send("mute_changed", mute=mute) - def tags_changed(self, tags): + def tags_changed(self, tags: set[str]) -> None: if not self.audio or "title" not in tags: return - tags = self.audio.get_current_tags().get() - if not tags: + current_tags = self.audio.get_current_tags().get() + if not current_tags: return self.playback._stream_title = None # TODO: Do not emit stream title changes for plain tracks. We need a # better way to decide if something is a stream. - if "title" in tags and tags["title"]: - title = tags["title"][0] + if current_tags.get("title"): + title = current_tags["title"][0] current_track = self.playback.get_current_track() if current_track is not None and current_track.name != title: self.playback._stream_title = title CoreListener.send("stream_title_changed", title=title) - def _setup(self): + def _setup(self) -> None: """Do not call this function. 
It is for internal use at startup.""" try: coverage = [] - if self._config and "restore_state" in self._config["core"]: - if self._config["core"]["restore_state"]: - coverage = [ - "tracklist", - "mode", - "play-last", - "mixer", - "history", - ] + if ( + self._config + and "restore_state" in self._config["core"] + and self._config["core"]["restore_state"] + ): + coverage = [ + "tracklist", + "mode", + "play-last", + "mixer", + "history", + ] if len(coverage): self._load_state(coverage) except Exception as e: logger.warning("Restore state: Unexpected error: %s", str(e)) - def _teardown(self): + def _teardown(self) -> None: """Do not call this function. It is for internal use at shutdown.""" try: - if self._config and "restore_state" in self._config["core"]: - if self._config["core"]["restore_state"]: - self._save_state() + if ( + self._config + and "restore_state" in self._config["core"] + and self._config["core"]["restore_state"] + ): + self._save_state() except Exception as e: logger.warning("Unexpected error while saving state: %s", str(e)) - def _get_data_dir(self): + def _get_data_dir(self) -> Path: # get or create data director for core - data_dir_path = ( - path.expand_path(self._config["core"]["data_dir"]) / "core" - ) + data_dir_path = path.expand_path(self._config["core"]["data_dir"]) / "core" path.get_or_create_dir(data_dir_path) return data_dir_path - def _get_state_file(self): + def _get_state_file(self) -> Path: return self._get_data_dir() / "state.json.gz" - def _save_state(self): - """ - Save current state to disk. - """ - + def _save_state(self) -> None: + """Save current state to disk.""" state_file = self._get_state_file() logger.info("Saving state to %s", state_file) @@ -192,9 +221,8 @@ def _save_state(self): storage.dump(state_file, data) logger.debug("Saving state done") - def _load_state(self, coverage): - """ - Restore state from disk. + def _load_state(self, coverage: Iterable[str]) -> None: + """Restore state from disk. Load state from disk and restore it. Parameter ``coverage`` limits the amount of data to restore. 
Possible @@ -209,7 +237,6 @@ def _load_state(self, coverage): :param coverage: amount of data to restore :type coverage: list of strings """ - state_file = self._get_state_file() logger.info("Loading state from %s", state_file) @@ -233,18 +260,18 @@ def _load_state(self, coverage): class Backends(list): - def __init__(self, backends): + def __init__(self, backends: Iterable[backend.BackendProxy]) -> None: super().__init__(backends) - self.with_library = collections.OrderedDict() - self.with_library_browse = collections.OrderedDict() - self.with_playback = collections.OrderedDict() - self.with_playlists = collections.OrderedDict() + self.with_library: dict[backend.UriScheme, backend.BackendProxy] = {} + self.with_library_browse: dict[backend.UriScheme, backend.BackendProxy] = {} + self.with_playback: dict[backend.UriScheme, backend.BackendProxy] = {} + self.with_playlists: dict[backend.UriScheme, backend.BackendProxy] = {} - backends_by_scheme = {} + backends_by_scheme: dict[backend.UriScheme, backend.BackendProxy] = {} - def name(b): - return b.actor_ref.actor_class.__name__ + def name(backend_proxy: backend.BackendProxy) -> str: + return backend_proxy.actor_ref.actor_class.__name__ for b in backends: try: @@ -273,3 +300,22 @@ def name(b): self.with_playback[scheme] = b if has_playlists: self.with_playlists[scheme] = b + + +class CoreProxy(ActorMemberMixin, pykka.ActorProxy[Core]): + library: LibraryControllerProxy + history: HistoryControllerProxy + mixer: MixerControllerProxy + playback: PlaybackControllerProxy + playlists: PlaylistsControllerProxy + tracklist: TracklistControllerProxy + get_uri_schemes = proxy_method(Core.get_uri_schemes) + get_version = proxy_method(Core.get_version) + reached_end_of_stream = proxy_method(Core.reached_end_of_stream) + stream_changed = proxy_method(Core.stream_changed) + position_changed = proxy_method(Core.position_changed) + state_changed = proxy_method(Core.state_changed) + playlists_loaded = proxy_method(Core.playlists_loaded) + volume_changed = proxy_method(Core.volume_changed) + mute_changed = proxy_method(Core.mute_changed) + tags_changed = proxy_method(Core.tags_changed) diff --git a/mopidy/core/history.py b/src/mopidy/core/history.py similarity index 58% rename from mopidy/core/history.py rename to src/mopidy/core/history.py index 4202567d95..09d4db6fed 100644 --- a/mopidy/core/history.py +++ b/src/mopidy/core/history.py @@ -1,63 +1,65 @@ +from __future__ import annotations + import copy import logging import time +from collections.abc import Iterable +from typing import TYPE_CHECKING + +from pykka.typing import proxy_method -from mopidy import models from mopidy.internal.models import HistoryState, HistoryTrack +from mopidy.models import Ref, Track + +if TYPE_CHECKING: + from typing import TypeAlias logger = logging.getLogger(__name__) +History: TypeAlias = list[tuple[int, Ref]] + class HistoryController: - def __init__(self): - self._history = [] + def __init__(self) -> None: + self._history: History = [] - def _add_track(self, track): + def _add_track(self, track: Track) -> None: """Add track to the playback history. Internal method for :class:`mopidy.core.PlaybackController`. 
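As a rough illustration of how the typed proxies above are consumed, here is a hypothetical frontend (the class name and the message it prints are invented); per the startup code in commands.py, frontends are started with config and a core proxy:

import pykka

from mopidy.core import CoreListener, CoreProxy


class HistoryLoggingFrontend(pykka.ThreadingActor, CoreListener):
    """Hypothetical frontend that reports the history length on each new track."""

    def __init__(self, config, core: CoreProxy) -> None:
        super().__init__()
        self.config = config
        self.core = core

    def track_playback_started(self, tl_track) -> None:
        # Proxy calls return pykka futures; .get() blocks for the result.
        length = self.core.history.get_length().get()
        print(f"{length} tracks in the playback history")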
:param track: track to add - :type track: :class:`mopidy.models.Track` """ - if not isinstance(track, models.Track): + if not isinstance(track, Track): raise TypeError("Only Track objects can be added to the history") timestamp = int(time.time() * 1000) name_parts = [] if track.artists: - artists_names = [ - artist.name for artist in track.artists if artist.name - ] + artists_names = [artist.name for artist in track.artists if artist.name] if artists_names: name_parts.append(", ".join(artists_names)) if track.name is not None: name_parts.append(track.name) name = " - ".join(name_parts) - ref = models.Ref.track(uri=track.uri, name=name) + ref = Ref.track(uri=track.uri, name=name) self._history.insert(0, (timestamp, ref)) - def get_length(self): - """Get the number of tracks in the history. - - :returns: the history length - :rtype: int - """ + def get_length(self) -> int: + """Get the number of tracks in the history.""" return len(self._history) - def get_history(self): + def get_history(self) -> History: """Get the track history. + Returns a list of two-tuples with timestamp and a reference to the track. The timestamps are milliseconds since epoch. - - :returns: the track history - :rtype: list of (timestamp, :class:`mopidy.models.Ref`) tuples """ return copy.copy(self._history) - def _save_state(self): + def _save_state(self) -> HistoryState: # 500 tracks a 3 minutes -> 24 hours history count_max = 500 count = 1 @@ -70,6 +72,11 @@ def _save_state(self): break return HistoryState(history=history_list) - def _load_state(self, state, coverage): + def _load_state(self, state: HistoryState, coverage: Iterable[str]) -> None: if state and "history" in coverage: self._history = [(h.timestamp, h.track) for h in state.history] + + +class HistoryControllerProxy: + get_length = proxy_method(HistoryController.get_length) + get_history = proxy_method(HistoryController.get_history) diff --git a/mopidy/core/library.py b/src/mopidy/core/library.py similarity index 71% rename from mopidy/core/library.py rename to src/mopidy/core/library.py index 92766eda84..873fd88304 100644 --- a/mopidy/core/library.py +++ b/src/mopidy/core/library.py @@ -1,18 +1,32 @@ +from __future__ import annotations + import collections import contextlib import logging import operator -import urllib -from collections.abc import Mapping +import urllib.parse +from collections.abc import Generator, Iterable, Mapping +from typing import TYPE_CHECKING, Any, cast + +from pykka.typing import proxy_method -from mopidy import exceptions, models +from mopidy import exceptions from mopidy.internal import deprecation, validation +from mopidy.models import Image, Ref, SearchResult, Track +from mopidy.types import DistinctField, Query, SearchField, Uri, UriScheme + +if TYPE_CHECKING: + from mopidy.backend import BackendProxy + from mopidy.core.actor import Backends, Core logger = logging.getLogger(__name__) @contextlib.contextmanager -def _backend_error_handling(backend, reraise=None): +def _backend_error_handling( + backend: BackendProxy, + reraise: None | (type[Exception] | tuple[type[Exception], ...]) = None, +) -> Generator[None, Any, None]: try: yield except exceptions.ValidationError as e: @@ -31,30 +45,32 @@ def _backend_error_handling(backend, reraise=None): class LibraryController: - def __init__(self, backends, core): + def __init__(self, backends: Backends, core: Core) -> None: self.backends = backends self.core = core - def _get_backend(self, uri): - uri_scheme = urllib.parse.urlparse(uri).scheme + def _get_backend(self, uri: Uri) -> 
BackendProxy | None: + uri_scheme = UriScheme(urllib.parse.urlparse(uri).scheme) return self.backends.with_library.get(uri_scheme, None) - def _get_backends_to_uris(self, uris): - if uris: - backends_to_uris = collections.defaultdict(list) - for uri in uris: - backend = self._get_backend(uri) - if backend is not None: - backends_to_uris[backend].append(uri) - else: - backends_to_uris = { - b: None for b in self.backends.with_library.values() - } - return backends_to_uris + def _get_backends_to_uris( + self, + uris: Iterable[Uri] | None, + ) -> dict[BackendProxy, list[Uri] | None]: + if not uris: + return {b: None for b in self.backends.with_library.values()} + + result: dict[BackendProxy, list[Uri] | None] = collections.defaultdict(list) + for uri in uris: + backend = self._get_backend(uri) + if backend is not None: + lst = result[backend] + assert lst is not None + lst.append(uri) + return result - def browse(self, uri): - """ - Browse directories and tracks at the given ``uri``. + def browse(self, uri: Uri | None) -> list[Ref]: + """Browse directories and tracks at the given ``uri``. ``uri`` is a string which represents some directory belonging to a backend. To get the intial root directories for backends pass @@ -80,31 +96,30 @@ def browse(self, uri): Ref.directory(uri='dummy:directory:/bar', name='bar') - :param string uri: URI to browse - :rtype: list of :class:`mopidy.models.Ref` + :param uri: URI to browse .. versionadded:: 0.18 """ if uri is None: return self._roots() - elif not uri.strip(): + if not uri.strip(): return [] validation.check_uri(uri) return self._browse(uri) - def _roots(self): + def _roots(self) -> list[Ref]: directories = set() backends = self.backends.with_library_browse.values() futures = {b: b.library.root_directory for b in backends} for backend, future in futures.items(): with _backend_error_handling(backend): root = future.get() - validation.check_instance(root, models.Ref) + validation.check_instance(root, Ref) directories.add(root) return sorted(directories, key=operator.attrgetter("name")) - def _browse(self, uri): - scheme = urllib.parse.urlparse(uri).scheme + def _browse(self, uri: Uri) -> list[Ref]: + scheme = UriScheme(urllib.parse.urlparse(uri).scheme) backend = self.backends.with_library_browse.get(scheme) if not backend: @@ -112,27 +127,31 @@ def _browse(self, uri): with _backend_error_handling(backend): result = backend.library.browse(uri).get() - validation.check_instances(result, models.Ref) + validation.check_instances(result, Ref) return result return [] - def get_distinct(self, field, query=None): - """ - List distinct values for a given field from the library. + def get_distinct( + self, + field: DistinctField, + query: Query[SearchField] | None = None, + ) -> set[Any]: + """List distinct values for a given field from the library. This has mainly been added to support the list commands the MPD protocol supports in a more sane fashion. Other frontends are not recommended to use this method. - :param string field: Any one of ``uri``, ``track_name``, ``album``, + Returns set of values corresponding to the requested field type. + + :param field: Any one of ``uri``, ``track_name``, ``album``, ``artist``, ``albumartist``, ``composer``, ``performer``, ``track_no``, ``genre``, ``date``, ``comment``, ``disc_no``, ``musicbrainz_albumid``, ``musicbrainz_artistid``, or ``musicbrainz_trackid``. 
- :param dict query: Query to use for limiting results, see + :param query: Query to use for limiting results, see :meth:`search` for details about the query format. - :rtype: set of values corresponding to the requested field type. .. versionadded:: 1.0 """ @@ -145,9 +164,10 @@ def get_distinct(self, field, query=None): else: validation.check_choice(field, validation.DISTINCT_FIELDS.keys()) field_type = validation.DISTINCT_FIELDS.get(field) - query is None or validation.check_query(query) # TODO: normalize? + if query is not None: + validation.check_query(query) # TODO: normalize? - compat_field = {"track_name": "track"}.get(field, field) + compat_field = cast(DistinctField, {"track_name": "track"}.get(field, field)) result = set() futures = { @@ -158,12 +178,13 @@ def get_distinct(self, field, query=None): with _backend_error_handling(backend): values = future.get() if values is not None: - validation.check_instances(values, field_type) + if field_type is not None: + validation.check_instances(values, field_type) result.update(values) return result - def get_images(self, uris): - """Lookup the images for the given URIs + def get_images(self, uris: Iterable[Uri]) -> dict[Uri, tuple[Image, ...]]: + """Lookup the images for the given URIs. Backends can use this to return image URIs for any URI they know about be it tracks, albums, playlists. The lookup result is a dictionary @@ -173,8 +194,6 @@ def get_images(self, uris): for will simply return an empty list for that URI. :param uris: list of URIs to find images for - :type uris: list of string - :rtype: {uri: tuple of :class:`mopidy.models.Image`} .. versionadded:: 1.0 """ @@ -182,13 +201,11 @@ def get_images(self, uris): futures = { backend: backend.library.get_images(backend_uris) - for (backend, backend_uris) in self._get_backends_to_uris( - uris - ).items() + for (backend, backend_uris) in self._get_backends_to_uris(uris).items() if backend_uris } - results = {uri: () for uri in uris} + results: dict[Uri, tuple[Image, ...]] = {uri: () for uri in uris} for backend, future in futures.items(): with _backend_error_handling(backend): if future.get() is None: @@ -199,20 +216,17 @@ def get_images(self, uris): raise exceptions.ValidationError( f"Got unknown image URI: {uri}" ) - validation.check_instances(images, models.Image) + validation.check_instances(images, Image) results[uri] += tuple(images) return results - def lookup(self, uris): - """ - Lookup the given URIs. + def lookup(self, uris: Iterable[Uri]) -> dict[Uri, list[Track]]: + """Lookup the given URIs. If the URI expands to multiple tracks, the returned list will contain them all. :param uris: track URIs - :type uris: list of string - :rtype: {uri: list of :class:`mopidy.models.Track`} """ validation.check_uris(uris) @@ -229,21 +243,20 @@ def lookup(self, uris): with _backend_error_handling(backend): result = future.get() if result is not None: - validation.check_instances(result, models.Track) + validation.check_instances(result, Track) # TODO Consider making Track.uri field mandatory, and # then remove this filtering of tracks without URIs. results[u] = [r for r in result if r.uri] return results - def refresh(self, uri=None): - """ - Refresh library. Limit to URI and below if an URI is given. + def refresh(self, uri: Uri | None = None) -> None: + """Refresh library. Limit to URI and below if an URI is given. 
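A minimal usage sketch of the library calls above, as seen through a core actor proxy; the `core` name and the URI are assumptions for illustration, and every proxy call returns a pykka future:

    uri = "local:track:example.flac"  # made-up URI

    tracks = core.library.lookup(uris=[uri]).get()       # {uri: [Track, ...]}
    images = core.library.get_images(uris=[uri]).get()   # {uri: (Image, ...)}
    genres = core.library.get_distinct(
        "genre", {"artist": ["Some Artist"]}
    ).get()                                              # set of genre values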
:param uri: directory or track URI - :type uri: string """ - uri is None or validation.check_uri(uri) + if uri is not None: + validation.check_uri(uri) futures = {} backends = {} @@ -260,14 +273,13 @@ def refresh(self, uri=None): with _backend_error_handling(backend): future.get() - def search(self, query, uris=None, exact=False): - """ - Search the library for tracks where ``field`` contains ``values``. - - ``field`` can be one of ``uri``, ``track_name``, ``album``, ``artist``, - ``albumartist``, ``composer``, ``performer``, ``track_no``, ``genre``, - ``date``, ``comment``, ``disc_no``, ``musicbrainz_albumid``, - ``musicbrainz_artistid``, ``musicbrainz_trackid`` or ``any``. + def search( + self, + query: Query[SearchField], + uris: Iterable[Uri] | None = None, + exact: bool = False, + ) -> list[SearchResult]: + """Search the library for tracks where ``field`` contains ``values``. If ``uris`` is given, the search is limited to results from within the URI roots. For example passing ``uris=['file:']`` will limit the search @@ -293,19 +305,16 @@ def search(self, query, uris=None, exact=False): search({'artist': ['xyz', 'abc']}) :param query: one or more queries to search for - :type query: dict :param uris: zero or more URI roots to limit the search to - :type uris: list of string or :class:`None` :param exact: if the search should use exact matching - :type exact: :class:`bool` - :rtype: list of :class:`mopidy.models.SearchResult` .. versionadded:: 1.0 The ``exact`` keyword argument. """ query = _normalize_query(query) - uris is None or validation.check_uris(uris) + if uris is not None: + validation.check_uris(uris) validation.check_query(query) validation.check_boolean(exact) @@ -329,7 +338,7 @@ def search(self, query, uris=None, exact=False): with _backend_error_handling(backend, reraise=reraise): result = future.get() if result is not None: - validation.check_instance(result, models.SearchResult) + validation.check_instance(result, SearchResult) results.append(result) except TypeError: backend_name = backend.actor_ref.actor_class.__name__ @@ -342,10 +351,10 @@ def search(self, query, uris=None, exact=False): return results -def _normalize_query(query): +def _normalize_query(query: Query[SearchField]) -> Query[SearchField]: broken_client = False # TODO: this breaks if query is not a dictionary like object... - for (field, values) in query.items(): + for field, values in query.items(): if isinstance(values, str): broken_client = True query[field] = [values] @@ -363,3 +372,12 @@ def _normalize_query(query): "and file a bug." 
) return query + + +class LibraryControllerProxy: + browse = proxy_method(LibraryController.browse) + get_distinct = proxy_method(LibraryController.get_distinct) + get_images = proxy_method(LibraryController.get_images) + lookup = proxy_method(LibraryController.lookup) + refresh = proxy_method(LibraryController.refresh) + search = proxy_method(LibraryController.search) diff --git a/mopidy/core/listener.py b/src/mopidy/core/listener.py similarity index 51% rename from mopidy/core/listener.py rename to src/mopidy/core/listener.py index b0222252be..6b73fd0146 100644 --- a/mopidy/core/listener.py +++ b/src/mopidy/core/listener.py @@ -1,10 +1,37 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Literal + from mopidy import listener +if TYPE_CHECKING: + from typing import TypeAlias -class CoreListener(listener.Listener): + from mopidy.audio import PlaybackState + from mopidy.models import Playlist, TlTrack + from mopidy.types import DurationMs, Percentage, Uri - """ - Marker interface for recipients of events sent by the core actor. + +CoreEvent: TypeAlias = Literal[ + "track_playback_paused", + "track_playback_resumed", + "track_playback_started", + "track_playback_ended", + "playback_state_changed", + "tracklist_changed", + "playlists_loaded", + "playlist_changed", + "playlist_deleted", + "options_changed", + "volume_changed", + "mute_changed", + "seeked", + "stream_title_changed", +] + + +class CoreListener(listener.Listener): + """Marker interface for recipients of events sent by the core actor. Any Pykka actor that mixes in this class will receive calls to the methods defined here when the corresponding events happen in the core actor. This @@ -14,77 +41,75 @@ class CoreListener(listener.Listener): """ @staticmethod - def send(event, **kwargs): - """Helper to allow calling of core listener events""" + def send(event: CoreEvent, **kwargs: Any) -> None: + """Helper to allow calling of core listener events.""" listener.send(CoreListener, event, **kwargs) - def on_event(self, event, **kwargs): - """ - Called on all events. + def on_event(self, event: CoreEvent, **kwargs: Any) -> None: + """Called on all events. *MAY* be implemented by actor. By default, this method forwards the event to the specific event methods. :param event: the event name - :type event: string :param kwargs: any other arguments to the specific event handlers """ # Just delegate to parent, entry mostly for docs. super().on_event(event, **kwargs) - def track_playback_paused(self, tl_track, time_position): - """ - Called whenever track playback is paused. + def track_playback_paused( + self, + tl_track: TlTrack, + time_position: DurationMs, + ) -> None: + """Called whenever track playback is paused. *MAY* be implemented by actor. :param tl_track: the track that was playing when playback paused - :type tl_track: :class:`mopidy.models.TlTrack` :param time_position: the time position in milliseconds - :type time_position: int """ - pass - def track_playback_resumed(self, tl_track, time_position): - """ - Called whenever track playback is resumed. + def track_playback_resumed( + self, + tl_track: TlTrack, + time_position: DurationMs, + ) -> None: + """Called whenever track playback is resumed. *MAY* be implemented by actor. 
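A minimal sketch of a frontend actor consuming the events declared above; the class name is invented, only the events of interest need to be overridden, and a real frontend would normally also accept ``config`` and ``core`` arguments:

    import pykka

    from mopidy.core import CoreListener


    class ExampleFrontend(pykka.ThreadingActor, CoreListener):
        def track_playback_started(self, tl_track) -> None:
            print(f"Now playing: {tl_track.track.name}")

        def volume_changed(self, volume) -> None:
            print(f"Volume is now {volume}")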
:param tl_track: the track that was playing when playback resumed - :type tl_track: :class:`mopidy.models.TlTrack` :param time_position: the time position in milliseconds - :type time_position: int """ - pass - def track_playback_started(self, tl_track): - """ - Called whenever a new track starts playing. + def track_playback_started(self, tl_track: TlTrack) -> None: + """Called whenever a new track starts playing. *MAY* be implemented by actor. :param tl_track: the track that just started playing - :type tl_track: :class:`mopidy.models.TlTrack` """ - pass - def track_playback_ended(self, tl_track, time_position): - """ - Called whenever playback of a track ends. + def track_playback_ended( + self, + tl_track: TlTrack, + time_position: DurationMs, + ) -> None: + """Called whenever playback of a track ends. *MAY* be implemented by actor. :param tl_track: the track that was played before playback stopped - :type tl_track: :class:`mopidy.models.TlTrack` :param time_position: the time position in milliseconds - :type time_position: int """ - pass - def playback_state_changed(self, old_state, new_state): - """ - Called whenever playback state is changed. + def playback_state_changed( + self, + old_state: PlaybackState, + new_state: PlaybackState, + ) -> None: + """Called whenever playback state is changed. *MAY* be implemented by actor. @@ -93,79 +118,63 @@ def playback_state_changed(self, old_state, new_state): :param new_state: the state after the change :type new_state: string from :class:`mopidy.core.PlaybackState` field """ - pass - def tracklist_changed(self): - """ - Called whenever the tracklist is changed. + def tracklist_changed(self) -> None: + """Called whenever the tracklist is changed. *MAY* be implemented by actor. """ - pass - def playlists_loaded(self): - """ - Called when playlists are loaded or refreshed. + def playlists_loaded(self) -> None: + """Called when playlists are loaded or refreshed. *MAY* be implemented by actor. """ - pass - def playlist_changed(self, playlist): - """ - Called whenever a playlist is changed. + def playlist_changed(self, playlist: Playlist) -> None: + """Called whenever a playlist is changed. *MAY* be implemented by actor. :param playlist: the changed playlist :type playlist: :class:`mopidy.models.Playlist` """ - pass - def playlist_deleted(self, uri): - """ - Called whenever a playlist is deleted. + def playlist_deleted(self, uri: Uri) -> None: + """Called whenever a playlist is deleted. *MAY* be implemented by actor. :param uri: the URI of the deleted playlist :type uri: string """ - pass - def options_changed(self): - """ - Called whenever an option is changed. + def options_changed(self) -> None: + """Called whenever an option is changed. *MAY* be implemented by actor. """ - pass - def volume_changed(self, volume): - """ - Called whenever the volume is changed. + def volume_changed(self, volume: Percentage) -> None: + """Called whenever the volume is changed. *MAY* be implemented by actor. :param volume: the new volume in the range [0..100] :type volume: int """ - pass - def mute_changed(self, mute): - """ - Called whenever the mute state is changed. + def mute_changed(self, mute: bool) -> None: + """Called whenever the mute state is changed. *MAY* be implemented by actor. :param mute: the new mute state :type mute: boolean """ - pass - def seeked(self, time_position): - """ - Called whenever the time position changes by an unexpected amount, e.g. 
+ def seeked(self, time_position: DurationMs) -> None: + """Called whenever the time position changes by an unexpected amount, e.g. at seek to a new time position. *MAY* be implemented by actor. @@ -173,15 +182,12 @@ def seeked(self, time_position): :param time_position: the position that was seeked to in milliseconds :type time_position: int """ - pass - def stream_title_changed(self, title): - """ - Called whenever the currently playing stream title changes. + def stream_title_changed(self, title: str) -> None: + """Called whenever the currently playing stream title changes. *MAY* be implemented by actor. :param title: the new stream title :type title: string """ - pass diff --git a/mopidy/core/mixer.py b/src/mopidy/core/mixer.py similarity index 68% rename from mopidy/core/mixer.py rename to src/mopidy/core/mixer.py index 5c81cd499c..54e39dd191 100644 --- a/mopidy/core/mixer.py +++ b/src/mopidy/core/mixer.py @@ -1,15 +1,25 @@ +from __future__ import annotations + import contextlib import logging +from collections.abc import Generator, Iterable +from typing import TYPE_CHECKING, Any + +from pykka.typing import proxy_method from mopidy import exceptions from mopidy.internal import validation from mopidy.internal.models import MixerState +from mopidy.types import Percentage + +if TYPE_CHECKING: + from mopidy.mixer import MixerProxy logger = logging.getLogger(__name__) @contextlib.contextmanager -def _mixer_error_handling(mixer): +def _mixer_error_handling(mixer: MixerProxy) -> Generator[None, Any, None]: try: yield except exceptions.ValidationError as e: @@ -26,10 +36,10 @@ def _mixer_error_handling(mixer): class MixerController: - def __init__(self, mixer): + def __init__(self, mixer: MixerProxy | None) -> None: self._mixer = mixer - def get_volume(self): + def get_volume(self) -> Percentage | None: """Get the volume. Integer in range [0..100] or :class:`None` if unknown. @@ -41,12 +51,13 @@ def get_volume(self): with _mixer_error_handling(self._mixer): volume = self._mixer.get_volume().get() - volume is None or validation.check_integer(volume, min=0, max=100) + if volume is not None: + validation.check_integer(volume, min=0, max=100) return volume return None - def set_volume(self, volume): + def set_volume(self, volume: Percentage) -> bool: """Set the volume. The volume is defined as an integer in range [0..100]. @@ -67,7 +78,7 @@ def set_volume(self, volume): return False - def get_mute(self): + def get_mute(self) -> bool | None: """Get mute state. :class:`True` if muted, :class:`False` unmuted, :class:`None` if @@ -78,12 +89,13 @@ def get_mute(self): with _mixer_error_handling(self._mixer): mute = self._mixer.get_mute().get() - mute is None or validation.check_instance(mute, bool) + if mute is not None: + validation.check_instance(mute, bool) return mute return None - def set_mute(self, mute): + def set_mute(self, mute: bool) -> bool: """Set mute state. :class:`True` to mute, :class:`False` to unmute. 
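A small usage sketch for the mixer controller through a core proxy; the `core` name is an assumption, volumes are percentages in [0..100], and each proxy call returns a future:

    if core.mixer.set_volume(80).get():                   # True if a mixer handled it
        print("volume:", core.mixer.get_volume().get())   # 80, or None if unknown
    core.mixer.set_mute(True).get()
    print("muted:", core.mixer.get_mute().get())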
@@ -101,11 +113,18 @@ def set_mute(self, mute): return False - def _save_state(self): + def _save_state(self) -> MixerState: return MixerState(volume=self.get_volume(), mute=self.get_mute()) - def _load_state(self, state, coverage): + def _load_state(self, state: MixerState, coverage: Iterable[str]) -> None: if state and "mixer" in coverage: self.set_mute(state.mute) if state.volume: - self.set_volume(state.volume) + self.set_volume(Percentage(state.volume)) + + +class MixerControllerProxy: + get_volume = proxy_method(MixerController.get_volume) + set_volume = proxy_method(MixerController.set_volume) + get_mute = proxy_method(MixerController.get_mute) + set_mute = proxy_method(MixerController.set_mute) diff --git a/mopidy/core/playback.py b/src/mopidy/core/playback.py similarity index 70% rename from mopidy/core/playback.py rename to src/mopidy/core/playback.py index 8e9ce4ceb8..2b50168ca6 100644 --- a/mopidy/core/playback.py +++ b/src/mopidy/core/playback.py @@ -1,63 +1,79 @@ +from __future__ import annotations + import logging -import urllib +import urllib.parse +from collections.abc import Iterable +from typing import TYPE_CHECKING from pykka.messages import ProxyCall +from pykka.typing import proxy_method from mopidy.audio import PlaybackState from mopidy.core import listener -from mopidy.internal import deprecation, models, validation +from mopidy.exceptions import CoreError +from mopidy.internal import models, validation +from mopidy.types import DurationMs, UriScheme + +if TYPE_CHECKING: + from mopidy.audio.actor import AudioProxy + from mopidy.backend import BackendProxy + from mopidy.core.actor import Backends, Core + from mopidy.models import TlTrack, Track + from mopidy.types import Uri logger = logging.getLogger(__name__) class PlaybackController: - def __init__(self, audio, backends, core): + def __init__( + self, + audio: AudioProxy | None, + backends: Backends, + core: Core, + ) -> None: # TODO: these should be internal self.backends = backends self.core = core self._audio = audio - self._stream_title = None + self._stream_title: str | None = None self._state = PlaybackState.STOPPED - self._current_tl_track = None - self._pending_tl_track = None + self._current_tl_track: TlTrack | None = None + self._pending_tl_track: TlTrack | None = None - self._pending_position = None - self._last_position = None - self._previous = False + self._pending_position: DurationMs | None = None + self._last_position: DurationMs | None = None + self._previous: bool = False - self._start_at_position = None - self._start_paused = False + self._start_at_position: DurationMs | None = None + self._start_paused: bool = False if self._audio: - self._audio.set_about_to_finish_callback( - self._on_about_to_finish_callback - ) + self._audio.set_about_to_finish_callback(self._on_about_to_finish_callback) - def _get_backend(self, tl_track): + def _get_backend(self, tl_track: TlTrack | None) -> BackendProxy | None: if tl_track is None: return None - uri_scheme = urllib.parse.urlparse(tl_track.track.uri).scheme + uri_scheme = UriScheme(urllib.parse.urlparse(tl_track.track.uri).scheme) return self.backends.with_playback.get(uri_scheme, None) - def get_current_tl_track(self): + def get_current_tl_track(self) -> TlTrack | None: """Get the currently playing or selected track. Returns a :class:`mopidy.models.TlTrack` or :class:`None`. 
""" return self._current_tl_track - def _set_current_tl_track(self, value): + def _set_current_tl_track(self, value: TlTrack | None) -> None: """Set the currently playing or selected track. *Internal:* This is only for use by Mopidy's test suite. """ self._current_tl_track = value - def get_current_track(self): - """ - Get the currently playing or selected track. + def get_current_track(self) -> Track | None: + """Get the currently playing or selected track. Extracted from :meth:`get_current_tl_track` for convenience. @@ -65,9 +81,8 @@ def get_current_track(self): """ return getattr(self.get_current_tl_track(), "track", None) - def get_current_tlid(self): - """ - Get the currently playing or selected TLID. + def get_current_tlid(self) -> int | None: + """Get the currently playing or selected TLID. Extracted from :meth:`get_current_tl_track` for convenience. @@ -77,16 +92,15 @@ def get_current_tlid(self): """ return getattr(self.get_current_tl_track(), "tlid", None) - def get_stream_title(self): + def get_stream_title(self) -> str | None: """Get the current stream title or :class:`None`.""" return self._stream_title - def get_state(self): + def get_state(self) -> PlaybackState: """Get The playback state.""" - return self._state - def set_state(self, new_state): + def set_state(self, new_state: PlaybackState) -> None: """Set the playback state. Must be :attr:`PLAYING`, :attr:`PAUSED`, or :attr:`STOPPED`. @@ -110,24 +124,23 @@ def set_state(self, new_state): self._trigger_playback_state_changed(old_state, new_state) - def get_time_position(self): + def get_time_position(self) -> DurationMs: """Get time position in milliseconds.""" if self._pending_position is not None: return self._pending_position backend = self._get_backend(self.get_current_tl_track()) - if backend: - # TODO: Wrap backend call in error handling. - return backend.playback.get_time_position().get() - else: - return 0 + if not backend: + return DurationMs(0) + # TODO: Wrap backend call in error handling. + return backend.playback.get_time_position().get() - def _on_end_of_stream(self): + def _on_end_of_stream(self) -> None: self.set_state(PlaybackState.STOPPED) if self._current_tl_track: self._trigger_track_playback_ended(self.get_time_position()) self._set_current_tl_track(None) - def _on_stream_changed(self, uri): + def _on_stream_changed(self, _uri: Uri) -> None: if self._last_position is None: position = self.get_time_position() else: @@ -157,7 +170,7 @@ def _on_stream_changed(self, uri): self.set_state(PlaybackState.PLAYING) self._trigger_track_playback_started() - def _on_position_changed(self, position): + def _on_position_changed(self, _position: int) -> None: if self._pending_position is not None: self._trigger_seeked(self._pending_position) self._pending_position = None @@ -165,7 +178,7 @@ def _on_position_changed(self, position): self._start_paused = False self.pause() - def _on_about_to_finish_callback(self): + def _on_about_to_finish_callback(self) -> None: """Callback that performs a blocking actor call to the real callback. This is passed to audio, which is allowed to call this code from the @@ -175,21 +188,27 @@ def _on_about_to_finish_callback(self): """ self.core.actor_ref.ask( ProxyCall( - attr_path=["playback", "_on_about_to_finish"], - args=[], + attr_path=("playback", "_on_about_to_finish"), + args=(), kwargs={}, ) ) - def _on_about_to_finish(self): + def _on_about_to_finish(self) -> None: if self._state == PlaybackState.STOPPED: return # Unless overridden by other calls (e.g. 
next / previous / stop) this # will be the last position recorded until the track gets reassigned. - # TODO: Check if case when track.length isn't populated needs to be - # handled. - self._last_position = self._current_tl_track.track.length + if self._current_tl_track is not None: + if self._current_tl_track.track.length is not None: + self._last_position = DurationMs(self._current_tl_track.track.length) + else: + self._last_position = None + else: + # TODO: Check if case when track.length isn't populated needs to be + # handled. + pass pending = self.core.tracklist.eot_track(self._current_tl_track) # avoid endless loop if 'repeat' is 'true' and no track is playable @@ -216,9 +235,8 @@ def _on_about_to_finish(self): logger.info("No playable track in the list.") break - def _on_tracklist_change(self): - """ - Tell the playback controller that the current playlist has changed. + def _on_tracklist_change(self) -> None: + """Tell the playback controller that the current playlist has changed. Used by :class:`mopidy.core.TracklistController`. """ @@ -229,9 +247,8 @@ def _on_tracklist_change(self): elif self.get_current_tl_track() not in tl_tracks: self._set_current_tl_track(None) - def next(self): - """ - Change to the next track. + def next(self) -> None: + """Change to the next track. The current playback state will be kept. If it was playing, playing will continue. If it was paused, it will still be paused, etc. @@ -246,11 +263,7 @@ def next(self): pending = self.core.tracklist.next_track(current) if self._change(pending, state): break - else: - self.core.tracklist._mark_unplayable(pending) - # TODO: this could be needed to prevent a loop in rare cases - # if current == pending: - # break + self.core.tracklist._mark_unplayable(pending) current = pending count -= 1 if not count: @@ -259,55 +272,47 @@ def next(self): # TODO return result? - def pause(self): + def pause(self) -> None: """Pause playback.""" backend = self._get_backend(self.get_current_tl_track()) # TODO: Wrap backend call in error handling. if not backend or backend.playback.pause().get(): - # TODO: switch to: - # backend.track(pause) - # wait for state change? self.set_state(PlaybackState.PAUSED) self._trigger_track_playback_paused() - def play(self, tl_track=None, tlid=None): - """ - Play the given track, or if the given tl_track and tlid is - :class:`None`, play the currently active track. + def play( + self, + tlid: int | None = None, + ) -> None: + """Play a track from the tracklist, specified by the tracklist ID. - Note that the track **must** already be in the tracklist. + Note that the track must already be in the tracklist. - .. deprecated:: 3.0 + If no tracklist ID is provided, resume playback of the currently + active track. + + .. versionremoved:: 4.0 The ``tl_track`` argument. Use ``tlid`` instead. 
- :param tl_track: track to play - :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None` - :param tlid: TLID of the track to play - :type tlid: :class:`int` or :class:`None` + :param tlid: Tracklist ID of the track to play """ - if sum(o is not None for o in [tl_track, tlid]) > 1: - raise ValueError('At most one of "tl_track" and "tlid" may be set') - - tl_track is None or validation.check_instance(tl_track, models.TlTrack) - tlid is None or validation.check_integer(tlid, min=1) - - if tl_track: - deprecation.warn("core.playback.play:tl_track_kwarg") + if tlid is None and self.get_state() == PlaybackState.PAUSED: + self.resume() + return - if tl_track is None and tlid is not None: - for tl_track in self.core.tracklist.get_tl_tracks(): - if tl_track.tlid == tlid: + tl_track: TlTrack | None = None + if tlid is not None: + validation.check_integer(tlid, min=1) + for candidate_tl_track in self.core.tracklist.get_tl_tracks(): + if candidate_tl_track.tlid == tlid: + tl_track = candidate_tl_track break else: - tl_track = None - - if tl_track is not None: - # TODO: allow from outside tracklist, would make sense given refs? - if tl_track not in self.core.tracklist.get_tl_tracks(): - raise AssertionError - elif tl_track is None and self.get_state() == PlaybackState.PAUSED: - self.resume() - return + logger.info( + "Tried to play track with TLID %d, " + "but it was not found in the tracklist.", + tlid, + ) current = self._pending_tl_track or self._current_tl_track pending = tl_track or current or self.core.tracklist.next_track(None) @@ -318,8 +323,7 @@ def play(self, tl_track=None, tlid=None): while pending: if self._change(pending, PlaybackState.PLAYING): break - else: - self.core.tracklist._mark_unplayable(pending) + self.core.tracklist._mark_unplayable(pending) current = pending pending = self.core.tracklist.next_track(current) count -= 1 @@ -327,9 +331,11 @@ def play(self, tl_track=None, tlid=None): logger.info("No playable track in the list.") break - # TODO return result? - - def _change(self, pending_tl_track, state): + def _change( # noqa: PLR0911 + self, + pending_tl_track: TlTrack | None, + state: PlaybackState, + ) -> bool: self._pending_tl_track = pending_tl_track if not pending_tl_track: @@ -366,8 +372,7 @@ def _change(self, pending_tl_track, state): # TODO: check by binding against underlying play method using # inspect and otherwise re-raise? logger.error( - "%s needs to be updated to work with this " - "version of Mopidy.", + "%s needs to be updated to work with this version of Mopidy.", backend, ) return False @@ -379,11 +384,10 @@ def _change(self, pending_tl_track, state): self._pending_tl_track = None return True - raise Exception(f"Unknown state: {state}") + raise CoreError(f"Unknown playback state: {state}") - def previous(self): - """ - Change to the previous track. + def previous(self) -> None: + """Change to the previous track. The current playback state will be kept. If it was playing, playing will continue. If it was paused, it will still be paused, etc. @@ -399,11 +403,7 @@ def previous(self): pending = self.core.tracklist.previous_track(current) if self._change(pending, state): break - else: - self.core.tracklist._mark_unplayable(pending) - # TODO: this could be needed to prevent a loop in rare cases - # if current == pending: - # break + self.core.tracklist._mark_unplayable(pending) current = pending count -= 1 if not count: @@ -412,7 +412,7 @@ def previous(self): # TODO: no return value? 
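A minimal sketch of the 4.0 calling convention, where the caller resolves a TLID from the tracklist instead of passing a TlTrack; the `core` proxy name is an assumption:

    tl_tracks = core.tracklist.get_tl_tracks().get()
    if tl_tracks:
        core.playback.play(tlid=tl_tracks[0].tlid).get()

    # Without a TLID, play() resumes a paused track or starts the pending/current one:
    # core.playback.play().get()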
- def resume(self): + def resume(self) -> None: """If paused, resume playing the current track.""" if self.get_state() != PlaybackState.PAUSED: return @@ -422,24 +422,20 @@ def resume(self): self.set_state(PlaybackState.PLAYING) # TODO: trigger via gst messages self._trigger_track_playback_resumed() - # TODO: switch to: - # backend.resume() - # wait for state change? - def seek(self, time_position): - """ - Seeks to time position given in milliseconds. + def seek(self, time_position: DurationMs) -> bool: + """Seeks to time position given in milliseconds. + + Returns :class:`True` if successful, else :class:`False`. :param time_position: time position in milliseconds - :type time_position: int - :rtype: :class:`True` if successful, else :class:`False` """ # TODO: seek needs to take pending tracks into account :( validation.check_integer(time_position) if time_position < 0: logger.debug("Client seeked to negative position. Seeking to zero.") - time_position = 0 + time_position = DurationMs(0) if not self.core.tracklist.get_length(): return False @@ -450,11 +446,11 @@ def seek(self, time_position): # We need to prefer the still playing track, but if nothing is playing # we fall back to the pending one. tl_track = self._current_tl_track or self._pending_tl_track - if tl_track and tl_track.track.length is None: + if tl_track is None or tl_track.track.length is None: return False if time_position < 0: - time_position = 0 + time_position = DurationMs(0) elif time_position > tl_track.track.length: # TODO: GStreamer will trigger a about-to-finish for us, use that? self.next() @@ -466,19 +462,19 @@ def seek(self, time_position): # Make sure we switch back to previous track if we get a seek while we # have a pending track. if self._current_tl_track and self._pending_tl_track: - self._change(self._current_tl_track, self.get_state()) - else: - # TODO: Avoid returning False here when STOPPED (seek is deferred)? - return self._seek(time_position) + return self._change(self._current_tl_track, self.get_state()) - def _seek(self, time_position): + # TODO: Avoid returning False here when STOPPED (seek is deferred)? + return self._seek(time_position) + + def _seek(self, time_position: DurationMs) -> bool: backend = self._get_backend(self.get_current_tl_track()) if not backend: return False # TODO: Wrap backend call in error handling. 
return backend.playback.seek(time_position).get() - def stop(self): + def stop(self) -> None: """Stop playing.""" if self.get_state() != PlaybackState.STOPPED: self._last_position = self.get_time_position() @@ -487,7 +483,7 @@ def stop(self): if not backend or backend.playback.stop().get(): self.set_state(PlaybackState.STOPPED) - def _trigger_track_playback_paused(self): + def _trigger_track_playback_paused(self) -> None: logger.debug("Triggering track playback paused event") if self.get_current_tl_track() is None: return @@ -497,7 +493,7 @@ def _trigger_track_playback_paused(self): time_position=self.get_time_position(), ) - def _trigger_track_playback_resumed(self): + def _trigger_track_playback_resumed(self) -> None: logger.debug("Triggering track playback resumed event") if self.get_current_tl_track() is None: return @@ -507,17 +503,19 @@ def _trigger_track_playback_resumed(self): time_position=self.get_time_position(), ) - def _trigger_track_playback_started(self): + def _trigger_track_playback_started(self) -> None: if self.get_current_tl_track() is None: return logger.debug("Triggering track playback started event") tl_track = self.get_current_tl_track() + if tl_track is None: + return self.core.tracklist._mark_playing(tl_track) self.core.history._add_track(tl_track.track) listener.CoreListener.send("track_playback_started", tl_track=tl_track) - def _trigger_track_playback_ended(self, time_position_before_stop): + def _trigger_track_playback_ended(self, time_position_before_stop: int) -> None: tl_track = self.get_current_tl_track() if tl_track is None: return @@ -535,28 +533,49 @@ def _trigger_track_playback_ended(self, time_position_before_stop): time_position=time_position_before_stop, ) - def _trigger_playback_state_changed(self, old_state, new_state): + def _trigger_playback_state_changed( + self, + old_state: PlaybackState, + new_state: PlaybackState, + ) -> None: logger.debug("Triggering playback state change event") listener.CoreListener.send( "playback_state_changed", old_state=old_state, new_state=new_state ) - def _trigger_seeked(self, time_position): + def _trigger_seeked(self, time_position: int) -> None: # TODO: Trigger this from audio events? 
logger.debug("Triggering seeked event") listener.CoreListener.send("seeked", time_position=time_position) - def _save_state(self): + def _save_state(self) -> models.PlaybackState: return models.PlaybackState( tlid=self.get_current_tlid(), time_position=self.get_time_position(), state=self.get_state(), ) - def _load_state(self, state, coverage): + def _load_state(self, state: models.PlaybackState, coverage: Iterable[str]) -> None: if state and "play-last" in coverage and state.tlid is not None: if state.state == PlaybackState.PAUSED: self._start_paused = True if state.state in (PlaybackState.PLAYING, PlaybackState.PAUSED): - self._start_at_position = state.time_position + self._start_at_position = DurationMs(state.time_position) self.play(tlid=state.tlid) + + +class PlaybackControllerProxy: + get_current_tl_track = proxy_method(PlaybackController.get_current_tl_track) + get_current_track = proxy_method(PlaybackController.get_current_track) + get_current_tlid = proxy_method(PlaybackController.get_current_tlid) + get_stream_title = proxy_method(PlaybackController.get_stream_title) + get_state = proxy_method(PlaybackController.get_state) + set_state = proxy_method(PlaybackController.set_state) + get_time_position = proxy_method(PlaybackController.get_time_position) + next = proxy_method(PlaybackController.next) + pause = proxy_method(PlaybackController.pause) + play = proxy_method(PlaybackController.play) + previous = proxy_method(PlaybackController.previous) + resume = proxy_method(PlaybackController.resume) + seek = proxy_method(PlaybackController.seek) + stop = proxy_method(PlaybackController.stop) diff --git a/mopidy/core/playlists.py b/src/mopidy/core/playlists.py similarity index 71% rename from mopidy/core/playlists.py rename to src/mopidy/core/playlists.py index 5dff83cb28..a04c6bcac0 100644 --- a/mopidy/core/playlists.py +++ b/src/mopidy/core/playlists.py @@ -1,17 +1,32 @@ +from __future__ import annotations + import contextlib import logging -import urllib +import urllib.parse +from collections.abc import Generator +from typing import TYPE_CHECKING, Any + +from pykka.typing import proxy_method from mopidy import exceptions from mopidy.core import listener from mopidy.internal import validation from mopidy.models import Playlist, Ref +from mopidy.types import UriScheme logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from mopidy.backend import BackendProxy + from mopidy.core.actor import Backends, Core + from mopidy.types import Uri + @contextlib.contextmanager -def _backend_error_handling(backend, reraise=None): +def _backend_error_handling( + backend: BackendProxy, + reraise: None | (type[Exception] | tuple[type[Exception], ...]) = None, +) -> Generator[None, Any, None]: try: yield except exceptions.ValidationError as e: @@ -30,30 +45,24 @@ def _backend_error_handling(backend, reraise=None): class PlaylistsController: - def __init__(self, backends, core): + def __init__(self, backends: Backends, core: Core) -> None: self.backends = backends self.core = core - def get_uri_schemes(self): - """ - Get the list of URI schemes that support playlists. - - :rtype: list of string + def get_uri_schemes(self) -> list[UriScheme]: + """Get the list of URI schemes that support playlists. .. versionadded:: 2.0 """ - return list(sorted(self.backends.with_playlists.keys())) + return sorted(self.backends.with_playlists.keys()) - def as_list(self): - """ - Get a list of the currently available playlists. + def as_list(self) -> list[Ref]: + """Get a list of the currently available playlists. 
Returns a list of :class:`~mopidy.models.Ref` objects referring to the playlists. In other words, no information about the playlists' content is given. - :rtype: list of :class:`mopidy.models.Ref` - .. versionadded:: 1.0 """ futures = { @@ -72,16 +81,14 @@ def as_list(self): except NotImplementedError: backend_name = b.actor_ref.actor_class.__name__ logger.warning( - "%s does not implement playlists.as_list(). " - "Please upgrade it.", + "%s does not implement playlists.as_list(). Please upgrade it.", backend_name, ) return results - def get_items(self, uri): - """ - Get the items in a playlist specified by ``uri``. + def get_items(self, uri: Uri) -> list[Ref] | None: + """Get the items in a playlist specified by ``uri``. Returns a list of :class:`~mopidy.models.Ref` objects referring to the playlist's items. @@ -89,13 +96,11 @@ def get_items(self, uri): If a playlist with the given ``uri`` doesn't exist, it returns :class:`None`. - :rtype: list of :class:`mopidy.models.Ref`, or :class:`None` - .. versionadded:: 1.0 """ validation.check_uri(uri) - uri_scheme = urllib.parse.urlparse(uri).scheme + uri_scheme = UriScheme(urllib.parse.urlparse(uri).scheme) backend = self.backends.with_playlists.get(uri_scheme, None) if not backend: @@ -103,14 +108,18 @@ def get_items(self, uri): with _backend_error_handling(backend): items = backend.playlists.get_items(uri).get() - items is None or validation.check_instances(items, Ref) + if items is not None: + validation.check_instances(items, Ref) return items return None - def create(self, name, uri_scheme=None): - """ - Create a new playlist. + def create( + self, + name: str, + uri_scheme: UriScheme | None = None, + ) -> Playlist | None: + """Create a new playlist. If ``uri_scheme`` matches an URI scheme handled by a current backend, that backend is asked to create the playlist. If ``uri_scheme`` is @@ -121,10 +130,7 @@ def create(self, name, uri_scheme=None): by creating new instances of :class:`mopidy.models.Playlist`. :param name: name of the new playlist - :type name: string :param uri_scheme: use the backend matching the URI scheme - :type uri_scheme: string - :rtype: :class:`mopidy.models.Playlist` or :class:`None` """ if uri_scheme in self.backends.with_playlists: backends = [self.backends.with_playlists[uri_scheme]] @@ -142,9 +148,8 @@ def create(self, name, uri_scheme=None): return None - def delete(self, uri): - """ - Delete playlist identified by the URI. + def delete(self, uri: Uri) -> bool: + """Delete playlist identified by the URI. If the URI doesn't match the URI schemes handled by the current backends, nothing happens. @@ -152,15 +157,13 @@ def delete(self, uri): Returns :class:`True` if deleted, :class:`False` otherwise. :param uri: URI of the playlist to delete - :type uri: string - :rtype: :class:`bool` .. versionchanged:: 2.2 Return type defined. """ validation.check_uri(uri) - uri_scheme = urllib.parse.urlparse(uri).scheme + uri_scheme = UriScheme(urllib.parse.urlparse(uri).scheme) backend = self.backends.with_playlists.get(uri_scheme, None) if not backend: return False @@ -179,32 +182,29 @@ def delete(self, uri): return success - def lookup(self, uri): - """ - Lookup playlist with given URI in both the set of playlists and in any + def lookup(self, uri: Uri) -> Playlist | None: + """Lookup playlist with given URI in both the set of playlists and in any other playlist sources. Returns :class:`None` if not found. 
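A short sketch of a playlist round trip through a core proxy; the `core` name and the ``m3u`` URI scheme are assumptions for illustration:

    playlist = core.playlists.create("Party mix", uri_scheme="m3u").get()
    if playlist is not None:
        items = core.playlists.get_items(playlist.uri).get()  # [] for a fresh playlist
        core.playlists.delete(playlist.uri).get()             # True if the backend deleted it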
:param uri: playlist URI - :type uri: string - :rtype: :class:`mopidy.models.Playlist` or :class:`None` """ - uri_scheme = urllib.parse.urlparse(uri).scheme + uri_scheme = UriScheme(urllib.parse.urlparse(uri).scheme) backend = self.backends.with_playlists.get(uri_scheme, None) if not backend: return None with _backend_error_handling(backend): playlist = backend.playlists.lookup(uri).get() - playlist is None or validation.check_instance(playlist, Playlist) + if playlist is not None: + validation.check_instance(playlist, Playlist) return playlist return None # TODO: there is an inconsistency between library.refresh(uri) and this # call, not sure how to sort this out. - def refresh(self, uri_scheme=None): - """ - Refresh the playlists in :attr:`playlists`. + def refresh(self, uri_scheme: UriScheme | None = None) -> None: + """Refresh the playlists in :attr:`playlists`. If ``uri_scheme`` is :class:`None`, all backends are asked to refresh. If ``uri_scheme`` is an URI scheme handled by a backend, only that @@ -212,7 +212,6 @@ def refresh(self, uri_scheme=None): current backend, nothing happens. :param uri_scheme: limit to the backend matching the URI scheme - :type uri_scheme: string """ # TODO: check: uri_scheme is None or uri_scheme? @@ -235,9 +234,8 @@ def refresh(self, uri_scheme=None): if playlists_loaded: listener.CoreListener.send("playlists_loaded") - def save(self, playlist): - """ - Save the playlist. + def save(self, playlist: Playlist) -> Playlist | None: + """Save the playlist. For a playlist to be saveable, it must have the ``uri`` attribute set. You must not set the ``uri`` atribute yourself, but use playlist @@ -254,27 +252,35 @@ def save(self, playlist): current backend, nothing is done and :class:`None` is returned. :param playlist: the playlist - :type playlist: :class:`mopidy.models.Playlist` - :rtype: :class:`mopidy.models.Playlist` or :class:`None` """ validation.check_instance(playlist, Playlist) if playlist.uri is None: - return # TODO: log this problem? + return None # TODO: log this problem? 
- uri_scheme = urllib.parse.urlparse(playlist.uri).scheme + uri_scheme = UriScheme(urllib.parse.urlparse(playlist.uri).scheme) backend = self.backends.with_playlists.get(uri_scheme, None) if not backend: return None # TODO: we let AssertionError error through due to legacy tests :/ with _backend_error_handling(backend, reraise=AssertionError): - playlist = backend.playlists.save(playlist).get() - playlist is None or validation.check_instance(playlist, Playlist) - if playlist: - listener.CoreListener.send( - "playlist_changed", playlist=playlist - ) - return playlist + result = backend.playlists.save(playlist).get() + if result is not None: + validation.check_instance(result, Playlist) + if result: + listener.CoreListener.send("playlist_changed", playlist=result) + return result return None + + +class PlaylistsControllerProxy: + get_uri_schemes = proxy_method(PlaylistsController.get_uri_schemes) + as_list = proxy_method(PlaylistsController.as_list) + get_items = proxy_method(PlaylistsController.get_items) + create = proxy_method(PlaylistsController.create) + delete = proxy_method(PlaylistsController.delete) + lookup = proxy_method(PlaylistsController.lookup) + refresh = proxy_method(PlaylistsController.refresh) + save = proxy_method(PlaylistsController.save) diff --git a/mopidy/core/tracklist.py b/src/mopidy/core/tracklist.py similarity index 69% rename from mopidy/core/tracklist.py rename to src/mopidy/core/tracklist.py index 9d425efd80..edeb837ca1 100644 --- a/mopidy/core/tracklist.py +++ b/src/mopidy/core/tracklist.py @@ -1,5 +1,11 @@ +from __future__ import annotations + import logging import random +from collections.abc import Iterable +from typing import TYPE_CHECKING + +from pykka.typing import proxy_method from mopidy import exceptions from mopidy.core import listener @@ -9,47 +15,50 @@ logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from mopidy.core.actor import Core + from mopidy.types import Query, TracklistField, Uri + class TracklistController: - def __init__(self, core): + def __init__(self, core: Core) -> None: self.core = core - self._next_tlid = 1 - self._tl_tracks = [] - self._version = 0 + self._next_tlid: int = 1 + self._tl_tracks: list[TlTrack] = [] + self._version: int = 0 - self._consume = False - self._random = False - self._shuffled = [] - self._repeat = False - self._single = False + self._consume: bool = False + self._random: bool = False + self._shuffled: list[TlTrack] = [] + self._repeat: bool = False + self._single: bool = False - def get_tl_tracks(self): + def get_tl_tracks(self) -> list[TlTrack]: """Get tracklist as list of :class:`mopidy.models.TlTrack`.""" return self._tl_tracks[:] - def get_tracks(self): + def get_tracks(self) -> list[Track]: """Get tracklist as list of :class:`mopidy.models.Track`.""" return [tl_track.track for tl_track in self._tl_tracks] - def get_length(self): + def get_length(self) -> int: """Get length of the tracklist.""" return len(self._tl_tracks) - def get_version(self): - """ - Get the tracklist version. + def get_version(self) -> int: + """Get the tracklist version. Integer which is increased every time the tracklist is changed. Is not reset before Mopidy is restarted. """ return self._version - def _increase_version(self): + def _increase_version(self) -> None: self._version += 1 self.core.playback._on_tracklist_change() self._trigger_tracklist_changed() - def get_consume(self): + def get_consume(self) -> bool: """Get consume mode. 
:class:`True` @@ -59,7 +68,7 @@ def get_consume(self): """ return self._consume - def set_consume(self, value): + def set_consume(self, value: bool) -> None: """Set consume mode. :class:`True` @@ -72,7 +81,7 @@ def set_consume(self, value): self._trigger_options_changed() self._consume = value - def get_random(self): + def get_random(self) -> bool: """Get random mode. :class:`True` @@ -82,7 +91,7 @@ def get_random(self): """ return self._random - def set_random(self, value): + def set_random(self, value: bool) -> None: """Set random mode. :class:`True` @@ -98,9 +107,8 @@ def set_random(self, value): random.shuffle(self._shuffled) self._random = value - def get_repeat(self): - """ - Get repeat mode. + def get_repeat(self) -> bool: + """Get repeat mode. :class:`True` The tracklist is played repeatedly. @@ -109,9 +117,8 @@ def get_repeat(self): """ return self._repeat - def set_repeat(self, value): - """ - Set repeat mode. + def set_repeat(self, value: bool) -> None: + """Set repeat mode. To repeat a single track, set both ``repeat`` and ``single``. @@ -125,9 +132,8 @@ def set_repeat(self, value): self._trigger_options_changed() self._repeat = value - def get_single(self): - """ - Get single mode. + def get_single(self) -> bool: + """Get single mode. :class:`True` Playback is stopped after current song, unless in ``repeat`` mode. @@ -136,9 +142,8 @@ def get_single(self): """ return self._single - def set_single(self, value): - """ - Set single mode. + def set_single(self, value: bool) -> None: + """Set single mode. :class:`True` Playback is stopped after current song, unless in ``repeat`` mode. @@ -150,24 +155,26 @@ def set_single(self, value): self._trigger_options_changed() self._single = value - def index(self, tl_track=None, tlid=None): - """ - The position of the given track in the tracklist. + def index( + self, + tl_track: TlTrack | None = None, + tlid: int | None = None, + ) -> int | None: + """The position of the given track in the tracklist. If neither *tl_track* or *tlid* is given we return the index of the currently playing track. :param tl_track: the track to find the index of - :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None` :param tlid: TLID of the track to find the index of - :type tlid: :class:`int` or :class:`None` - :rtype: :class:`int` or :class:`None` .. versionadded:: 1.1 The *tlid* parameter """ - tl_track is None or validation.check_instance(tl_track, TlTrack) - tlid is None or validation.check_integer(tlid, min=1) + if tl_track is not None: + validation.check_instance(tl_track, TlTrack) + if tlid is not None: + validation.check_integer(tlid, min=1) if tl_track is None and tlid is None: tl_track = self.core.playback.get_current_tl_track() @@ -183,17 +190,13 @@ def index(self, tl_track=None, tlid=None): return i return None - def get_eot_tlid(self): - """ - The TLID of the track that will be played after the current track. + def get_eot_tlid(self) -> int | None: + """The TLID of the track that will be played after the current track. Not necessarily the same TLID as returned by :meth:`get_next_tlid`. - :rtype: :class:`int` or :class:`None` - .. versionadded:: 1.1 """ - current_tl_track = self.core.playback.get_current_tl_track() with deprecation.ignore("core.tracklist.eot_track"): @@ -201,9 +204,8 @@ def get_eot_tlid(self): return getattr(eot_tl_track, "tlid", None) - def eot_track(self, tl_track): - """ - The track that will be played after the given track. 
+ def eot_track(self, tl_track: TlTrack | None) -> TlTrack | None: + """The track that will be played after the given track. Not necessarily the same track as :meth:`next_track`. @@ -211,14 +213,13 @@ def eot_track(self, tl_track): Use :meth:`get_eot_tlid` instead. :param tl_track: the reference track - :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None` - :rtype: :class:`mopidy.models.TlTrack` or :class:`None` """ deprecation.warn("core.tracklist.eot_track") - tl_track is None or validation.check_instance(tl_track, TlTrack) + if tl_track is not None: + validation.check_instance(tl_track, TlTrack) if self.get_single() and self.get_repeat(): return tl_track - elif self.get_single(): + if self.get_single(): return None # Current difference between next and EOT handling is that EOT needs to @@ -226,9 +227,8 @@ def eot_track(self, tl_track): # shared. return self.next_track(tl_track) - def get_next_tlid(self): - """ - The tlid of the track that will be played if calling + def get_next_tlid(self) -> int | None: + """The tlid of the track that will be played if calling :meth:`mopidy.core.PlaybackController.next()`. For normal playback this is the next track in the tracklist. If repeat @@ -236,8 +236,6 @@ def get_next_tlid(self): enabled this should be a random track, all tracks should be played once before the tracklist repeats. - :rtype: :class:`int` or :class:`None` - .. versionadded:: 1.1 """ current_tl_track = self.core.playback.get_current_tl_track() @@ -247,9 +245,8 @@ def get_next_tlid(self): return getattr(next_tl_track, "tlid", None) - def next_track(self, tl_track): - """ - The track that will be played if calling + def next_track(self, tl_track: TlTrack | None) -> TlTrack | None: + """The track that will be played if calling :meth:`mopidy.core.PlaybackController.next()`. For normal playback this is the next track in the tracklist. If repeat @@ -261,20 +258,22 @@ def next_track(self, tl_track): Use :meth:`get_next_tlid` instead. :param tl_track: the reference track - :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None` - :rtype: :class:`mopidy.models.TlTrack` or :class:`None` """ deprecation.warn("core.tracklist.next_track") - tl_track is None or validation.check_instance(tl_track, TlTrack) + if tl_track is not None: + validation.check_instance(tl_track, TlTrack) if not self._tl_tracks: return None - if self.get_random() and not self._shuffled: - if self.get_repeat() or not tl_track: - logger.debug("Shuffling tracks") - self._shuffled = self._tl_tracks[:] - random.shuffle(self._shuffled) + if ( + self.get_random() + and not self._shuffled + and (self.get_repeat() or not tl_track) + ): + logger.debug("Shuffling tracks") + self._shuffled = self._tl_tracks[:] + random.shuffle(self._shuffled) if self.get_random(): if self._shuffled: @@ -290,24 +289,20 @@ def next_track(self, tl_track): if self.get_repeat(): if self.get_consume() and len(self._tl_tracks) == 1: return None - else: - next_index %= len(self._tl_tracks) + next_index %= len(self._tl_tracks) elif next_index >= len(self._tl_tracks): return None return self._tl_tracks[next_index] - def get_previous_tlid(self): - """ - Returns the TLID of the track that will be played if calling + def get_previous_tlid(self) -> int | None: + """Returns the TLID of the track that will be played if calling :meth:`mopidy.core.PlaybackController.previous()`. For normal playback this is the previous track in the tracklist. If random and/or consume is enabled it should return the current track instead. 
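A brief sketch of how the mode flags interact with the TLID helpers above, via a core proxy; the `core` name is an assumption:

    core.tracklist.set_random(True).get()
    core.tracklist.set_repeat(True).get()

    # With random set, the next TLID is drawn from an internal shuffled copy of
    # the tracklist; with repeat also set, that copy is reshuffled once exhausted.
    next_tlid = core.tracklist.get_next_tlid().get()
    eot_tlid = core.tracklist.get_eot_tlid().get()  # differs from next_tlid when single mode is set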
- :rtype: :class:`int` or :class:`None` - .. versionadded:: 1.1 """ current_tl_track = self.core.playback.get_current_tl_track() @@ -317,9 +312,8 @@ def get_previous_tlid(self): return getattr(previous_tl_track, "tlid", None) - def previous_track(self, tl_track): - """ - Returns the track that will be played if calling + def previous_track(self, tl_track: TlTrack | None) -> TlTrack | None: + """Returns the track that will be played if calling :meth:`mopidy.core.PlaybackController.previous()`. For normal playback this is the previous track in the tracklist. If @@ -330,11 +324,10 @@ def previous_track(self, tl_track): Use :meth:`get_previous_tlid` instead. :param tl_track: the reference track - :type tl_track: :class:`mopidy.models.TlTrack` or :class:`None` - :rtype: :class:`mopidy.models.TlTrack` or :class:`None` """ deprecation.warn("core.tracklist.previous_track") - tl_track is None or validation.check_instance(tl_track, TlTrack) + if tl_track is not None: + validation.check_instance(tl_track, TlTrack) if self.get_repeat() or self.get_consume() or self.get_random(): return tl_track @@ -348,9 +341,13 @@ def previous_track(self, tl_track): # 1 - len(tracks) Thus 'position - 1' will always be within the list. return self._tl_tracks[position - 1] - def add(self, tracks=None, at_position=None, uris=None): - """ - Add tracks to the tracklist. + def add( # noqa: C901 + self, + tracks: Iterable[Track] | None = None, + at_position: int | None = None, + uris: Iterable[Uri] | None = None, + ) -> list[TlTrack]: + """Add tracks to the tracklist. If ``uris`` is given instead of ``tracks``, the URIs are looked up in the library and the resulting tracks are added to the @@ -363,12 +360,8 @@ def add(self, tracks=None, at_position=None, uris=None): Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event. :param tracks: tracks to add - :type tracks: list of :class:`mopidy.models.Track` or :class:`None` :param at_position: position in tracklist to add tracks - :type at_position: int or :class:`None` :param uris: list of URIs for tracks to add - :type uris: list of string or :class:`None` - :rtype: list of :class:`mopidy.models.TlTrack` .. versionadded:: 1.0 The ``uris`` argument. @@ -379,8 +372,10 @@ def add(self, tracks=None, at_position=None, uris=None): if sum(o is not None for o in [tracks, uris]) != 1: raise ValueError('Exactly one of "tracks" or "uris" must be set') - tracks is None or validation.check_instances(tracks, Track) - uris is None or validation.check_uris(uris) + if tracks is not None: + validation.check_instances(tracks, Track) + if uris is not None: + validation.check_uris(uris) validation.check_integer(at_position or 0) if tracks: @@ -388,6 +383,7 @@ def add(self, tracks=None, at_position=None, uris=None): if tracks is None: tracks = [] + assert uris is not None track_map = self.core.library.lookup(uris=uris) for uri in uris: tracks.extend(track_map[uri]) @@ -415,18 +411,16 @@ def add(self, tracks=None, at_position=None, uris=None): return tl_tracks - def clear(self): - """ - Clear the tracklist. + def clear(self) -> None: + """Clear the tracklist. Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event. """ self._tl_tracks = [] self._increase_version() - def filter(self, criteria): - """ - Filter the tracklist by the given criteria. + def filter(self, criteria: Query[TracklistField]) -> list[TlTrack]: + """Filter the tracklist by the given criteria. Each rule in the criteria consists of a model field and a list of values to compare it against. 
If the model field matches any of the @@ -447,32 +441,26 @@ def filter(self, criteria): filter({'tlid': [1, 3, 6], 'uri': ['xyz', 'abc']}) :param criteria: one or more rules to match by - :type criteria: dict, of (string, list) pairs - :rtype: list of :class:`mopidy.models.TlTrack` """ tlids = criteria.pop("tlid", []) - validation.check_query(criteria, validation.TRACKLIST_FIELDS) + validation.check_query(criteria, validation.TRACKLIST_FIELDS.keys()) validation.check_instances(tlids, int) matches = self._tl_tracks - for (key, values) in criteria.items(): + for key, values in criteria.items(): matches = [ct for ct in matches if getattr(ct.track, key) in values] if tlids: matches = [ct for ct in matches if ct.tlid in tlids] return matches - def move(self, start, end, to_position): - """ - Move the tracks in the slice ``[start:end]`` to ``to_position``. + def move(self, start: int, end: int, to_position: int) -> None: + """Move the tracks in the slice ``[start:end]`` to ``to_position``. Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event. :param start: position of first track to move - :type start: int :param end: position after last track to move - :type end: int :param to_position: new position for the tracks - :type to_position: int """ if start == end: end += 1 @@ -489,9 +477,7 @@ def move(self, start, end, to_position): if to_position < 0: raise AssertionError("to_position must be at least zero") if to_position > len(tl_tracks): - raise AssertionError( - "to_position can not be larger than tracklist length" - ) + raise AssertionError("to_position can not be larger than tracklist length") new_tl_tracks = tl_tracks[:start] + tl_tracks[end:] for tl_track in tl_tracks[start:end]: @@ -500,17 +486,16 @@ def move(self, start, end, to_position): self._tl_tracks = new_tl_tracks self._increase_version() - def remove(self, criteria): - """ - Remove the matching tracks from the tracklist. + def remove(self, criteria: Query[TracklistField]) -> list[TlTrack]: + """Remove the matching tracks from the tracklist. Uses :meth:`filter()` to lookup the tracks to remove. Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event. + Returns the removed tracks. + :param criteria: one or more rules to match by - :type criteria: dict, of (string, list) pairs - :rtype: list of :class:`mopidy.models.TlTrack` that were removed """ tl_tracks = self.filter(criteria) for tl_track in tl_tracks: @@ -519,34 +504,26 @@ def remove(self, criteria): self._increase_version() return tl_tracks - def shuffle(self, start=None, end=None): - """ - Shuffles the entire tracklist. If ``start`` and ``end`` is given only + def shuffle(self, start: int | None = None, end: int | None = None) -> None: + """Shuffles the entire tracklist. If ``start`` and ``end`` is given only shuffles the slice ``[start:end]``. Triggers the :meth:`mopidy.core.CoreListener.tracklist_changed` event. :param start: position of first track to shuffle - :type start: int or :class:`None` :param end: position after last track to shuffle - :type end: int or :class:`None` """ tl_tracks = self._tl_tracks # TOOD: use validation helpers? 
- if start is not None and end is not None: - if start >= end: - raise AssertionError("start must be smaller than end") - - if start is not None: - if start < 0: - raise AssertionError("start must be at least zero") - - if end is not None: - if end > len(tl_tracks): - raise AssertionError( - "end can not be larger than " + "tracklist length" - ) + if start is not None and end is not None and start >= end: + raise AssertionError("start must be smaller than end") + + if start is not None and start < 0: + raise AssertionError("start must be at least zero") + + if end is not None and end > len(tl_tracks): + raise AssertionError("end can not be larger than tracklist length") before = tl_tracks[: start or 0] shuffled = tl_tracks[start:end] @@ -555,41 +532,40 @@ def shuffle(self, start=None, end=None): self._tl_tracks = before + shuffled + after self._increase_version() - def slice(self, start, end): - """ - Returns a slice of the tracklist, limited by the given start and end + def slice(self, start: int, end: int) -> list[TlTrack]: + """Returns a slice of the tracklist, limited by the given start and end positions. :param start: position of first track to include in slice - :type start: int :param end: position after last track to include in slice - :type end: int - :rtype: :class:`mopidy.models.TlTrack` """ # TODO: validate slice? return self._tl_tracks[start:end] - def _mark_playing(self, tl_track): + def _mark_playing(self, tl_track: TlTrack) -> None: """Internal method for :class:`mopidy.core.PlaybackController`.""" if self.get_random() and tl_track in self._shuffled: self._shuffled.remove(tl_track) - def _mark_unplayable(self, tl_track): + def _mark_unplayable(self, tl_track: TlTrack | None) -> None: """Internal method for :class:`mopidy.core.PlaybackController`.""" - logger.warning("Track is not playable: %s", tl_track.track.uri) + logger.warning( + "Track is not playable: %s", + tl_track.track.uri if tl_track else None, + ) if self.get_consume() and tl_track is not None: self.remove({"tlid": [tl_track.tlid]}) if self.get_random() and tl_track in self._shuffled: self._shuffled.remove(tl_track) - def _mark_played(self, tl_track): + def _mark_played(self, tl_track: TlTrack | None) -> bool: """Internal method for :class:`mopidy.core.PlaybackController`.""" if self.get_consume() and tl_track is not None: self.remove({"tlid": [tl_track.tlid]}) return True return False - def _trigger_tracklist_changed(self): + def _trigger_tracklist_changed(self) -> None: if self.get_random(): self._shuffled = self._tl_tracks[:] random.shuffle(self._shuffled) @@ -599,11 +575,11 @@ def _trigger_tracklist_changed(self): logger.debug("Triggering event: tracklist_changed()") listener.CoreListener.send("tracklist_changed") - def _trigger_options_changed(self): + def _trigger_options_changed(self) -> None: logger.debug("Triggering options changed event") listener.CoreListener.send("options_changed") - def _save_state(self): + def _save_state(self) -> TracklistState: return TracklistState( tl_tracks=self._tl_tracks, next_tlid=self._next_tlid, @@ -613,7 +589,11 @@ def _save_state(self): single=self.get_single(), ) - def _load_state(self, state, coverage): + def _load_state( + self, + state: TracklistState, + coverage: Iterable[str], + ) -> None: if state: if "mode" in coverage: self.set_consume(state.consume) @@ -624,3 +604,32 @@ def _load_state(self, state, coverage): self._next_tlid = max(state.next_tlid, self._next_tlid) self._tl_tracks = list(state.tl_tracks) self._increase_version() + + +class 
TracklistControllerProxy: + get_tl_tracks = proxy_method(TracklistController.get_tl_tracks) + get_tracks = proxy_method(TracklistController.get_tracks) + get_length = proxy_method(TracklistController.get_length) + get_version = proxy_method(TracklistController.get_version) + get_consume = proxy_method(TracklistController.get_consume) + set_consume = proxy_method(TracklistController.set_consume) + get_random = proxy_method(TracklistController.get_random) + set_random = proxy_method(TracklistController.set_random) + get_repeat = proxy_method(TracklistController.get_repeat) + set_repeat = proxy_method(TracklistController.set_repeat) + get_single = proxy_method(TracklistController.get_single) + set_single = proxy_method(TracklistController.set_single) + index = proxy_method(TracklistController.index) + get_eot_tlid = proxy_method(TracklistController.get_eot_tlid) + eot_track = proxy_method(TracklistController.eot_track) + get_next_tlid = proxy_method(TracklistController.get_next_tlid) + next_track = proxy_method(TracklistController.next_track) + get_previous_tlid = proxy_method(TracklistController.get_previous_tlid) + previous_track = proxy_method(TracklistController.previous_track) + add = proxy_method(TracklistController.add) + clear = proxy_method(TracklistController.clear) + filter = proxy_method(TracklistController.filter) + move = proxy_method(TracklistController.move) + remove = proxy_method(TracklistController.remove) + shuffle = proxy_method(TracklistController.shuffle) + slice = proxy_method(TracklistController.slice) diff --git a/mopidy/exceptions.py b/src/mopidy/exceptions.py similarity index 91% rename from mopidy/exceptions.py rename to src/mopidy/exceptions.py index 0c1ebd204d..a067f82b32 100644 --- a/mopidy/exceptions.py +++ b/src/mopidy/exceptions.py @@ -5,10 +5,10 @@ def __init__(self, message, *args, **kwargs): @property def message(self): - """Reimplement message field that was deprecated in Python 2.6""" + """Reimplement message field that was deprecated in Python 2.6.""" return self._message - @message.setter # noqa + @message.setter def message(self, message): self._message = message @@ -45,7 +45,7 @@ def __init__(self, message, errno=None): self.errno = errno -class AudioException(MopidyException): # noqa: N818 +class AudioException(MopidyException): pass diff --git a/mopidy/ext.py b/src/mopidy/ext.py similarity index 75% rename from mopidy/ext.py rename to src/mopidy/ext.py index a2f7799815..9861f030db 100644 --- a/mopidy/ext.py +++ b/src/mopidy/ext.py @@ -2,38 +2,37 @@ import logging from collections.abc import Mapping +from importlib import metadata from typing import TYPE_CHECKING, NamedTuple -import pkg_resources - from mopidy import config as config_lib from mopidy import exceptions from mopidy.internal import path if TYPE_CHECKING: + from collections.abc import Iterator from pathlib import Path - from typing import Any, Dict, Iterator, List, Optional, Type + from typing import Any, TypeAlias from mopidy.commands import Command from mopidy.config import ConfigSchema - Config = Dict[str, Dict[str, Any]] - + Config = dict[str, dict[str, Any]] + RegistryEntry: TypeAlias = type[Any] | dict[str, Any] logger = logging.getLogger(__name__) class ExtensionData(NamedTuple): - extension: "Extension" + extension: Extension entry_point: Any config_schema: ConfigSchema config_defaults: Any - command: Optional[Command] + command: Command | None class Extension: - - """Base class for Mopidy extensions""" + """Base class for Mopidy extensions.""" dist_name: str """The extension's 
distribution name, as registered on PyPI @@ -60,12 +59,10 @@ def get_default_config(self) -> str: :returns: str """ - raise NotImplementedError( - 'Add at least a config section with "enabled = true"' - ) + raise NotImplementedError('Add at least a config section with "enabled = true"') def get_config_schema(self) -> ConfigSchema: - """The extension's config validation schema + """The extension's config validation schema. :returns: :class:`~mopidy.config.schemas.ConfigSchema` """ @@ -73,6 +70,12 @@ def get_config_schema(self) -> ConfigSchema: schema["enabled"] = config_lib.Boolean() return schema + @classmethod + def check_attr(cls) -> None: + """Check if ext_name exist.""" + if not hasattr(cls, "ext_name") or cls.ext_name is None: + raise AttributeError(f"{cls} not an extension or ext_name missing!") + @classmethod def get_cache_dir(cls, config: Config) -> Path: """Get or create cache directory for the extension. @@ -82,11 +85,8 @@ def get_cache_dir(cls, config: Config) -> Path: :param config: the Mopidy config object :return: pathlib.Path """ - if cls.ext_name is None: - raise AssertionError - cache_dir_path = ( - path.expand_path(config["core"]["cache_dir"]) / cls.ext_name - ) + cls.check_attr() + cache_dir_path = path.expand_path(config["core"]["cache_dir"]) / cls.ext_name path.get_or_create_dir(cache_dir_path) return cache_dir_path @@ -97,11 +97,8 @@ def get_config_dir(cls, config: Config) -> Path: :param config: the Mopidy config object :return: pathlib.Path """ - if cls.ext_name is None: - raise AssertionError - config_dir_path = ( - path.expand_path(config["core"]["config_dir"]) / cls.ext_name - ) + cls.check_attr() + config_dir_path = path.expand_path(config["core"]["config_dir"]) / cls.ext_name path.get_or_create_dir(config_dir_path) return config_dir_path @@ -114,21 +111,17 @@ def get_data_dir(cls, config: Config) -> Path: :param config: the Mopidy config object :returns: pathlib.Path """ - if cls.ext_name is None: - raise AssertionError - data_dir_path = ( - path.expand_path(config["core"]["data_dir"]) / cls.ext_name - ) + cls.check_attr() + data_dir_path = path.expand_path(config["core"]["data_dir"]) / cls.ext_name path.get_or_create_dir(data_dir_path) return data_dir_path - def get_command(self) -> Optional[Command]: + def get_command(self) -> Command | None: """Command to expose to command line users running ``mopidy``. :returns: Instance of a :class:`~mopidy.commands.Command` class. """ - pass def validate_environment(self) -> None: """Checks if the extension can run in the current environment. @@ -142,11 +135,9 @@ def validate_environment(self) -> None: :raises: :exc:`~mopidy.exceptions.ExtensionError` :returns: :class:`None` """ - pass - def setup(self, registry: "Registry") -> None: - """ - Register the extension's components in the extension :class:`Registry`. + def setup(self, registry: Registry) -> None: + """Register the extension's components in the extension :class:`Registry`. For example, to register a backend:: @@ -168,7 +159,6 @@ def setup(self, registry): class Registry(Mapping): - """Registry of components provided by Mopidy extensions. Passed to the :meth:`~Extension.setup` method of all extensions. The @@ -188,16 +178,16 @@ class Registry(Mapping): """ def __init__(self) -> None: - self._registry: Dict[str, List[Type[Any]]] = {} + self._registry: dict[str, list[RegistryEntry]] = {} - def add(self, name: str, cls: Type[Any]) -> None: + def add(self, name: str, entry: RegistryEntry) -> None: """Add a component to the registry. 
Multiple classes can be registered to the same name. """ - self._registry.setdefault(name, []).append(cls) + self._registry.setdefault(name, []).append(entry) - def __getitem__(self, name: str) -> List[Type[Any]]: + def __getitem__(self, name: str) -> list[RegistryEntry]: return self._registry.setdefault(name, []) def __iter__(self) -> Iterator[str]: @@ -207,30 +197,27 @@ def __len__(self) -> int: return len(self._registry) -def load_extensions() -> List[ExtensionData]: +def load_extensions() -> list[ExtensionData]: """Find all installed extensions. :returns: list of installed extensions """ - installed_extensions = [] - for entry_point in pkg_resources.iter_entry_points("mopidy.ext"): + for entry_point in metadata.entry_points(group="mopidy.ext"): logger.debug("Loading entry point: %s", entry_point) try: - extension_class = entry_point.resolve() - except Exception as e: - logger.exception( - f"Failed to load extension {entry_point.name}: {e}" - ) + extension_class = entry_point.load() + except Exception: + logger.exception(f"Failed to load extension {entry_point.name}.") continue try: if not issubclass(extension_class, Extension): - raise TypeError # issubclass raises TypeError on non-class + raise TypeError # noqa: TRY301 except TypeError: logger.error( - "Entry point %s did not contain a valid extension" "class: %r", + "Entry point %s did not contain a valid extension class: %r", entry_point.name, extension_class, ) @@ -251,30 +238,26 @@ def load_extensions() -> List[ExtensionData]: ) except Exception: logger.exception( - "Setup of extension from entry point %s failed, " - "ignoring extension.", + "Setup of extension from entry point %s failed, ignoring extension.", entry_point.name, ) continue installed_extensions.append(extension_data) - logger.debug( - "Loaded extension: %s %s", extension.dist_name, extension.version - ) + logger.debug("Loaded extension: %s %s", extension.dist_name, extension.version) names = (ed.extension.ext_name for ed in installed_extensions) logger.debug("Discovered extensions: %s", ", ".join(names)) return installed_extensions -def validate_extension_data(data: ExtensionData) -> bool: +def validate_extension_data(data: ExtensionData) -> bool: # noqa: PLR0911 """Verify extension's dependencies and environment. :param extensions: an extension to check :returns: if extension should be run """ - logger.debug("Validating extension: %s", data.extension.ext_name) if data.extension.ext_name != data.entry_point.name: @@ -286,28 +269,15 @@ def validate_extension_data(data: ExtensionData) -> bool: return False try: - data.entry_point.require() - except pkg_resources.DistributionNotFound as exc: + data.entry_point.load() + except ModuleNotFoundError as exc: logger.info( - "Disabled extension %s: Dependency %s not found", + "Disabled extension %s: Exception %s", data.extension.ext_name, exc, ) - return False - except pkg_resources.VersionConflict as exc: - if len(exc.args) == 2: - found, required = exc.args - logger.info( - "Disabled extension %s: %s required, but found %s at %s", - data.extension.ext_name, - required, - found, - found.location, - ) - else: - logger.info( - "Disabled extension %s: %s", data.extension.ext_name, exc - ) + # Remark: There are no version check, so any version is accepted + # this is a difference to pkg_resources, and affect debugging. 
return False try: @@ -328,7 +298,8 @@ def validate_extension_data(data: ExtensionData) -> bool: data.extension.ext_name, ) return False - elif not isinstance(data.config_schema.get("enabled"), config_lib.Boolean): + + if not isinstance(data.config_schema.get("enabled"), config_lib.Boolean): logger.error( 'Extension %s does not have the required "enabled" config' " option, disabling.", diff --git a/mopidy/file/__init__.py b/src/mopidy/file/__init__.py similarity index 75% rename from mopidy/file/__init__.py rename to src/mopidy/file/__init__.py index bf5dbf94ab..8a9061cbfe 100644 --- a/mopidy/file/__init__.py +++ b/src/mopidy/file/__init__.py @@ -1,5 +1,5 @@ import logging -import os +from pathlib import Path import mopidy from mopidy import config, ext @@ -8,16 +8,14 @@ class Extension(ext.Extension): - dist_name = "Mopidy-File" ext_name = "file" version = mopidy.__version__ - def get_default_config(self): - conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") - return config.read(conf_file) + def get_default_config(self) -> str: + return config.read(Path(__file__).parent / "ext.conf") - def get_config_schema(self): + def get_config_schema(self) -> config.ConfigSchema: schema = super().get_config_schema() schema["media_dirs"] = config.List(optional=True) schema["excluded_file_extensions"] = config.List(optional=True) @@ -26,7 +24,7 @@ def get_config_schema(self): schema["metadata_timeout"] = config.Integer(optional=True) return schema - def setup(self, registry): + def setup(self, registry) -> None: from .backend import FileBackend registry.add("backend", FileBackend) diff --git a/mopidy/file/backend.py b/src/mopidy/file/backend.py similarity index 60% rename from mopidy/file/backend.py rename to src/mopidy/file/backend.py index 16cafde9dd..2f6d4072c9 100644 --- a/mopidy/file/backend.py +++ b/src/mopidy/file/backend.py @@ -1,17 +1,21 @@ import logging +from typing import ClassVar import pykka from mopidy import backend +from mopidy.audio import AudioProxy +from mopidy.config import Config from mopidy.file import library +from mopidy.types import UriScheme logger = logging.getLogger(__name__) class FileBackend(pykka.ThreadingActor, backend.Backend): - uri_schemes = ["file"] + uri_schemes: ClassVar[list[UriScheme]] = [UriScheme("file")] - def __init__(self, config, audio): + def __init__(self, config: Config, audio: AudioProxy) -> None: super().__init__() self.library = library.FileLibraryProvider(backend=self, config=config) self.playback = backend.PlaybackProvider(audio=audio, backend=self) diff --git a/mopidy/file/ext.conf b/src/mopidy/file/ext.conf similarity index 100% rename from mopidy/file/ext.conf rename to src/mopidy/file/ext.conf diff --git a/mopidy/file/library.py b/src/mopidy/file/library.py similarity index 65% rename from mopidy/file/library.py rename to src/mopidy/file/library.py index 36e488af8f..38b6523dd3 100644 --- a/mopidy/file/library.py +++ b/src/mopidy/file/library.py @@ -1,42 +1,49 @@ import logging import os +import pathlib +from collections.abc import Generator +from typing import Any, TypedDict, cast -from mopidy import backend, exceptions, models +from mopidy import backend, exceptions +from mopidy import config as config_lib from mopidy.audio import scan, tags +from mopidy.file import Extension +from mopidy.file.types import FileConfig from mopidy.internal import path +from mopidy.models import Ref, Track +from mopidy.types import Uri logger = logging.getLogger(__name__) +class MediaDir(TypedDict): + path: pathlib.Path + name: str + + class 
FileLibraryProvider(backend.LibraryProvider): """Library for browsing local files.""" # TODO: get_images that can pull from metadata and/or .folder.png etc? # TODO: handle playlists? - @property - def root_directory(self): - if not self._media_dirs: - return None - elif len(self._media_dirs) == 1: - uri = path.path_to_uri(self._media_dirs[0]["path"]) - else: - uri = "file:root" - return models.Ref.directory(name="Files", uri=uri) - - def __init__(self, backend, config): + def __init__(self, backend: backend.Backend, config: config_lib.Config) -> None: super().__init__(backend) + + ext_config = cast(FileConfig, config[Extension.ext_name]) + self._media_dirs = list(self._get_media_dirs(config)) - self._show_dotfiles = config["file"]["show_dotfiles"] + self._show_dotfiles = ext_config["show_dotfiles"] self._excluded_file_extensions = tuple( - file_ext.lower() - for file_ext in config["file"]["excluded_file_extensions"] + file_ext.lower() for file_ext in ext_config["excluded_file_extensions"] ) - self._follow_symlinks = config["file"]["follow_symlinks"] + self._follow_symlinks = ext_config["follow_symlinks"] + + self._scanner = scan.Scanner(timeout=ext_config["metadata_timeout"]) - self._scanner = scan.Scanner(timeout=config["file"]["metadata_timeout"]) + self.root_directory = self._get_root_directory() - def browse(self, uri): + def browse(self, uri) -> list[Ref]: # noqa: C901 logger.debug("Browsing files at: %s", uri) result = [] local_path = path.uri_to_path(uri) @@ -64,11 +71,11 @@ def browse(self, uri): if ( self._excluded_file_extensions - and dir_entry.suffix in self._excluded_file_extensions + and dir_entry.suffix.lower() in self._excluded_file_extensions ): continue - if child_path.is_symlink() and not self._follow_symlinks: + if dir_entry.is_symlink() and not self._follow_symlinks: logger.debug("Ignoring symlink: %s", uri) continue @@ -77,20 +84,18 @@ def browse(self, uri): continue if child_path.is_dir(): - result.append( - models.Ref.directory(name=dir_entry.name, uri=uri) - ) + result.append(Ref.directory(name=dir_entry.name, uri=uri)) elif child_path.is_file(): - result.append(models.Ref.track(name=dir_entry.name, uri=uri)) + result.append(Ref.track(name=dir_entry.name, uri=uri)) def order(item): - return (item.type != models.Ref.DIRECTORY, item.name) + return (item.type != Ref.DIRECTORY, item.name) result.sort(key=order) return result - def lookup(self, uri): + def lookup(self, uri: Uri) -> list[Track]: logger.debug("Looking up file URI: %s", uri) local_path = path.uri_to_path(uri) @@ -101,23 +106,30 @@ def lookup(self, uri): ) except exceptions.ScannerError as e: logger.warning("Failed looking up %s: %s", uri, e) - track = models.Track(uri=uri) + track = Track(uri=uri) if not track.name: track = track.replace(name=local_path.name) return [track] - def _get_media_dirs(self, config): + def _get_root_directory(self) -> Ref | None: + if not self._media_dirs: + return None + if len(self._media_dirs) == 1: + uri = path.path_to_uri(self._media_dirs[0]["path"]) + else: + uri = "file:root" + return Ref.directory(name="Files", uri=uri) + + def _get_media_dirs(self, config) -> Generator[MediaDir, Any, None]: for entry in config["file"]["media_dirs"]: - media_dir = {} media_dir_split = entry.split("|", 1) local_path = path.expand_path(media_dir_split[0]) if local_path is None: logger.debug( - "Failed expanding path (%s) from file/media_dirs config " - "value.", + "Failed expanding path (%s) from file/media_dirs config value.", media_dir_split[0], ) continue @@ -129,22 +141,21 @@ def 
_get_media_dirs(self, config): ) continue - media_dir["path"] = local_path if len(media_dir_split) == 2: - media_dir["name"] = media_dir_split[1] + name = media_dir_split[1] else: # TODO Mpd client should accept / in dir name - media_dir["name"] = media_dir_split[0].replace(os.sep, "+") + name = media_dir_split[0].replace(os.sep, "+") - yield media_dir + yield MediaDir(path=local_path, name=name) - def _get_media_dirs_refs(self): + def _get_media_dirs_refs(self) -> Generator[Ref, Any, None]: for media_dir in self._media_dirs: - yield models.Ref.directory( + yield Ref.directory( name=media_dir["name"], uri=path.path_to_uri(media_dir["path"]) ) - def _is_in_basedir(self, local_path): + def _is_in_basedir(self, local_path) -> bool: return any( path.is_path_inside_base_dir(local_path, media_dir["path"]) for media_dir in self._media_dirs diff --git a/src/mopidy/file/types.py b/src/mopidy/file/types.py new file mode 100644 index 0000000000..97781cb7bf --- /dev/null +++ b/src/mopidy/file/types.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from typing import TypedDict + + +class FileConfig(TypedDict): + media_dirs: list[str] + excluded_file_extensions: list[str] + show_dotfiles: bool + follow_symlinks: bool + metadata_timeout: int diff --git a/src/mopidy/http/__init__.py b/src/mopidy/http/__init__.py new file mode 100644 index 0000000000..86bf3f8eab --- /dev/null +++ b/src/mopidy/http/__init__.py @@ -0,0 +1,59 @@ +import logging +from pathlib import Path +from typing import cast + +import mopidy +from mopidy import config, exceptions, ext +from mopidy.config import ConfigSchema + +logger = logging.getLogger(__name__) + + +class Extension(ext.Extension): + dist_name = "Mopidy-HTTP" + ext_name = "http" + version = mopidy.__version__ + + def get_default_config(self) -> str: + return config.read(Path(__file__).parent / "ext.conf") + + def get_config_schema(self) -> ConfigSchema: + schema = super().get_config_schema() + schema["hostname"] = config.Hostname() + schema["port"] = config.Port() + schema["static_dir"] = config.Deprecated() + schema["zeroconf"] = config.String(optional=True) + schema["allowed_origins"] = config.List( + optional=True, + unique=True, + subtype=config.String(transformer=lambda x: x.lower()), + ) + schema["csrf_protection"] = config.Boolean(optional=True) + schema["default_app"] = config.String(optional=True) + return schema + + def validate_environment(self) -> None: + try: + import tornado.web # noqa: F401 (Imported to test if available) + except ImportError as exc: + raise exceptions.ExtensionError("tornado library not found") from exc + + def setup(self, registry: ext.Registry) -> None: + from .actor import HttpFrontend + from .handlers import make_mopidy_app_factory + from .types import HttpApp, HttpStatic + + HttpFrontend.apps = cast(list[HttpApp], registry["http:app"]) + HttpFrontend.statics = cast(list[HttpStatic], registry["http:static"]) + + registry.add("frontend", HttpFrontend) + registry.add( + "http:app", + { + "name": "mopidy", + "factory": make_mopidy_app_factory( + apps=cast(list[HttpApp], registry["http:app"]), + statics=cast(list[HttpStatic], registry["http:static"]), + ), + }, + ) diff --git a/mopidy/http/actor.py b/src/mopidy/http/actor.py similarity index 75% rename from mopidy/http/actor.py rename to src/mopidy/http/actor.py index 40fbf52420..f472efbf3e 100644 --- a/mopidy/http/actor.py +++ b/src/mopidy/http/actor.py @@ -1,10 +1,12 @@ from __future__ import annotations +import asyncio import json import logging import secrets +import socket 
import threading -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, ClassVar import pykka import tornado.httpserver @@ -19,22 +21,19 @@ from mopidy.internal import formatting, network if TYPE_CHECKING: - from typing import Any, ClassVar, List, Type - -try: - import asyncio -except ImportError: - asyncio = None # type: ignore + from mopidy.core.actor import CoreProxy + from mopidy.ext import Config + from mopidy.http.types import HttpApp, HttpStatic, RequestRule logger = logging.getLogger(__name__) class HttpFrontend(pykka.ThreadingActor, CoreListener): - apps: ClassVar[List[Type[Any]]] = [] - statics: ClassVar[List[Type[Any]]] = [] + apps: ClassVar[list[HttpApp]] = [] + statics: ClassVar[list[HttpStatic]] = [] - def __init__(self, config, core): + def __init__(self, config: Config, core: CoreProxy) -> None: super().__init__() self.hostname = network.format_hostname(config["http"]["hostname"]) @@ -54,13 +53,13 @@ def __init__(self, config, core): statics=self.statics, ) except OSError as exc: - raise exceptions.FrontendError(f"HTTP server startup failed: {exc}") + raise exceptions.FrontendError("HTTP server startup failed.") from exc self.zeroconf_name = config["http"]["zeroconf"] self.zeroconf_http = None self.zeroconf_mopidy_http = None - def on_start(self): + def on_start(self) -> None: logger.info("HTTP server running at [%s]:%s", self.hostname, self.port) self.server.start() @@ -76,7 +75,7 @@ def on_start(self): self.zeroconf_http.publish() self.zeroconf_mopidy_http.publish() - def on_stop(self): + def on_stop(self) -> None: if self.zeroconf_http: self.zeroconf_http.unpublish() if self.zeroconf_mopidy_http: @@ -84,11 +83,12 @@ def on_stop(self): self.server.stop() - def on_event(self, name, **data): - on_event(name, self.server.io_loop, **data) + def on_event(self, event: str, **data: Any) -> None: + assert self.server.io_loop + on_event(event, self.server.io_loop, **data) -def on_event(name, io_loop, **data): +def on_event(name: str, io_loop: tornado.ioloop.IOLoop, **data: Any) -> None: event = data event["event"] = name message = json.dumps(event, cls=models.ModelJSONEncoder) @@ -98,7 +98,14 @@ def on_event(name, io_loop, **data): class HttpServer(threading.Thread): name = "HttpServer" - def __init__(self, config, core, sockets, apps, statics): + def __init__( # noqa: PLR0913 + self, + config: Config, + core: CoreProxy, + sockets: list[socket.socket], + apps: list[HttpApp], + statics: list[HttpStatic], + ) -> None: super().__init__() self.config = config @@ -111,15 +118,13 @@ def __init__(self, config, core, sockets, apps, statics): self.server = None self.io_loop = None - def run(self): - if asyncio: - # If asyncio is available, Tornado uses it as its IO loop. Since we - # start Tornado in a another thread than the main thread, we must - # explicitly create an asyncio loop for the current thread. - asyncio.set_event_loop(asyncio.new_event_loop()) + def run(self) -> None: + # Since we start Tornado in a another thread than the main thread, + # we must explicitly create an asyncio loop for the current thread. 
+ asyncio.set_event_loop(asyncio.new_event_loop()) self.app = tornado.web.Application( - self._get_request_handlers(), + self._get_request_handlers(), # pyright: ignore[reportArgumentType] cookie_secret=self._get_cookie_secret(), ) self.server = tornado.httpserver.HTTPServer(self.app) @@ -130,11 +135,12 @@ def run(self): logger.debug("Stopped HTTP server") - def stop(self): + def stop(self) -> None: logger.debug("Stopping HTTP server") + assert self.io_loop self.io_loop.add_callback(self.io_loop.stop) - def _get_request_handlers(self): + def _get_request_handlers(self) -> list[RequestRule]: request_handlers = [] request_handlers.extend(self._get_app_request_handlers()) request_handlers.extend(self._get_static_request_handlers()) @@ -144,15 +150,14 @@ def _get_request_handlers(self): "HTTP routes from extensions: %s", formatting.indent( "\n".join( - f"{path!r}: {handler!r}" - for (path, handler, *_) in request_handlers + f"{path!r}: {handler!r}" for (path, handler, *_) in request_handlers ) ), ) return request_handlers - def _get_app_request_handlers(self): + def _get_app_request_handlers(self) -> list[RequestRule]: result = [] for app in self.apps: try: @@ -169,7 +174,7 @@ def _get_app_request_handlers(self): logger.debug("Loaded HTTP extension: %s", app["name"]) return result - def _get_static_request_handlers(self): + def _get_static_request_handlers(self) -> list[RequestRule]: result = [] for static in self.statics: result.append((f"/{static['name']}", handlers.AddSlashHandler)) @@ -183,14 +188,12 @@ def _get_static_request_handlers(self): logger.debug("Loaded static HTTP extension: %s", static["name"]) return result - def _get_default_request_handlers(self): + def _get_default_request_handlers(self) -> list[RequestRule]: sites = [app["name"] for app in self.apps + self.statics] default_app = self.config["http"]["default_app"] if default_app not in sites: - logger.warning( - f"HTTP server's default app {default_app!r} not found" - ) + logger.warning(f"HTTP server's default app {default_app!r} not found") default_app = "mopidy" logger.debug(f"Default webclient is {default_app}") @@ -202,7 +205,7 @@ def _get_default_request_handlers(self): ) ] - def _get_cookie_secret(self): + def _get_cookie_secret(self) -> str: file_path = Extension.get_data_dir(self.config) / "cookie_secret" if not file_path.is_file(): cookie_secret = secrets.token_hex(32) diff --git a/mopidy/http/data/clients.html b/src/mopidy/http/data/clients.html similarity index 100% rename from mopidy/http/data/clients.html rename to src/mopidy/http/data/clients.html diff --git a/mopidy/http/data/favicon.ico b/src/mopidy/http/data/favicon.ico similarity index 100% rename from mopidy/http/data/favicon.ico rename to src/mopidy/http/data/favicon.ico diff --git a/mopidy/http/data/mopidy.css b/src/mopidy/http/data/mopidy.css similarity index 100% rename from mopidy/http/data/mopidy.css rename to src/mopidy/http/data/mopidy.css diff --git a/mopidy/http/ext.conf b/src/mopidy/http/ext.conf similarity index 100% rename from mopidy/http/ext.conf rename to src/mopidy/http/ext.conf diff --git a/mopidy/http/handlers.py b/src/mopidy/http/handlers.py similarity index 58% rename from mopidy/http/handlers.py rename to src/mopidy/http/handlers.py index 9a71a70d62..8f70e348d2 100644 --- a/mopidy/http/handlers.py +++ b/src/mopidy/http/handlers.py @@ -2,9 +2,10 @@ import functools import logging -import os -import urllib -from typing import TYPE_CHECKING +import urllib.parse +from collections.abc import Callable +from pathlib import Path +from typing 
import TYPE_CHECKING, Any, cast import tornado.escape import tornado.ioloop @@ -13,32 +14,39 @@ import mopidy from mopidy import core, models +from mopidy.http.types import HttpConfig from mopidy.internal import jsonrpc if TYPE_CHECKING: - from typing import ClassVar, Set + from collections.abc import Awaitable + from typing import ClassVar + + from mopidy.core.actor import CoreProxy + from mopidy.ext import Config + from mopidy.http.types import HttpApp, HttpStatic, RequestRule logger = logging.getLogger(__name__) -def make_mopidy_app_factory(apps, statics): - def mopidy_app_factory(config, core): - if not config["http"]["csrf_protection"]: - logger.warning( - "HTTP Cross-Site Request Forgery protection is disabled" - ) - allowed_origins = { - x.lower() for x in config["http"]["allowed_origins"] if x - } +def make_mopidy_app_factory( + *, + apps: list[HttpApp], + statics: list[HttpStatic], +) -> Callable[[Config, CoreProxy], list[RequestRule]]: + def mopidy_app_factory(config: Config, core: CoreProxy) -> list[RequestRule]: + http_config = cast(HttpConfig, config["http"]) + if not http_config["csrf_protection"]: + logger.warning("HTTP Cross-Site Request Forgery protection is disabled") + return [ ( r"/ws/?", WebSocketHandler, { "core": core, - "allowed_origins": allowed_origins, - "csrf_protection": config["http"]["csrf_protection"], + "allowed_origins": http_config["allowed_origins"], + "csrf_protection": http_config["csrf_protection"], }, ), ( @@ -46,22 +54,31 @@ def mopidy_app_factory(config, core): JsonRpcHandler, { "core": core, - "allowed_origins": allowed_origins, - "csrf_protection": config["http"]["csrf_protection"], + "allowed_origins": http_config["allowed_origins"], + "csrf_protection": http_config["csrf_protection"], }, ), ( r"/(.+)", StaticFileHandler, - {"path": os.path.join(os.path.dirname(__file__), "data")}, + { + "path": str(Path(__file__).parent / "data"), + }, + ), + ( + r"/", + ClientListHandler, + { + "apps": apps, + "statics": statics, + }, ), - (r"/", ClientListHandler, {"apps": apps, "statics": statics}), ] return mopidy_app_factory -def make_jsonrpc_wrapper(core_actor): +def make_jsonrpc_wrapper(core_actor: CoreProxy) -> jsonrpc.JsonRpcWrapper: inspector = jsonrpc.JsonRpcInspector( objects={ "core.get_uri_schemes": core.Core.get_uri_schemes, @@ -91,7 +108,10 @@ def make_jsonrpc_wrapper(core_actor): ) -def _send_broadcast(client, msg): +def _send_broadcast( + client: WebSocketHandler, + msg: bytes | str | dict[str, Any], +) -> None: # We could check for client.ws_connection, but we don't really # care why the broadcast failed, we just want the rest of them # to succeed, so catch everything. @@ -106,39 +126,43 @@ def _send_broadcast(client, msg): class WebSocketHandler(tornado.websocket.WebSocketHandler): - # XXX This set is shared by all WebSocketHandler objects. This isn't # optimal, but there's currently no use case for having more than one of # these anyway. - clients: ClassVar[Set[WebSocketHandler]] = set() + clients: ClassVar[set[WebSocketHandler]] = set() @classmethod - def broadcast(cls, msg, io_loop): + def broadcast( + cls, + msg: bytes | str | dict[str, Any], + io_loop: tornado.ioloop.IOLoop, + ) -> None: # This can be called from outside the Tornado ioloop, so we need to # safely cross the thread boundary by adding a callback to the loop. 
for client in cls.clients.copy(): # One callback per client to keep time we hold up the loop short - io_loop.add_callback( - functools.partial(_send_broadcast, client, msg) - ) - - def initialize(self, core, allowed_origins, csrf_protection): + io_loop.add_callback(functools.partial(_send_broadcast, client, msg)) + + def initialize( + self, + core: CoreProxy, + allowed_origins: set[str], + csrf_protection: bool | None, + ) -> None: self.jsonrpc = make_jsonrpc_wrapper(core) self.allowed_origins = allowed_origins self.csrf_protection = csrf_protection - def open(self): + def open(self, *_args: str, **_kwargs: str) -> Awaitable[None] | None: self.set_nodelay(True) self.clients.add(self) logger.debug("New WebSocket connection from %s", self.request.remote_ip) - def on_close(self): + def on_close(self) -> None: self.clients.discard(self) - logger.debug( - "Closed WebSocket connection from %s", self.request.remote_ip - ) + logger.debug("Closed WebSocket connection from %s", self.request.remote_ip) - def on_message(self, message): + def on_message(self, message: str | bytes) -> Awaitable[None] | None: if not message: return @@ -149,9 +173,7 @@ def on_message(self, message): ) try: - response = self.jsonrpc.handle_json( - tornado.escape.native_str(message) - ) + response = self.jsonrpc.handle_json(tornado.escape.native_str(message)) if response and self.write_message(response): logger.debug( "Sent WebSocket message to %s: %r", @@ -162,61 +184,77 @@ def on_message(self, message): logger.error(f"WebSocket request error: {exc}") self.close() - def check_origin(self, origin): + def check_origin(self, origin: str) -> bool: if not self.csrf_protection: return True return check_origin(origin, self.request.headers, self.allowed_origins) -def set_mopidy_headers(request_handler): +def set_mopidy_headers(request_handler: tornado.web.RequestHandler) -> None: request_handler.set_header("Cache-Control", "no-cache") request_handler.set_header("X-Mopidy-Version", mopidy.__version__.encode()) -def check_origin(origin, request_headers, allowed_origins): +def check_origin( + origin: str | None, + request_headers: tornado.httputil.HTTPHeaders, + allowed_origins: set[str], +) -> bool: if origin is None: logger.warning("HTTP request denied for missing Origin header") return False - allowed_origins.add(request_headers.get("Host")) + host_header = request_headers.get("Host") parsed_origin = urllib.parse.urlparse(origin).netloc.lower() # Some frameworks (e.g. Apache Cordova) use local files. Requests from # these files don't really have a sensible Origin so the browser sets the # header to something like 'file://' or 'null'. This results here in an # empty parsed_origin which we choose to allow. 
- if parsed_origin and parsed_origin not in allowed_origins: + if parsed_origin and parsed_origin not in allowed_origins | {host_header}: logger.warning('HTTP request denied for Origin "%s"', origin) return False return True class JsonRpcHandler(tornado.web.RequestHandler): - def initialize(self, core, allowed_origins, csrf_protection): + def initialize( + self, + core: CoreProxy, + allowed_origins: set[str], + csrf_protection: bool | None, + ) -> None: self.jsonrpc = make_jsonrpc_wrapper(core) self.allowed_origins = allowed_origins self.csrf_protection = csrf_protection - def head(self): + def head(self) -> Awaitable[None] | None: self.set_extra_headers() self.finish() - def post(self): + def post(self) -> Awaitable[None] | None: if self.csrf_protection: + # This "non-standard" Content-Type requirement forces browsers to + # automatically issue a preflight OPTIONS request before this one. + # All Origin header enforcement/checking can be limited to our OPTIONS + # handler and requests not vulnerable to CSRF (i.e. non-browser + # requests) need only set the Content-Type header. content_type = ( - self.request.headers.get("Content-Type", "") - .split(";")[0] - .strip() + self.request.headers.get("Content-Type", "").split(";")[0].strip() ) if content_type != "application/json": self.set_status(415, "Content-Type must be application/json") return + origin = self.request.headers.get("Origin") + if origin is not None: + # This request came from a browser and has already had its Origin + # checked in the preflight request. + self.set_cors_headers(origin) + data = self.request.body if not data: return - logger.debug( - "Received RPC message from %s: %r", self.request.remote_ip, data - ) + logger.debug("Received RPC message from %s: %r", self.request.remote_ip, data) try: self.set_extra_headers() @@ -231,36 +269,38 @@ def post(self): logger.error("HTTP JSON-RPC request error: %s", e) self.write_error(500) - def set_extra_headers(self): + def set_extra_headers(self) -> None: set_mopidy_headers(self) self.set_header("Accept", "application/json") self.set_header("Content-Type", "application/json; utf-8") - def options(self): + def set_cors_headers(self, origin: str) -> None: + self.set_header("Access-Control-Allow-Origin", f"{origin}") + self.set_header("Access-Control-Allow-Headers", "Content-Type") + + def options(self) -> Awaitable[None] | None: if self.csrf_protection: - origin = self.request.headers.get("Origin") - if not check_origin( - origin, self.request.headers, self.allowed_origins - ): + origin = cast(str | None, self.request.headers.get("Origin")) + if not check_origin(origin, self.request.headers, self.allowed_origins): self.set_status(403, f"Access denied for origin {origin}") return - self.set_header("Access-Control-Allow-Origin", f"{origin}") - self.set_header("Access-Control-Allow-Headers", "Content-Type") + assert origin + self.set_cors_headers(origin) self.set_status(204) self.finish() class ClientListHandler(tornado.web.RequestHandler): - def initialize(self, apps, statics): + def initialize(self, apps: list[HttpApp], statics: list[HttpStatic]) -> None: self.apps = apps self.statics = statics - def get_template_path(self): - return os.path.dirname(__file__) + def get_template_path(self) -> str: + return str(Path(__file__).parent) - def get(self): + def get(self) -> Awaitable[None] | None: set_mopidy_headers(self) names = set() @@ -270,15 +310,18 @@ def get(self): names.add(static["name"]) names.discard("mopidy") - self.render("data/clients.html", apps=sorted(list(names))) + 
self.render("data/clients.html", apps=sorted(names)) class StaticFileHandler(tornado.web.StaticFileHandler): - def set_extra_headers(self, path): + def set_extra_headers( + self, + path: str, # noqa: ARG002 + ) -> None: set_mopidy_headers(self) class AddSlashHandler(tornado.web.RequestHandler): @tornado.web.addslash - def prepare(self): + def prepare(self) -> Awaitable[None] | None: return super().prepare() diff --git a/src/mopidy/http/types.py b/src/mopidy/http/types.py new file mode 100644 index 0000000000..5496186589 --- /dev/null +++ b/src/mopidy/http/types.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from collections.abc import Callable +from os import PathLike +from typing import TYPE_CHECKING, Any, TypedDict + +import tornado.web + +if TYPE_CHECKING: + from typing import TypeAlias + + from mopidy.core.actor import CoreProxy + from mopidy.ext import Config + + +RequestRule: TypeAlias = tuple[str, type[tornado.web.RequestHandler], dict[str, Any]] + + +class HttpConfig(TypedDict): + hostname: str + port: int + zeroconf: str | None + allowed_origins: list[str] + csrf_protection: bool | None + default_app: str | None + + +class HttpApp(TypedDict): + name: str + factory: Callable[[Config, CoreProxy], list[RequestRule]] + + +class HttpStatic(TypedDict): + name: str + path: str | PathLike[str] diff --git a/mopidy/httpclient.py b/src/mopidy/httpclient.py similarity index 76% rename from mopidy/httpclient.py rename to src/mopidy/httpclient.py index b3316ddc8d..465106abbe 100644 --- a/mopidy/httpclient.py +++ b/src/mopidy/httpclient.py @@ -1,11 +1,17 @@ +"""Helpers for configuring HTTP clients used in Mopidy extensions.""" + +from __future__ import annotations + import platform +from typing import TYPE_CHECKING import mopidy -"Helpers for configuring HTTP clients used in Mopidy extensions." +if TYPE_CHECKING: + from mopidy.config import ProxyConfig -def format_proxy(proxy_config, auth=True): +def format_proxy(proxy_config: ProxyConfig, auth: bool = True) -> str | None: """Convert a Mopidy proxy config to the commonly used proxy string format. Outputs ``scheme://host:port``, ``scheme://user:pass@host:port`` or @@ -29,11 +35,11 @@ def format_proxy(proxy_config, auth=True): if username and password and auth: return f"{scheme}://{username}:{password}@{hostname}:{port}" - else: - return f"{scheme}://{hostname}:{port}" + + return f"{scheme}://{hostname}:{port}" -def format_user_agent(name=None): +def format_user_agent(name: str | None = None) -> str: """Construct a User-Agent suitable for use in client code. This will identify use by the provided ``name`` (which should be on the diff --git a/mopidy/internal/__init__.py b/src/mopidy/internal/__init__.py similarity index 100% rename from mopidy/internal/__init__.py rename to src/mopidy/internal/__init__.py diff --git a/mopidy/internal/deprecation.py b/src/mopidy/internal/deprecation.py similarity index 74% rename from mopidy/internal/deprecation.py rename to src/mopidy/internal/deprecation.py index 28d4a01e0d..04a6be1e49 100644 --- a/mopidy/internal/deprecation.py +++ b/src/mopidy/internal/deprecation.py @@ -5,18 +5,12 @@ # Messages used in deprecation warnings are collected here so we can target # them easily when ignoring warnings. 
_MESSAGES = { - # Deprecated features in core playback: - "core.playback.play:tl_track_kwargs": ( - 'playback.play() with "tl_track" argument is pending deprecation use ' - '"tlid" instead' - ), # Deprecated features in core tracklist: "core.tracklist.add:tracks_arg": ( 'tracklist.add() "tracks" argument is deprecated' ), "core.tracklist.eot_track": ( - "tracklist.eot_track() is pending deprecation, use " - "tracklist.get_eot_tlid()" + "tracklist.eot_track() is pending deprecation, use tracklist.get_eot_tlid()" ), "core.tracklist.next_track": ( "tracklist.next_track() is pending deprecation, use " @@ -35,11 +29,8 @@ def warn(msg_id, pending=False): - if pending: - category = PendingDeprecationWarning - else: - category = DeprecationWarning - warnings.warn(_MESSAGES.get(msg_id, msg_id), category) + category = PendingDeprecationWarning if pending else DeprecationWarning + warnings.warn(_MESSAGES.get(msg_id, msg_id), category, stacklevel=2) @contextlib.contextmanager diff --git a/src/mopidy/internal/deps.py b/src/mopidy/internal/deps.py new file mode 100644 index 0000000000..4eec08d353 --- /dev/null +++ b/src/mopidy/internal/deps.py @@ -0,0 +1,204 @@ +from __future__ import annotations + +import platform +import re +import sys +from dataclasses import dataclass, field +from importlib import metadata +from os import PathLike +from pathlib import Path + +from mopidy.internal import formatting +from mopidy.internal.gi import Gst, gi + + +@dataclass +class DepInfo: + name: str + version: str | None = None + path: PathLike[str] | None = None + dependencies: list[DepInfo] = field(default_factory=list) + other: str | None = None + + +def format_dependency_list(dependencies: list[DepInfo] | None = None) -> str: + if dependencies is None: + seen_pkgs = set() + ext_pkg_names = { + ext_pkg_name + for ep in metadata.entry_points(group="mopidy.ext") + if ep.dist is not None + and (ext_pkg_name := ep.dist.name.lower()) + and ext_pkg_name != "mopidy" + } + dependencies = [ + executable_info(), + platform_info(), + python_info(), + pkg_info( + pkg_name="mopidy", + seen_pkgs=seen_pkgs, + ), + *[ + pkg_info( + pkg_name=pkg_name, + seen_pkgs=seen_pkgs, + ) + for pkg_name in ext_pkg_names + ], + gstreamer_info(), + ] + return "\n".join(_format_dependency(dep) for dep in dependencies) + + +def _format_dependency(dep: DepInfo) -> str: + lines = [] + + if dep.version is None: + lines.append(f"{dep.name}: not found") + else: + source = f" from {dep.path}" if dep.path else "" + lines.append(f"{dep.name}: {dep.version}{source}") + + if dep.other: + details = formatting.indent(dep.other, places=4) + lines.append(f" Detailed information: {details}") + + for sub_dep in dep.dependencies: + sub_dep_lines = _format_dependency(sub_dep) + lines.append(formatting.indent(sub_dep_lines, places=2, singles=True)) + + return "\n".join(lines) + + +def executable_info() -> DepInfo: + return DepInfo( + name="Executable", + version=sys.argv[0], + ) + + +def platform_info() -> DepInfo: + return DepInfo( + name="Platform", + version=platform.platform(), + ) + + +def python_info() -> DepInfo: + return DepInfo( + name="Python", + version=f"{platform.python_implementation()} {platform.python_version()}", + path=Path(platform.__file__).parent, + ) + + +def pkg_info( + *, + pkg_name: str, + depth: int = 0, + seen_pkgs: set[str], +) -> DepInfo: + try: + dependencies = [] + distribution = metadata.distribution(pkg_name) + if distribution.requires: + for raw in distribution.requires: + if "extra" in raw: + continue + if match := 
re.match("[a-zA-Z0-9_-]+", raw): + name = match.group(0).lower() + if depth > 0 and name in seen_pkgs: + continue + seen_pkgs.add(name) + dependencies.append( + pkg_info( + pkg_name=name, + depth=depth + 1, + seen_pkgs=seen_pkgs, + ) + ) + return DepInfo( + name=pkg_name, + version=distribution.version, + path=distribution.locate_file("."), + dependencies=dependencies, + ) + except metadata.PackageNotFoundError: + return DepInfo( + name=pkg_name, + ) + + +def gstreamer_info() -> DepInfo: + other: list[str] = [] + other.append(f"Python wrapper: python-gi {gi.__version__}") + + found_elements = [] + missing_elements = [] + for name, status in _gstreamer_check_elements(): + if status: + found_elements.append(name) + else: + missing_elements.append(name) + + other.append("Relevant elements:") + other.append(" Found:") + for element in found_elements: + other.append(f" {element}") + if not found_elements: + other.append(" none") + other.append(" Not found:") + for element in missing_elements: + other.append(f" {element}") + if not missing_elements: + other.append(" none") + + return DepInfo( + name="GStreamer", + version=".".join(map(str, Gst.version())), + path=Path(gi.__file__).parent, + other="\n".join(other), + ) + + +def _gstreamer_check_elements(): + elements_to_check = [ + # Core playback + "uridecodebin", + # External HTTP streams + "souphttpsrc", + # Audio sinks + "alsasink", + "osssink", + "oss4sink", + "pulsesink", + # MP3 encoding and decoding + # + # One of flump3dec, mad, and mpg123audiodec is required for MP3 + # playback. + "flump3dec", + "id3demux", + "id3v2mux", + "lamemp3enc", + "mad", + "mpegaudioparse", + "mpg123audiodec", + # Ogg Vorbis encoding and decoding + "vorbisdec", + "vorbisenc", + "vorbisparse", + "oggdemux", + "oggmux", + "oggparse", + # Flac decoding + "flacdec", + "flacparse", + # Shoutcast output + "shout2send", + ] + known_elements = [ + factory.get_name() + for factory in Gst.Registry.get().get_feature_list(Gst.ElementFactory) + ] + return [(element, element in known_elements) for element in elements_to_check] diff --git a/mopidy/internal/formatting.py b/src/mopidy/internal/formatting.py similarity index 100% rename from mopidy/internal/formatting.py rename to src/mopidy/internal/formatting.py diff --git a/mopidy/internal/gi.py b/src/mopidy/internal/gi.py similarity index 82% rename from mopidy/internal/gi.py rename to src/mopidy/internal/gi.py index c569d73293..91b78e5046 100644 --- a/mopidy/internal/gi.py +++ b/src/mopidy/internal/gi.py @@ -1,13 +1,15 @@ +# pyright: reportMissingModuleSource=false + import sys import textwrap -try: - import gi +import gi +try: gi.require_version("Gst", "1.0") - from gi.repository import GLib, GObject, Gst -except ImportError: - print( + gi.require_version("GstPbutils", "1.0") +except ValueError: + print( # noqa: T201 textwrap.dedent( """ ERROR: A GObject based library was not found. 
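[Editor's note, not part of the patch: a minimal standalone sketch of the importlib.metadata calls that the new src/mopidy/internal/deps.py relies on (metadata.distribution(), .version, .requires, PackageNotFoundError), which replace the earlier pkg_resources-based lookups. The package name passed in is arbitrary; skipping requirement strings guarded by an "extra" marker mirrors what pkg_info() above does.]

from importlib import metadata

def describe(pkg_name: str) -> None:
    """Print a package's version and its non-optional requirements."""
    try:
        dist = metadata.distribution(pkg_name)
    except metadata.PackageNotFoundError:
        print(f"{pkg_name}: not found")
        return
    print(f"{pkg_name}: {dist.version}")
    # `requires` is a list of raw requirement strings, or None.
    for raw in dist.requires or []:
        if "extra" in raw:  # optional extras are skipped, as in pkg_info()
            continue
        print(f"  requires: {raw}")

describe("mopidy")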
@@ -22,15 +24,14 @@ ) ) raise -else: - Gst.init([]) - gi.require_version("GstPbutils", "1.0") - from gi.repository import GstPbutils +from gi.repository import GLib, GObject, Gst, GstPbutils + +Gst.init([]) GLib.set_prgname("mopidy") GLib.set_application_name("Mopidy") -REQUIRED_GST_VERSION = (1, 14, 0) +REQUIRED_GST_VERSION = (1, 22, 0) REQUIRED_GST_VERSION_DISPLAY = ".".join(map(str, REQUIRED_GST_VERSION)) if Gst.version() < REQUIRED_GST_VERSION: diff --git a/mopidy/internal/http.py b/src/mopidy/internal/http.py similarity index 79% rename from mopidy/internal/http.py rename to src/mopidy/internal/http.py index 2cedcbe164..44df5c0765 100644 --- a/mopidy/internal/http.py +++ b/src/mopidy/internal/http.py @@ -1,19 +1,29 @@ +from __future__ import annotations + import logging import time +from typing import TYPE_CHECKING import requests from mopidy import httpclient -logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from mopidy.httpclient import ProxyConfig +logger = logging.getLogger(__name__) -def get_requests_session(proxy_config, user_agent): - proxy = httpclient.format_proxy(proxy_config) - full_user_agent = httpclient.format_user_agent(user_agent) +def get_requests_session( + proxy_config: ProxyConfig, + user_agent: str, +) -> requests.Session: session = requests.Session() - session.proxies.update({"http": proxy, "https": proxy}) + + if proxy := httpclient.format_proxy(proxy_config): + session.proxies.update({"http": proxy, "https": proxy}) + + full_user_agent = httpclient.format_user_agent(user_agent) session.headers.update({"user-agent": full_user_agent}) return session @@ -24,7 +34,7 @@ def download(session, uri, timeout=1.0, chunk_size=4096): response = session.get(uri, stream=True, timeout=timeout) except requests.exceptions.Timeout: logger.warning( - "Download of %r failed due to connection timeout after " "%.3fs", + "Download of %r failed due to connection timeout after %.3fs", uri, timeout, ) @@ -43,8 +53,7 @@ def download(session, uri, timeout=1.0, chunk_size=4096): content.append(chunk) if time.time() > deadline: logger.warning( - "Download of %r failed due to download taking more " - "than %.3fs", + "Download of %r failed due to download taking more than %.3fs", uri, timeout, ) diff --git a/mopidy/internal/jsonrpc.py b/src/mopidy/internal/jsonrpc.py similarity index 62% rename from mopidy/internal/jsonrpc.py rename to src/mopidy/internal/jsonrpc.py index c20162a26e..aee3a2aedc 100644 --- a/mopidy/internal/jsonrpc.py +++ b/src/mopidy/internal/jsonrpc.py @@ -1,12 +1,61 @@ import inspect import json import traceback +from collections.abc import Callable +from typing import Any, Literal, TypeAlias, TypedDict, TypeVar import pykka +T = TypeVar("T") -class JsonRpcWrapper: +class JsonRpcNotification(TypedDict): + jsonrpc: Literal["2.0"] + method: str + params: list[Any] | dict[str, Any] + + +JsonRpcRequestId: TypeAlias = str | int | float + + +class JsonRpcRequest(JsonRpcNotification): + id: JsonRpcRequestId | None + + +class JsonRpcErrorDetails(TypedDict, total=False): + code: int + message: str + data: Any | None + + +class JsonRpcErrorResponse(TypedDict): + jsonrpc: Literal["2.0"] + id: JsonRpcRequestId | None + error: JsonRpcErrorDetails + + +class JsonRpcSuccessResponse(TypedDict): + jsonrpc: Literal["2.0"] + id: JsonRpcRequestId + result: Any + + +JsonRpcResponse: TypeAlias = JsonRpcErrorResponse | JsonRpcSuccessResponse + + +class JsonRpcParamDescription(TypedDict, total=False): + name: str + default: Any + varargs: bool + kwargs: bool + + +class 
JsonRpcMethodDescription(TypedDict): + description: str | None + params: list[JsonRpcParamDescription] + + +class JsonRpcWrapper: """ Wrap objects and make them accessible through JSON-RPC 2.0 messaging. @@ -52,28 +101,31 @@ class instances method :meth:`default` implemented """ - def __init__(self, objects, decoders=None, encoders=None): - if "" in objects.keys(): - raise AttributeError( - "The empty string is not allowed as an object mount" - ) + def __init__( + self, + objects: dict[str, Any], + decoders: list[Callable[[dict[Any, Any]], Any]] | None = None, + encoders: list[type[json.JSONEncoder]] | None = None, + ): + if "" in objects: + raise AttributeError("The empty string is not allowed as an object mount") self.objects = objects self.decoder = get_combined_json_decoder(decoders or []) self.encoder = get_combined_json_encoder(encoders or []) - def handle_json(self, request): + def handle_json(self, request_json: str) -> str | None: """ Handles an incoming request encoded as a JSON string. Returns a response as a JSON string for commands, and :class:`None` for notifications. - :param request: the serialized JSON-RPC request - :type request: string + :param request_json: the serialized JSON-RPC request + :type request_json: string :rtype: string or :class:`None` """ try: - request = json.loads(request, object_hook=self.decoder) + request: JsonRpcRequest = json.loads(request_json, object_hook=self.decoder) except ValueError: response = JsonRpcParseError().get_response() else: @@ -82,7 +134,10 @@ def handle_json(self, request): return None return json.dumps(response, cls=self.encoder) - def handle_data(self, request): + def handle_data( + self, + request: JsonRpcRequest | list[JsonRpcRequest], + ) -> JsonRpcResponse | list[JsonRpcResponse] | None: """ Handles an incoming request in the form of a Python data structure. 
@@ -95,16 +150,18 @@ def handle_data(self, request): """ if isinstance(request, list): return self._handle_batch(request) - else: - return self._handle_single_request(request) + return self._handle_single_request(request) - def _handle_batch(self, requests): + def _handle_batch( + self, + requests: list[JsonRpcRequest], + ) -> JsonRpcErrorResponse | list[JsonRpcResponse] | None: if not requests: return JsonRpcInvalidRequestError( data="Batch list cannot be empty" ).get_response() - responses = [] + responses: list[JsonRpcResponse] = [] for request in requests: response = self._handle_single_request(request) if response: @@ -112,12 +169,12 @@ def _handle_batch(self, requests): return responses or None - def _handle_single_request(self, request): + def _handle_single_request(self, request: JsonRpcRequest) -> JsonRpcResponse | None: try: self._validate_request(request) args, kwargs = self._get_params(request) - except JsonRpcInvalidRequestError as error: - return error.get_response() + except JsonRpcInvalidRequestError as exc: + return exc.get_response() try: method = self._get_method(request["method"]) @@ -125,69 +182,63 @@ def _handle_single_request(self, request): try: result = method(*args, **kwargs) - if self._is_notification(request): + if "id" not in request or request["id"] is None: + # Request is a notification, so we don't need to respond return None result = self._unwrap_result(result) - return { "jsonrpc": "2.0", "id": request["id"], "result": result, } - except TypeError as error: + except TypeError as exc: raise JsonRpcInvalidParamsError( data={ - "type": error.__class__.__name__, - "message": str(error), + "type": exc.__class__.__name__, + "message": str(exc), "traceback": traceback.format_exc(), } - ) - except Exception as error: + ) from exc + except Exception as exc: raise JsonRpcApplicationError( data={ - "type": error.__class__.__name__, - "message": str(error), + "type": exc.__class__.__name__, + "message": str(exc), "traceback": traceback.format_exc(), } - ) - except JsonRpcError as error: - if self._is_notification(request): + ) from exc + except JsonRpcError as exc: + if "id" not in request or request["id"] is None: + # Request is a notification, so we don't need to respond return None - return error.get_response(request["id"]) + return exc.get_response(request["id"]) - def _validate_request(self, request): - if not isinstance(request, dict): + def _validate_request(self, request: JsonRpcRequest) -> None: + if not isinstance(request, dict): # pyright: ignore[reportUnnecessaryIsInstance] raise JsonRpcInvalidRequestError(data="Request must be an object") if "jsonrpc" not in request: - raise JsonRpcInvalidRequestError( - data="'jsonrpc' member must be included" - ) + raise JsonRpcInvalidRequestError(data="'jsonrpc' member must be included") if request["jsonrpc"] != "2.0": - raise JsonRpcInvalidRequestError( - data="'jsonrpc' value must be '2.0'" - ) + raise JsonRpcInvalidRequestError(data="'jsonrpc' value must be '2.0'") if "method" not in request: - raise JsonRpcInvalidRequestError( - data="'method' member must be included" - ) - if not isinstance(request["method"], str): + raise JsonRpcInvalidRequestError(data="'method' member must be included") + if not isinstance(request["method"], str): # pyright: ignore[reportUnnecessaryIsInstance] raise JsonRpcInvalidRequestError(data="'method' must be a string") - def _get_params(self, request): + def _get_params(self, request: JsonRpcRequest) -> tuple[list[Any], dict[Any, Any]]: if "params" not in request: return [], {} params = 
request["params"] if isinstance(params, list): return params, {} - elif isinstance(params, dict): + if isinstance(params, dict): # pyright: ignore[reportUnnecessaryIsInstance] return [], params - else: - raise JsonRpcInvalidRequestError( - data="'params', if given, must be an array or an object" - ) + raise JsonRpcInvalidRequestError( + data="'params', if given, must be an array or an object" + ) - def _get_method(self, method_path): + def _get_method(self, method_path: str) -> Callable[..., Any]: if callable(self.objects.get(method_path, None)): # The mounted object is the callable return self.objects[method_path] @@ -202,30 +253,25 @@ def _get_method(self, method_path): mount, method_name = method_path.rsplit(".", 1) if method_name.startswith("_"): - raise JsonRpcMethodNotFoundError( - data="Private methods are not exported" - ) + raise JsonRpcMethodNotFoundError(data="Private methods are not exported") try: obj = self.objects[mount] - except KeyError: + except KeyError as exc: raise JsonRpcMethodNotFoundError( data=f"No object found at {mount!r}" - ) + ) from exc try: return getattr(obj, method_name) - except AttributeError: + except AttributeError as exc: raise JsonRpcMethodNotFoundError( data=f"Object mounted at {mount!r} has no member {method_name!r}" - ) - - def _is_notification(self, request): - return "id" not in request + ) from exc - def _unwrap_result(self, result): + def _unwrap_result(self, result: pykka.Future[T] | T) -> T: if isinstance(result, pykka.Future): - result = result.get() + return result.get() # pyright: ignore[reportUnknownVariableType] return result @@ -233,11 +279,14 @@ class JsonRpcError(Exception): code = -32000 message = "Unspecified server error" - def __init__(self, data=None): + def __init__(self, data: Any | None = None) -> None: self.data = data - def get_response(self, request_id=None): - response = { + def get_response( + self, + request_id: JsonRpcRequestId | None = None, + ) -> JsonRpcErrorResponse: + response: JsonRpcErrorResponse = { "jsonrpc": "2.0", "id": request_id, "error": {"code": self.code, "message": self.message}, @@ -272,8 +321,10 @@ class JsonRpcApplicationError(JsonRpcError): message = "Application error" -def get_combined_json_decoder(decoders): - def decode(dct): +def get_combined_json_decoder( + decoders: list[Callable[[dict[Any, Any]], Any]], +) -> Callable[[dict[Any, Any]], Any]: + def decode(dct: dict[Any, Any]) -> dict[Any, Any]: for decoder in decoders: dct = decoder(dct) return dct @@ -281,21 +332,22 @@ def decode(dct): return decode -def get_combined_json_encoder(encoders): +def get_combined_json_encoder( + encoders: list[type[json.JSONEncoder]], +) -> type[json.JSONEncoder]: class JsonRpcEncoder(json.JSONEncoder): - def default(self, obj): + def default(self, o: Any) -> Any: for encoder in encoders: try: - return encoder().default(obj) + return encoder().default(o) except TypeError: pass # Try next encoder - return json.JSONEncoder.default(self, obj) + return json.JSONEncoder.default(self, o) return JsonRpcEncoder class JsonRpcInspector: - """ Inspects a group of classes and functions to create a description of what methods they can expose over JSON-RPC 2.0. 
@@ -317,19 +369,17 @@ class JsonRpcInspector: :type objects: dict """ - def __init__(self, objects): - if "" in objects.keys(): - raise AttributeError( - "The empty string is not allowed as an object mount" - ) + def __init__(self, objects: dict[str, Callable[..., Any]]) -> None: + if "" in objects: + raise AttributeError("The empty string is not allowed as an object mount") self.objects = objects - def describe(self): + def describe(self) -> dict[str, Any]: """ Inspects the object and returns a data structure which describes the available properties and methods. """ - methods = {} + methods: dict[str, JsonRpcMethodDescription] = {} for mount, obj in self.objects.items(): if inspect.isroutine(obj): methods[mount] = self._describe_method(obj) @@ -341,8 +391,8 @@ def describe(self): methods[name] = description return methods - def _get_methods(self, obj): - methods = {} + def _get_methods(self, obj: Any) -> dict[str, JsonRpcMethodDescription]: + methods: dict[str, JsonRpcMethodDescription] = {} for name, value in inspect.getmembers(obj): if name.startswith("_"): continue @@ -353,23 +403,26 @@ def _get_methods(self, obj): methods[name] = method return methods - def _describe_method(self, method): + def _describe_method(self, method: Callable[..., Any]) -> JsonRpcMethodDescription: return { "description": inspect.getdoc(method), "params": self._describe_params(method), } - def _describe_params(self, method): + def _describe_params( + self, + method: Callable[..., Any], + ) -> list[JsonRpcParamDescription]: argspec = inspect.getfullargspec(method) - defaults = argspec.defaults and list(argspec.defaults) or [] + defaults = list(argspec.defaults) if argspec.defaults else [] num_args_without_default = len(argspec.args) - len(defaults) no_defaults = [None] * num_args_without_default defaults = no_defaults + defaults - params = [] + params: list[JsonRpcParamDescription] = [] - for arg, _default in zip(argspec.args, defaults): + for arg, _default in zip(argspec.args, defaults, strict=True): if arg == "self": continue params.append({"name": arg}) diff --git a/mopidy/internal/log.py b/src/mopidy/internal/log.py similarity index 78% rename from mopidy/internal/log.py rename to src/mopidy/internal/log.py index f1599dd26c..d0da17cae0 100644 --- a/mopidy/internal/log.py +++ b/src/mopidy/internal/log.py @@ -4,46 +4,40 @@ import logging.config import logging.handlers import platform -from typing import TYPE_CHECKING +from logging import LogRecord +from typing import TYPE_CHECKING, ClassVar, Literal if TYPE_CHECKING: - from logging import LogRecord - from typing import Dict, List, Optional, Tuple - - from typing_extensions import Literal, TypedDict - - LogColor = Literal[ - "black", - "red", - "green", - "yellow", - "blue", - "magenta", - "cyan", - "white", - ] - - # TODO Move config types into `mopidy.config` - - class Config(TypedDict): - logging: LoggingConfig - loglevels: Dict[str, int] - logcolors: Dict[str, LogColor] - - class LoggingConfig(TypedDict): - verbosity: int - format: str - color: bool - config_file: str - - -LOG_LEVELS: Dict[int, Dict[str, int]] = { - -1: dict(root=logging.ERROR, mopidy=logging.WARNING), - 0: dict(root=logging.ERROR, mopidy=logging.INFO), - 1: dict(root=logging.WARNING, mopidy=logging.DEBUG), - 2: dict(root=logging.INFO, mopidy=logging.DEBUG), - 3: dict(root=logging.DEBUG, mopidy=logging.DEBUG), - 4: dict(root=logging.NOTSET, mopidy=logging.NOTSET), + from mopidy.config import Config, LoggingConfig + +LogLevelName = Literal[ + "critical", + "error", + "warning", + "info", + 
"debug", + "trace", + "all", +] + +LogColorName = Literal[ + "black", + "red", + "green", + "yellow", + "blue", + "magenta", + "cyan", + "white", +] + +LOG_LEVELS: dict[int, dict[str, int]] = { + -1: {"root": logging.ERROR, "mopidy": logging.WARNING}, + 0: {"root": logging.ERROR, "mopidy": logging.INFO}, + 1: {"root": logging.WARNING, "mopidy": logging.DEBUG}, + 2: {"root": logging.INFO, "mopidy": logging.DEBUG}, + 3: {"root": logging.DEBUG, "mopidy": logging.DEBUG}, + 4: {"root": logging.NOTSET, "mopidy": logging.NOTSET}, } # Custom log level which has even lower priority than DEBUG @@ -57,7 +51,7 @@ class DelayedHandler(logging.Handler): def __init__(self) -> None: logging.Handler.__init__(self) self._released = False - self._buffer: List[LogRecord] = [] + self._buffer: list[LogRecord] = [] def handle(self, record: LogRecord) -> bool: if not self._released: @@ -81,7 +75,9 @@ def bootstrap_delayed_logging() -> None: def setup_logging( - config: Config, base_verbosity_level: int, args_verbosity_level: int + config: Config, + base_verbosity_level: int, + args_verbosity_level: int, ) -> None: logging.captureWarnings(True) @@ -136,7 +132,7 @@ def get_verbosity_level( class VerbosityFilter(logging.Filter): - def __init__(self, verbosity_level: int, loglevels: Dict[str, int]): + def __init__(self, verbosity_level: int, loglevels: dict[LogLevelName, int]): self.verbosity_level = verbosity_level self.loglevels = loglevels @@ -153,7 +149,7 @@ def filter(self, record: LogRecord) -> bool: #: Available log colors. -COLORS: List[LogColor] = [ +COLORS: list[LogColorName] = [ "black", "red", "green", @@ -166,7 +162,6 @@ def filter(self, record: LogRecord) -> bool: class ColorizingStreamHandler(logging.StreamHandler): - """ Stream handler which colorizes the log using ANSI escape sequences. 
@@ -180,7 +175,7 @@ class ColorizingStreamHandler(logging.StreamHandler): """ # Map logging levels to (background, foreground, bold/intense) - level_map: Dict[int, Tuple[Optional[LogColor], LogColor, bool]] = { + level_map: ClassVar[dict[int, tuple[LogColorName | None, LogColorName, bool]]] = { TRACE_LOG_LEVEL: (None, "blue", False), logging.DEBUG: (None, "blue", False), logging.INFO: (None, "white", False), @@ -189,14 +184,14 @@ class ColorizingStreamHandler(logging.StreamHandler): logging.CRITICAL: ("red", "white", True), } # Map logger name to foreground colors - logger_map: Dict[str, LogColor] = {} + logger_map: dict[LogLevelName, LogColorName] csi = "\x1b[" reset = "\x1b[0m" is_windows = platform.system() == "Windows" - def __init__(self, logger_colors: Dict[str, LogColor]) -> None: + def __init__(self, logger_colors: dict[LogLevelName, LogColorName]) -> None: super().__init__() self.logger_map = logger_colors @@ -229,8 +224,8 @@ def format(self, record: LogRecord) -> str: def colorize( self, message: str, - bg: Optional[LogColor] = None, - fg: Optional[LogColor] = None, + bg: LogColorName | None = None, + fg: LogColorName | None = None, bold: bool = False, ) -> str: params = [] @@ -241,7 +236,5 @@ def colorize( if bold: params.append("1") if params: - message = "".join( - (self.csi, ";".join(params), "m", message, self.reset) - ) + message = "".join((self.csi, ";".join(params), "m", message, self.reset)) return message diff --git a/mopidy/internal/models.py b/src/mopidy/internal/models.py similarity index 99% rename from mopidy/internal/models.py rename to src/mopidy/internal/models.py index 77bb3de890..7861e1add2 100644 --- a/mopidy/internal/models.py +++ b/src/mopidy/internal/models.py @@ -75,7 +75,6 @@ class PlaybackState(ValidatedImmutableObject): class TracklistState(ValidatedImmutableObject): - """ State of the tracklist controller. Internally used for save/load state. @@ -114,7 +113,6 @@ class TracklistState(ValidatedImmutableObject): class CoreState(ValidatedImmutableObject): - """ State of all Core controller. Internally used for save/load state. diff --git a/mopidy/internal/network.py b/src/mopidy/internal/network.py similarity index 93% rename from mopidy/internal/network.py rename to src/mopidy/internal/network.py index 04a2985947..3a48de5f16 100644 --- a/mopidy/internal/network.py +++ b/src/mopidy/internal/network.py @@ -11,13 +11,13 @@ def try_ipv6_socket() -> bool: return False try: socket.socket(socket.AF_INET6).close() - return True except OSError as exc: logger.debug( - f"Platform supports IPv6, but socket creation failed, " - f"disabling: {exc}" + f"Platform supports IPv6, but socket creation failed, disabling: {exc}" ) - return False + return False + else: + return True #: Boolean value that indicates if creating an IPv6 socket will succeed. 
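Editorial note (not part of the patch): the jsonrpc.py hunks above change notification detection from `"id" not in request` to `"id" not in request or request["id"] is None`, so a request carrying an explicit null id is now treated as a notification and gets no response. A minimal usage sketch of the typed wrapper, assuming the module is importable under its new location as `mopidy.internal.jsonrpc`; `Calculator` is a hypothetical mounted object used only for illustration:

    import json

    from mopidy.internal.jsonrpc import JsonRpcWrapper


    class Calculator:
        # Hypothetical example object, not part of Mopidy.
        def add(self, a, b):
            return a + b


    rpc = JsonRpcWrapper(objects={"calc": Calculator()})

    # A request with an id gets a serialized JSON response back.
    rpc.handle_json(json.dumps(
        {"jsonrpc": "2.0", "id": 1, "method": "calc.add", "params": [1, 2]}
    ))
    # -> '{"jsonrpc": "2.0", "id": 1, "result": 3}'

    # With "id": null (or no "id" member at all) the request is a
    # notification, so handle_json() returns None instead of a response.
    rpc.handle_json(json.dumps(
        {"jsonrpc": "2.0", "id": None, "method": "calc.add", "params": [1, 2]}
    ))
    # -> None
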
diff --git a/mopidy/internal/path.py b/src/mopidy/internal/path.py similarity index 61% rename from mopidy/internal/path.py rename to src/mopidy/internal/path.py index 9493592400..90992705d7 100644 --- a/mopidy/internal/path.py +++ b/src/mopidy/internal/path.py @@ -1,9 +1,12 @@ import logging import pathlib import re -import urllib +import urllib.parse +from os import PathLike +from typing import AnyStr from mopidy.internal import xdg +from mopidy.types import Uri logger = logging.getLogger(__name__) @@ -11,41 +14,49 @@ XDG_DIRS = xdg.get_dirs() -def get_or_create_dir(dir_path): +def get_or_create_dir(dir_path: str | PathLike[str]) -> pathlib.Path: dir_path = expand_path(dir_path) if dir_path.is_file(): raise OSError( f"A file with the same name as the desired dir, " f"{dir_path!r}, already exists." ) - elif not dir_path.is_dir(): + if not dir_path.is_dir(): logger.info(f"Creating dir {dir_path.as_uri()}") dir_path.mkdir(mode=0o755, parents=True) return dir_path -def get_or_create_file(file_path, mkdir=True, content=None): +def get_or_create_file( + file_path: str | PathLike[str], + mkdir: bool = True, + content: AnyStr | None = None, +) -> pathlib.Path: file_path = expand_path(file_path) - if isinstance(content, str): - content = content.encode() + if file_path.is_file(): + return file_path if mkdir: get_or_create_dir(file_path.parent) - if not file_path.is_file(): - logger.info(f"Creating file {file_path.as_uri()}") - file_path.touch(exist_ok=False) - if content is not None: + logger.info(f"Creating file {file_path.as_uri()}") + file_path.touch(exist_ok=False) + match content: + case str(): + file_path.write_text(content) + case bytes(): file_path.write_bytes(content) + case None: + pass return file_path -def get_unix_socket_path(socket_path): +def get_unix_socket_path(socket_path: str) -> pathlib.Path | None: match = re.search("^unix:(.*)", socket_path) if not match: return None - return match.group(1) + return pathlib.Path(match.group(1)) -def path_to_uri(path): +def path_to_uri(path: str | PathLike[str]) -> str: """ Convert OS specific path to file:// URI. @@ -58,7 +69,7 @@ def path_to_uri(path): return pathlib.Path(path).as_uri() -def uri_to_path(uri): +def uri_to_path(uri: Uri | str) -> pathlib.Path: """ Convert an URI to a OS specific path. """ @@ -67,27 +78,30 @@ def uri_to_path(uri): return pathlib.Path(unicode_path) -def expand_path(path): +def expand_path(path: bytes | str | PathLike[str]) -> pathlib.Path: if isinstance(path, bytes): path = path.decode(errors="surrogateescape") - path = str(pathlib.Path(path)) + path = str(pathlib.Path(path)) # pyright: ignore[reportArgumentType,reportCallIssue] for xdg_var, xdg_dir in XDG_DIRS.items(): path = path.replace("$" + xdg_var, str(xdg_dir)) if "$" in path: - return None + raise ValueError(f"Unexpanded '$...' 
in path {path!r}") return pathlib.Path(path).expanduser().resolve() -def is_path_inside_base_dir(path, base_path): +def is_path_inside_base_dir( + path: bytes | str | PathLike[str], + base_path: bytes | str | PathLike[str], +) -> bool: if isinstance(path, bytes): path = path.decode(errors="surrogateescape") if isinstance(base_path, bytes): base_path = base_path.decode(errors="surrogateescape") - path = pathlib.Path(path).resolve() - base_path = pathlib.Path(base_path).resolve() + path = pathlib.Path(path).resolve() # pyright: ignore[reportArgumentType] + base_path = pathlib.Path(base_path).resolve() # pyright: ignore[reportArgumentType] if path.is_file(): # Use dir of file for prefix comparision, so we don't accept diff --git a/mopidy/internal/playlists.py b/src/mopidy/internal/playlists.py similarity index 84% rename from mopidy/internal/playlists.py rename to src/mopidy/internal/playlists.py index bc66c32d4d..dd5307285c 100644 --- a/mopidy/internal/playlists.py +++ b/src/mopidy/internal/playlists.py @@ -1,6 +1,6 @@ import configparser import io -import xml.etree.ElementTree as elementtree # noqa: N813 +from xml.etree import ElementTree from mopidy.internal import validation @@ -33,9 +33,9 @@ def detect_xspf_header(data): try: data = io.BytesIO(data) - for _event, element in elementtree.iterparse(data, events=["start"]): + for _event, element in ElementTree.iterparse(data, events=["start"]): return element.tag.lower() == "{http://xspf.org/ns/0/}playlist" - except elementtree.ParseError: + except ElementTree.ParseError: pass return False @@ -47,9 +47,9 @@ def detect_asx_header(data): try: data = io.BytesIO(data) - for _event, element in elementtree.iterparse(data, events=["start"]): + for _event, element in ElementTree.iterparse(data, events=["start"]): return element.tag.lower() == "asx" - except elementtree.ParseError: + except ElementTree.ParseError: pass return False @@ -90,11 +90,14 @@ def parse_pls(data): def parse_xspf(data): + element = None try: # Last element will be root. - for _event, element in elementtree.iterparse(io.BytesIO(data)): + for _event, element in ElementTree.iterparse(io.BytesIO(data)): element.tag = element.tag.lower() # normalize - except elementtree.ParseError: + except ElementTree.ParseError: + return + if element is None: return ns = "http://xspf.org/ns/0/" @@ -104,11 +107,14 @@ def parse_xspf(data): def parse_asx(data): + element = None try: # Last element will be root. - for _event, element in elementtree.iterparse(io.BytesIO(data)): + for _event, element in ElementTree.iterparse(io.BytesIO(data)): element.tag = element.tag.lower() # normalize - except elementtree.ParseError: + except ElementTree.ParseError: + return + if element is None: return for ref in element.findall("entry/ref[@href]"): diff --git a/mopidy/internal/process.py b/src/mopidy/internal/process.py similarity index 90% rename from mopidy/internal/process.py rename to src/mopidy/internal/process.py index 558f7fe87c..d893c8c270 100644 --- a/mopidy/internal/process.py +++ b/src/mopidy/internal/process.py @@ -13,7 +13,7 @@ def exit_process(): logger.debug("Interrupted main") -def sigterm_handler(signum, frame): +def sigterm_handler(_signum, _frame): """A :mod:`signal` handler which will exit the program on signal. 
This function is not called when the process' main thread is running a GLib @@ -37,9 +37,7 @@ def stop_actors_by_class(klass): def stop_remaining_actors(): num_actors = len(pykka.ActorRegistry.get_all()) while num_actors: - logger.error( - "There are actor threads still running, this is probably a bug" - ) + logger.error("There are actor threads still running, this is probably a bug") logger.debug( "Seeing %d actor and %d non-actor thread(s): %s", num_actors, diff --git a/mopidy/internal/storage.py b/src/mopidy/internal/storage.py similarity index 100% rename from mopidy/internal/storage.py rename to src/mopidy/internal/storage.py diff --git a/mopidy/internal/timer.py b/src/mopidy/internal/timer.py similarity index 100% rename from mopidy/internal/timer.py rename to src/mopidy/internal/timer.py diff --git a/src/mopidy/internal/validation.py b/src/mopidy/internal/validation.py new file mode 100644 index 0000000000..24b69379e5 --- /dev/null +++ b/src/mopidy/internal/validation.py @@ -0,0 +1,182 @@ +from __future__ import annotations + +import urllib.parse +from collections.abc import Iterable, Mapping +from typing import Any, Literal, TypeVar, Union, get_args + +from mopidy import exceptions +from mopidy.audio.constants import PlaybackState +from mopidy.types import ( + DistinctField, + Query, + QueryValue, + SearchField, + TracklistField, +) + + +def get_literals(literal_type: Any) -> set[str]: + # Check if it's a union + if hasattr(literal_type, "__origin__") and literal_type.__origin__ is Union: + literals = set() + for arg in get_args(literal_type): + literals.update(get_literals(arg)) + return literals + + # Check if it's a literal + if hasattr(literal_type, "__origin__") and literal_type.__origin__ is Literal: + return set(get_args(literal_type)) + + raise ValueError("Provided type is neither a Union nor a Literal type.") + + +T = TypeVar("T") + +PLAYBACK_STATES: set[str] = {ps.value for ps in PlaybackState} + +FIELD_TYPES: dict[str, type] = { + "album": str, + "albumartist": str, + "any": int | str, + "artist": str, + "comment": str, + "composer": str, + "date": str, + "disc_no": int, + "genre": str, + "musicbrainz_id": str, + "musicbrainz_albumid": str, + "musicbrainz_artistid": str, + "musicbrainz_trackid": str, + "name": str, + "performer": str, + "tlid": int, + "track_name": str, + "track_no": int, + "uri": str, +} +DISTINCT_FIELDS: dict[str, type] = { + x: FIELD_TYPES[x] for x in get_literals(DistinctField) +} +SEARCH_FIELDS: dict[str, type] = {x: FIELD_TYPES[x] for x in get_literals(SearchField)} +TRACKLIST_FIELDS: dict[str, type] = { + x: FIELD_TYPES[x] for x in get_literals(TracklistField) - {"tlid"} +} + + +# TODO: _check_iterable(check, msg, **kwargs) + [check(a) for a in arg]? 
+def _check_iterable( + arg, + msg, + **kwargs: Any, +) -> None: + """Ensure we have an iterable which is not a string or an iterator""" + if isinstance(arg, str): + raise exceptions.ValidationError(msg.format(arg=arg, **kwargs)) + if not isinstance(arg, Iterable): + raise exceptions.ValidationError(msg.format(arg=arg, **kwargs)) + if iter(arg) is iter(arg): + raise exceptions.ValidationError(msg.format(arg=arg, **kwargs)) + + +def check_choice( + arg: T, + choices: Iterable[T], + msg: str = "Expected one of {choices}, not {arg!r}", +) -> None: + if arg not in choices: + raise exceptions.ValidationError(msg.format(arg=arg, choices=tuple(choices))) + + +def check_boolean( + arg: bool, + msg: str = "Expected a boolean, not {arg!r}", +) -> None: + check_instance(arg, bool, msg=msg) + + +def check_instance( + arg: T, + cls: type[T], + msg: str = "Expected a {name} instance, not {arg!r}", +) -> None: + if not isinstance(arg, cls): + raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__)) + + +def check_instances( + arg: Iterable[Any], + cls: type, + msg: str = "Expected a list of {name}, not {arg!r}", +) -> None: + _check_iterable(arg, msg, name=cls.__name__) + if not all(isinstance(instance, cls) for instance in arg): + raise exceptions.ValidationError(msg.format(arg=arg, name=cls.__name__)) + + +def check_integer( + arg: int, + min: int | None = None, + max: int | None = None, +) -> None: + if not isinstance(arg, int): + raise exceptions.ValidationError(f"Expected an integer, not {arg!r}") + if min is not None and arg < min: + raise exceptions.ValidationError( + f"Expected number larger or equal to {min}, not {arg!r}" + ) + if max is not None and arg > max: + raise exceptions.ValidationError( + f"Expected number smaller or equal to {max}, not {arg!r}" + ) + + +def check_query( + arg: Query[SearchField] | Query[TracklistField], + fields: Iterable[str] | None = None, +) -> None: + if fields is None: + fields = SEARCH_FIELDS.keys() + # TODO: normalize name -> track_name + # TODO: normalize value -> [value] + # TODO: normalize blank -> [] or just remove field? 
+ + if not isinstance(arg, Mapping): + raise exceptions.ValidationError(f"Expected a query dictionary, not {arg!r}") + + for key, value in arg.items(): + check_choice( + key, + fields, + msg="Expected query field to be one of {choices}, not {arg!r}", + ) + msg = 'Expected "{key}" to be list of strings, not {arg!r}' + _check_iterable(value, msg, key=key) + [_check_query_value(key, v, msg) for v in value] + + +def _check_query_value( + key: DistinctField | (SearchField | TracklistField), + arg: QueryValue, + msg: str, +) -> None: + if not isinstance(arg, str) or not arg.strip(): + raise exceptions.ValidationError(msg.format(arg=arg, key=key)) + + +def check_uri( + arg: str, + msg="Expected a valid URI, not {arg!r}", +) -> None: + if not isinstance(arg, str): + raise exceptions.ValidationError(msg.format(arg=arg)) + if urllib.parse.urlparse(arg).scheme == "": + raise exceptions.ValidationError(msg.format(arg=arg)) + + +def check_uris( + arg: Iterable[str], + msg="Expected a list of URIs, not {arg!r}", +) -> None: + _check_iterable(arg, msg) + [check_uri(a, msg) for a in arg] diff --git a/mopidy/internal/xdg.py b/src/mopidy/internal/xdg.py similarity index 89% rename from mopidy/internal/xdg.py rename to src/mopidy/internal/xdg.py index 66e732a427..1fbef592a6 100644 --- a/mopidy/internal/xdg.py +++ b/src/mopidy/internal/xdg.py @@ -3,7 +3,7 @@ import pathlib -def get_dirs(): +def get_dirs() -> dict[str, pathlib.Path]: """Returns a dict of all the known XDG Base Directories for the current user. The keys ``XDG_CACHE_DIR``, ``XDG_CONFIG_DIR``, and ``XDG_DATA_DIR`` is @@ -33,7 +33,7 @@ def get_dirs(): return dirs -def _get_user_dirs(xdg_config_dir): +def _get_user_dirs(xdg_config_dir: pathlib.Path) -> dict[str, pathlib.Path]: """Returns a dict of XDG dirs read from ``$XDG_CONFIG_HOME/user-dirs.dirs``. @@ -57,9 +57,9 @@ def _get_user_dirs(xdg_config_dir): config = configparser.RawConfigParser() config.read_string(data.decode()) - result = {} + result: dict[str, pathlib.Path] = {} for k, v in config.items("XDG_USER_DIRS"): - if v is None: + if v is None: # pyright: ignore[reportUnnecessaryComparison] continue if isinstance(k, bytes): k = k.decode() diff --git a/mopidy/listener.py b/src/mopidy/listener.py similarity index 93% rename from mopidy/listener.py rename to src/mopidy/listener.py index 36a1ce15b6..d604cfac3e 100644 --- a/mopidy/listener.py +++ b/src/mopidy/listener.py @@ -21,8 +21,8 @@ def send(cls, event, **kwargs): # quickly deadlock. listener.tell( ProxyCall( - attr_path=["on_event"], - args=[event], + attr_path=("on_event",), + args=(event,), kwargs=kwargs, ) ) @@ -30,8 +30,7 @@ def send(cls, event, **kwargs): class Listener: def on_event(self, event, **kwargs): - """ - Called on all events. + """Called on all events. *MAY* be implemented by actor. By default, this method forwards the event to the specific event methods. 
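Editorial note (not part of the patch): the new src/mopidy/internal/validation.py above raises mopidy.exceptions.ValidationError for malformed input. A short behavioral sketch, assuming "artist" is among the SearchField literals defined in mopidy.types (not shown in this diff):

    from mopidy import exceptions
    from mopidy.internal import validation

    # URIs must be strings with a scheme.
    validation.check_uri("file:///music/song.flac")  # OK

    # Query values must be lists of non-blank strings.
    validation.check_query({"artist": ["Beethoven"]})  # OK

    try:
        # A bare string is rejected by _check_iterable().
        validation.check_query({"artist": "Beethoven"})
    except exceptions.ValidationError as exc:
        print(exc)  # Expected "artist" to be list of strings, not 'Beethoven'
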
diff --git a/mopidy/m3u/__init__.py b/src/mopidy/m3u/__init__.py similarity index 69% rename from mopidy/m3u/__init__.py rename to src/mopidy/m3u/__init__.py index ab6f755240..f6b5a8e114 100644 --- a/mopidy/m3u/__init__.py +++ b/src/mopidy/m3u/__init__.py @@ -1,23 +1,22 @@ import logging -import os +from pathlib import Path import mopidy from mopidy import config, ext +from mopidy.config import ConfigSchema logger = logging.getLogger(__name__) class Extension(ext.Extension): - dist_name = "Mopidy-M3U" ext_name = "m3u" version = mopidy.__version__ - def get_default_config(self): - conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") - return config.read(conf_file) + def get_default_config(self) -> str: + return config.read(Path(__file__).parent / "ext.conf") - def get_config_schema(self): + def get_config_schema(self) -> ConfigSchema: schema = super().get_config_schema() schema["base_dir"] = config.Path(optional=True) schema["default_encoding"] = config.String() @@ -25,7 +24,7 @@ def get_config_schema(self): schema["playlists_dir"] = config.Path(optional=True) return schema - def setup(self, registry): + def setup(self, registry: ext.Registry) -> None: from .backend import M3UBackend registry.add("backend", M3UBackend) diff --git a/src/mopidy/m3u/backend.py b/src/mopidy/m3u/backend.py new file mode 100644 index 0000000000..b5a47d9d62 --- /dev/null +++ b/src/mopidy/m3u/backend.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, ClassVar + +import pykka + +from mopidy import backend +from mopidy.types import UriScheme + +from . import playlists + +if TYPE_CHECKING: + from mopidy.audio import AudioProxy + from mopidy.ext import Config + + +class M3UBackend(pykka.ThreadingActor, backend.Backend): + uri_schemes: ClassVar[list[UriScheme]] = [UriScheme("m3u")] + + def __init__( + self, + config: Config, + audio: AudioProxy, # noqa: ARG002 + ) -> None: + super().__init__() + self.playlists = playlists.M3UPlaylistsProvider(self, config) diff --git a/mopidy/m3u/ext.conf b/src/mopidy/m3u/ext.conf similarity index 100% rename from mopidy/m3u/ext.conf rename to src/mopidy/m3u/ext.conf diff --git a/mopidy/m3u/playlists.py b/src/mopidy/m3u/playlists.py similarity index 66% rename from mopidy/m3u/playlists.py rename to src/mopidy/m3u/playlists.py index 6f89d95697..c5f19a8daa 100644 --- a/mopidy/m3u/playlists.py +++ b/src/mopidy/m3u/playlists.py @@ -1,20 +1,32 @@ +from __future__ import annotations + import contextlib import locale import logging import operator import os -import pathlib import tempfile +from collections.abc import Generator +from pathlib import Path +from typing import IO, TYPE_CHECKING, Any, cast from mopidy import backend +from mopidy.exceptions import BackendError from mopidy.internal import path +from mopidy.m3u.types import M3UConfig from . 
import Extension, translator +if TYPE_CHECKING: + from mopidy.backend import Backend + from mopidy.ext import Config + from mopidy.models import Playlist, Ref + from mopidy.types import Uri + logger = logging.getLogger(__name__) -def log_environment_error(message, error): +def log_environment_error(message: str, error: EnvironmentError) -> None: if isinstance(error.strerror, bytes): strerror = error.strerror.decode(locale.getpreferredencoding()) else: @@ -23,11 +35,16 @@ def log_environment_error(message, error): @contextlib.contextmanager -def replace(path, mode="w+b", encoding=None, errors=None): +def replace( + path: Path, + mode: str = "w+b", + encoding: str | None = None, + errors: str | None = None, +) -> Generator[IO[Any], None, None]: (fd, tempname) = tempfile.mkstemp(dir=str(path.parent)) - tempname = pathlib.Path(tempname) + tempname = Path(tempname) try: - fp = open(fd, mode, encoding=encoding, errors=errors) + fp = open(fd, mode, encoding=encoding, errors=errors) # noqa: PTH123, SIM115 except Exception: tempname.unlink() os.close(fd) @@ -45,35 +62,37 @@ def replace(path, mode="w+b", encoding=None, errors=None): class M3UPlaylistsProvider(backend.PlaylistsProvider): - def __init__(self, backend, config): + def __init__(self, backend: Backend, config: Config) -> None: super().__init__(backend) - ext_config = config[Extension.ext_name] - if ext_config["playlists_dir"] is None: - self._playlists_dir = Extension.get_data_dir(config) - else: - self._playlists_dir = path.expand_path(ext_config["playlists_dir"]) - if ext_config["base_dir"] is None: - self._base_dir = self._playlists_dir - else: - self._base_dir = path.expand_path(ext_config["base_dir"]) + ext_config = cast(M3UConfig, config[Extension.ext_name]) + + self._playlists_dir = ( + path.expand_path(ext_config["playlists_dir"]) + if ext_config["playlists_dir"] + else Extension.get_data_dir(config) + ) + self._base_dir = ( + path.expand_path(ext_config["base_dir"]) + if ext_config["base_dir"] + else self._playlists_dir + ) self._default_encoding = ext_config["default_encoding"] self._default_extension = ext_config["default_extension"] - def as_list(self): + def as_list(self) -> list[Ref]: result = [] for entry in self._playlists_dir.iterdir(): if entry.suffix not in [".m3u", ".m3u8"]: continue - elif not entry.is_file(): + if not entry.is_file(): continue - else: - playlist_path = entry.relative_to(self._playlists_dir) - result.append(translator.path_to_ref(playlist_path)) + playlist_path = entry.relative_to(self._playlists_dir) + result.append(translator.path_to_ref(playlist_path)) result.sort(key=operator.attrgetter("name")) return result - def create(self, name): + def create(self, name: str) -> Playlist | None: path = translator.path_from_name(name.strip(), self._default_extension) try: with self._open(path, "w"): @@ -84,7 +103,7 @@ def create(self, name): else: return translator.playlist(path, [], mtime) - def delete(self, uri): + def delete(self, uri: Uri) -> bool: path = translator.uri_to_path(uri) if not self._is_in_basedir(path): logger.debug("Ignoring path outside playlist dir: %s", uri) @@ -97,7 +116,7 @@ def delete(self, uri): else: return True - def get_items(self, uri): + def get_items(self, uri: Uri) -> list[Ref] | None: path = translator.uri_to_path(uri) if not self._is_in_basedir(path): logger.debug("Ignoring path outside playlist dir: %s", uri) @@ -110,7 +129,7 @@ def get_items(self, uri): else: return items - def lookup(self, uri): + def lookup(self, uri: Uri) -> Playlist | None: path = 
translator.uri_to_path(uri) if not self._is_in_basedir(path): logger.debug("Ignoring path outside playlist dir: %s", uri) @@ -124,10 +143,10 @@ def lookup(self, uri): else: return translator.playlist(path, items, mtime) - def refresh(self): + def refresh(self) -> None: pass # nothing to do - def save(self, playlist): + def save(self, playlist: Playlist) -> Playlist | None: path = translator.uri_to_path(playlist.uri) if not self._is_in_basedir(path): logger.debug("Ignoring path outside playlist dir: %s", playlist.uri) @@ -147,28 +166,25 @@ def save(self, playlist): else: return translator.playlist(path, playlist.tracks, mtime) - def _abspath(self, path): - if not path.is_absolute(): - return self._playlists_dir / path - else: + def _abspath(self, path: Path) -> Path: + if path.is_absolute(): return path + return self._playlists_dir / path - def _is_in_basedir(self, local_path): + def _is_in_basedir(self, local_path: Path) -> bool: local_path = self._abspath(local_path) return path.is_path_inside_base_dir(local_path, self._playlists_dir) - def _open(self, path, mode="r"): - if path.suffix == ".m3u8": - encoding = "utf-8" - else: - encoding = self._default_encoding + def _open( + self, path: Path, mode: str = "r" + ) -> contextlib._GeneratorContextManager[IO[Any]] | IO[Any]: + encoding = "utf-8" if path.suffix == ".m3u8" else self._default_encoding if not path.is_absolute(): path = self._abspath(path) if not self._is_in_basedir(path): - raise Exception( - f"Path {path!r} is not inside playlist dir {self._playlist_dir!r}" + raise BackendError( + f"Path {path!r} is not inside playlist dir {self._playlists_dir!r}" ) if "w" in mode: return replace(path, mode, encoding=encoding, errors="replace") - else: - return path.open(mode, encoding=encoding, errors="replace") + return path.open(mode, encoding=encoding, errors="replace") diff --git a/mopidy/m3u/translator.py b/src/mopidy/m3u/translator.py similarity index 55% rename from mopidy/m3u/translator.py rename to src/mopidy/m3u/translator.py index 31afcf334c..6f8210bcd3 100644 --- a/mopidy/m3u/translator.py +++ b/src/mopidy/m3u/translator.py @@ -1,48 +1,60 @@ +from __future__ import annotations + import os -import pathlib -import urllib +import urllib.parse +from collections.abc import Iterable +from pathlib import Path +from typing import IO -from mopidy import models from mopidy.internal import path +from mopidy.models import Playlist, Ref, Track +from mopidy.types import Uri from . 
import Extension -def path_to_uri(path, scheme=Extension.ext_name): +def path_to_uri( + path: Path, + scheme: str = Extension.ext_name, +) -> Uri: """Convert file path to URI.""" bytes_path = os.path.normpath(bytes(path)) uripath = urllib.parse.quote_from_bytes(bytes_path) - return urllib.parse.urlunsplit((scheme, None, uripath, None, None)) + return Uri(urllib.parse.urlunsplit((scheme, None, uripath, None, None))) -def uri_to_path(uri): +def uri_to_path(uri: Uri) -> Path: """Convert URI to file path.""" return path.uri_to_path(uri) -def name_from_path(path): +def name_from_path(path: Path) -> str | None: """Extract name from file path.""" - name = bytes(pathlib.Path(path.stem)) + name = bytes(Path(path.stem)) try: return name.decode(errors="replace") except UnicodeError: return None -def path_from_name(name, ext=None, sep="|"): +def path_from_name( + name: str, + ext: str | None = None, + sep: str = "|", +) -> Path: """Convert name with optional extension to file path.""" - if ext: - name = name.replace(os.sep, sep) + ext - else: - name = name.replace(os.sep, sep) - return pathlib.Path(name) + name = name.replace(os.sep, sep) + ext if ext else name.replace(os.sep, sep) + return Path(name) -def path_to_ref(path): - return models.Ref.playlist(uri=path_to_uri(path), name=name_from_path(path)) +def path_to_ref(path: Path) -> Ref: + return Ref.playlist(uri=path_to_uri(path), name=name_from_path(path)) -def load_items(fp, basedir): +def load_items( + fp: IO[str], + basedir: Path, +) -> list[Ref]: refs = [] name = None for line in filter(None, (line.strip() for line in fp)): @@ -50,7 +62,7 @@ def load_items(fp, basedir): if line.startswith("#EXTINF:"): name = line.partition(",")[2] continue - elif not urllib.parse.urlsplit(line).scheme: + if not urllib.parse.urlsplit(line).scheme: path = basedir / line if not name: name = name_from_path(path) @@ -58,12 +70,15 @@ def load_items(fp, basedir): else: # TODO: ensure this is urlencoded uri = line # do *not* extract name from (stream?) 
URI path - refs.append(models.Ref.track(uri=uri, name=name)) + refs.append(Ref.track(uri=uri, name=name)) name = None return refs -def dump_items(items, fp): +def dump_items( + items: Iterable[Ref | Track], + fp: IO[str], +) -> None: if any(item.name for item in items): print("#EXTM3U", file=fp) for item in items: @@ -76,12 +91,16 @@ def dump_items(items, fp): print(item.uri, file=fp) -def playlist(path, items=None, mtime=None): +def playlist( + path: Path, + items: Iterable[Ref | Track] | None = None, + mtime: float | None = None, +) -> Playlist: if items is None: items = [] - return models.Playlist( + return Playlist( uri=path_to_uri(path), name=name_from_path(path), - tracks=[models.Track(uri=item.uri, name=item.name) for item in items], + tracks=[Track(uri=item.uri, name=item.name) for item in items], last_modified=(int(mtime * 1000) if mtime else None), ) diff --git a/src/mopidy/m3u/types.py b/src/mopidy/m3u/types.py new file mode 100644 index 0000000000..8176d1ed49 --- /dev/null +++ b/src/mopidy/m3u/types.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Literal, TypedDict + + +class M3UConfig(TypedDict): + base_dir: Path | None + default_encoding: str + default_extension: Literal[".m3u", ".m3u8"] + playlists_dir: Path | None diff --git a/mopidy/mixer.py b/src/mopidy/mixer.py similarity index 59% rename from mopidy/mixer.py rename to src/mopidy/mixer.py index 239e60eb64..de513342e3 100644 --- a/mopidy/mixer.py +++ b/src/mopidy/mixer.py @@ -1,25 +1,24 @@ +# ruff: noqa: ARG002 + from __future__ import annotations import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, ClassVar + +import pykka +from pykka.typing import ActorMemberMixin, proxy_field, proxy_method from mopidy import listener if TYPE_CHECKING: - from typing import Any, Dict, Optional - - from typing_extensions import Literal - - MixerEvent = Literal["mute_changed", "volume_changed"] + from mopidy.types import Percentage logger = logging.getLogger(__name__) class Mixer: - - """ - Audio mixer API + """Audio mixer API. If the mixer has problems during initialization it should raise :exc:`mopidy.exceptions.MixerError` with a descriptive error message. This @@ -27,10 +26,9 @@ class Mixer: the issue. :param config: the entire Mopidy configuration - :type config: dict """ - name: str + name: ClassVar[str] = "" """ Name of the mixer. @@ -39,12 +37,11 @@ class Mixer: mixer. """ - def __init__(self, config: Dict) -> None: + def __init__(self, config: dict) -> None: pass - def get_volume(self) -> Optional[int]: - """ - Get volume level of the mixer on a linear scale from 0 to 100. + def get_volume(self) -> Percentage | None: + """Get volume level of the mixer on a linear scale from 0 to 100. Example values: @@ -56,26 +53,22 @@ def get_volume(self) -> Optional[int]: Volume is unknown. *MAY be implemented by subclass.* - - :rtype: int in range [0..100] or :class:`None` """ return None - def set_volume(self, volume: int) -> bool: - """ - Set volume level of the mixer. + def set_volume(self, volume: Percentage) -> bool: + """Set volume level of the mixer. *MAY be implemented by subclass.* + Returns :class:`True` if successful, :class:`False` otherwise. + :param volume: Volume in the range [0..100] - :type volume: int - :rtype: :class:`True` if success, :class:`False` if failure """ return False - def trigger_volume_changed(self, volume: int) -> None: - """ - Send ``volume_changed`` event to all mixer listeners. 
+ def trigger_volume_changed(self, volume: Percentage) -> None: + """Send ``volume_changed`` event to all mixer listeners. This method should be called by subclasses when the volume is changed, either because of a call to :meth:`set_volume` or because of any @@ -84,32 +77,29 @@ def trigger_volume_changed(self, volume: int) -> None: logger.debug("Mixer event: volume_changed(volume=%d)", volume) MixerListener.send("volume_changed", volume=volume) - def get_mute(self) -> Optional[bool]: - """ - Get mute state of the mixer. + def get_mute(self) -> bool | None: + """Get mute state of the mixer. *MAY be implemented by subclass.* - :rtype: :class:`True` if muted, :class:`False` if unmuted, - :class:`None` if unknown. + Returns :class:`True` if muted, :class:`False` if unmuted, and + :class:`None` if unknown. """ return None def set_mute(self, mute: bool) -> bool: - """ - Mute or unmute the mixer. + """Mute or unmute the mixer. *MAY be implemented by subclass.* + Returns :class:`True` if successful, :class:`False` otherwise. + :param mute: :class:`True` to mute, :class:`False` to unmute - :type mute: bool - :rtype: :class:`True` if success, :class:`False` if failure """ return False def trigger_mute_changed(self, mute: bool) -> None: - """ - Send ``mute_changed`` event to all mixer listeners. + """Send ``mute_changed`` event to all mixer listeners. This method should be called by subclasses when the mute state is changed, either because of a call to :meth:`set_mute` or because of @@ -124,9 +114,7 @@ def ping(self) -> bool: class MixerListener(listener.Listener): - - """ - Marker interface for recipients of events sent by the mixer actor. + """Marker interface for recipients of events sent by the mixer actor. Any Pykka actor that mixes in this class will receive calls to the methods defined here when the corresponding events happen in the mixer actor. This @@ -136,28 +124,40 @@ class MixerListener(listener.Listener): """ @staticmethod - def send(event: MixerEvent, **kwargs: Any) -> None: - """Helper to allow calling of mixer listener events""" + def send(event: str, **kwargs: Any) -> None: + """Helper to allow calling of mixer listener events.""" listener.send(MixerListener, event, **kwargs) - def volume_changed(self, volume: int) -> None: - """ - Called after the volume has changed. + def volume_changed(self, volume: Percentage) -> None: + """Called after the volume has changed. *MAY* be implemented by actor. :param volume: the new volume - :type volume: int in range [0..100] """ - pass def mute_changed(self, mute: bool) -> None: - """ - Called after the mute state has changed. + """Called after the mute state has changed. *MAY* be implemented by actor. 
:param mute: :class:`True` if muted, :class:`False` if not muted :type mute: bool """ - pass + + +class MixerActor(pykka.ThreadingActor, Mixer): + pass + + +class MixerProxy(ActorMemberMixin, pykka.ActorProxy[MixerActor]): + """Mixer wrapped in a Pykka actor proxy.""" + + name = proxy_field(MixerActor.name) + get_volume = proxy_method(MixerActor.get_volume) + set_volume = proxy_method(MixerActor.set_volume) + trigger_volume_changed = proxy_method(MixerActor.trigger_volume_changed) + get_mute = proxy_method(MixerActor.get_mute) + set_mute = proxy_method(MixerActor.set_mute) + trigger_mute_changed = proxy_method(MixerActor.trigger_mute_changed) + ping = proxy_method(MixerActor.ping) diff --git a/mopidy/models/__init__.py b/src/mopidy/models/__init__.py similarity index 95% rename from mopidy/models/__init__.py rename to src/mopidy/models/__init__.py index 93427830e0..02374c7f21 100644 --- a/mopidy/models/__init__.py +++ b/src/mopidy/models/__init__.py @@ -3,25 +3,23 @@ from mopidy.models.serialize import ModelJSONEncoder, model_json_decoder __all__ = [ - "ImmutableObject", - "Ref", - "Image", - "Artist", "Album", - "Track", - "TlTrack", - "Playlist", - "SearchResult", + "Artist", + "Image", + "ImmutableObject", "model_json_decoder", "ModelJSONEncoder", + "Playlist", + "Ref", + "SearchResult", + "TlTrack", + "Track", "ValidatedImmutableObject", ] class Ref(ValidatedImmutableObject): - - """ - Model to represent URI references with a human friendly name and type + """Model to represent URI references with a human friendly name and type attached. This is intended for use a lightweight object "free" of metadata that can be passed around instead of using full blown models. @@ -42,7 +40,6 @@ class Ref(ValidatedImmutableObject): #: The object type, e.g. "artist", "album", "track", "playlist", #: "directory". Read-only. type = fields.Identifier() # TODO: consider locking this down. - # type = fields.Field(choices=(ALBUM, ARTIST, DIRECTORY, PLAYLIST, TRACK)) #: Constant used for comparison with the :attr:`type` field. ALBUM = "album" @@ -91,9 +88,7 @@ def track(cls, **kwargs): class Image(ValidatedImmutableObject): - - """ - :param string uri: URI of the image + """:param string uri: URI of the image :param int width: Optional width of image or :class:`None` :param int height: Optional height of image or :class:`None` """ @@ -109,9 +104,7 @@ class Image(ValidatedImmutableObject): class Artist(ValidatedImmutableObject): - - """ - :param uri: artist URI + """:param uri: artist URI :type uri: string :param name: artist name :type name: string @@ -135,9 +128,7 @@ class Artist(ValidatedImmutableObject): class Album(ValidatedImmutableObject): - - """ - :param uri: album URI + """:param uri: album URI :type uri: string :param name: album name :type name: string @@ -176,9 +167,7 @@ class Album(ValidatedImmutableObject): class Track(ValidatedImmutableObject): - - """ - :param uri: track URI + """:param uri: track URI :type uri: string :param name: track name :type name: string @@ -260,9 +249,7 @@ class Track(ValidatedImmutableObject): class TlTrack(ValidatedImmutableObject): - - """ - A tracklist track. Wraps a regular track and it's tracklist ID. + """A tracklist track. Wraps a regular track and it's tracklist ID. The use of :class:`TlTrack` allows the same track to appear multiple times in the tracklist. 
@@ -299,9 +286,7 @@ def __iter__(self): class Playlist(ValidatedImmutableObject): - - """ - :param uri: playlist URI + """:param uri: playlist URI :type uri: string :param name: playlist name :type name: string @@ -336,9 +321,7 @@ def length(self): class SearchResult(ValidatedImmutableObject): - - """ - :param uri: search result URI + """:param uri: search result URI :type uri: string :param tracks: matching tracks :type tracks: list of :class:`Track` elements diff --git a/mopidy/models/fields.py b/src/mopidy/models/fields.py similarity index 56% rename from mopidy/models/fields.py rename to src/mopidy/models/fields.py index 901aba6883..71e63b573e 100644 --- a/mopidy/models/fields.py +++ b/src/mopidy/models/fields.py @@ -1,10 +1,28 @@ +from __future__ import annotations + import sys +from collections.abc import Callable +from typing import ( + TYPE_CHECKING, + Any, + Generic, + TypeVar, + cast, + overload, +) +from mopidy.types import Uri -class Field: +if TYPE_CHECKING: + from collections.abc import Iterable - """ - Base field for use in +T = TypeVar("T") +V = TypeVar("V") +TField = TypeVar("TField", bound="Field") + + +class Field(Generic[T]): + """Base field for use in :class:`~mopidy.models.immutable.ValidatedImmutableObject`. These fields are responsible for type checking and other data sanitation in our models. @@ -19,8 +37,13 @@ class Field: :param choices: if set the field value must be one of these """ - def __init__(self, default=None, type=None, choices=None): - self._name = None # Set by ValidatedImmutableObjectMeta + def __init__( + self, + default: T | None = None, + type: type[T] | None = None, + choices: Iterable[T] | None = None, + ) -> None: + self._name: str | None = None # Set by ValidatedImmutableObjectMeta self._choices = choices self._default = default self._type = type @@ -28,8 +51,8 @@ def __init__(self, default=None, type=None, choices=None): if self._default is not None: self.validate(self._default) - def validate(self, value): - """Validate and possibly modify the field value before assignment""" + def validate(self, value: T) -> T: + """Validate and possibly modify the field value before assignment.""" if self._type and not isinstance(value, self._type): raise TypeError( f"Expected {self._name} to be a {self._type}, not {value!r}" @@ -40,34 +63,42 @@ def validate(self, value): ) return value - def __get__(self, instance, owner): - if not instance: + @overload + def __get__(self: TField, obj: None, objtype: None) -> TField: ... + + @overload + def __get__(self, obj: object, objtype: type[object]) -> T: ... + + def __get__( + self: TField, + obj: object | None, + objtype: type[object] | None, + ) -> T | TField: + if not obj: return self - return getattr(instance, "_" + self._name, self._default) + return cast(T, getattr(obj, f"_{self._name}", self._default)) - def __set__(self, instance, value): + def __set__(self, obj: object, value: T) -> None: if value is not None: value = self.validate(value) if value is None or value == self._default: - self.__delete__(instance) + self.__delete__(obj) else: - setattr(instance, "_" + self._name, value) - - def __delete__(self, instance): - if hasattr(instance, "_" + self._name): - delattr(instance, "_" + self._name) + setattr(obj, f"_{self._name}", value) + def __delete__(self, obj: object) -> None: + if hasattr(obj, f"_{self._name}"): + delattr(obj, f"_{self._name}") -class String(Field): - """ - Specialized :class:`Field` which is wired up for bytes and unicode. 
+class String(Field[str]): + """Specialized :class:`Field` which is wired up for bytes and unicode. :param default: default value for field """ - def __init__(self, default=None): + def __init__(self, default: str | None = None) -> None: # TODO: normalize to unicode? # TODO: only allow unicode? # TODO: disallow empty strings? @@ -75,8 +106,7 @@ def __init__(self, default=None): class Date(String): - """ - :class:`Field` for storing ISO 8601 dates as a string. + """:class:`Field` for storing ISO 8601 dates as a string. Supported formats are ``YYYY-MM-DD``, ``YYYY-MM`` and ``YYYY``, currently not validated. @@ -84,52 +114,54 @@ class Date(String): :param default: default value for field """ - pass # TODO: make this check for YYYY-MM-DD, YYYY-MM, YYYY using strptime. + # TODO: make this check for YYYY-MM-DD, YYYY-MM, YYYY using strptime. class Identifier(String): - """ - :class:`Field` for storing values such as GUIDs or other identifiers. + """:class:`Field` for storing values such as GUIDs or other identifiers. Values will be interned. :param default: default value for field """ - def validate(self, value): + def validate(self, value: str) -> str: value = super().validate(value) if isinstance(value, bytes): value = value.decode() return sys.intern(value) -class URI(Identifier): - """ - :class:`Field` for storing URIs +class URI(Field[Uri]): + """:class:`Field` for storing URIs. Values will be interned, currently not validated. :param default: default value for field """ - pass # TODO: validate URIs? + def validate(self, value: Uri) -> Uri: + value = super().validate(value) + if isinstance(value, bytes): + value = value.decode() + # TODO: validate URIs? + return Uri(sys.intern(value)) -class Integer(Field): - """ - :class:`Field` for storing integer numbers. +class Integer(Field[int]): + """:class:`Field` for storing integer numbers. :param default: default value for field :param min: field value must be larger or equal to this value when set :param max: field value must be smaller or equal to this value when set """ - def __init__(self, default=None, min=None, max=None): + def __init__(self, default=None, min=None, max=None) -> None: self._min = min self._max = max super().__init__(type=int, default=default) - def validate(self, value): + def validate(self, value: int) -> int: value = super().validate(value) if self._min is not None and value < self._min: raise ValueError( @@ -142,29 +174,33 @@ def validate(self, value): return value -class Boolean(Field): - """ - :class:`Field` for storing boolean values +class Boolean(Field[bool]): + """:class:`Field` for storing boolean values. :param default: default value for field """ - def __init__(self, default=None): + def __init__(self, default=None) -> None: super().__init__(type=bool, default=default) -class Collection(Field): - """ - :class:`Field` for storing collections of a given type. +class Collection(Field[tuple[V, ...] | frozenset[V]]): + """:class:`Field` for storing collections of a given type. :param type: all items stored in the collection must be of this type :param container: the type to store the items in """ - def __init__(self, type, container=tuple): + def __init__( + self, + type: type, + container: Callable[[], tuple | frozenset] = tuple, + ) -> None: super().__init__(type=type, default=container()) - def validate(self, value): + def validate(self, value: Iterable[Any]) -> tuple[V, ...] 
| frozenset[V]: + assert self._default is not None + assert self._type is not None if isinstance(value, str): raise TypeError( f"Expected {self._name} to be a collection of " @@ -176,4 +212,4 @@ def validate(self, value): f"Expected {self._name} to be a collection of " f"{self._type.__name__}, not {value!r}" ) - return self._default.__class__(value) or None + return self._default.__class__(value) diff --git a/mopidy/models/immutable.py b/src/mopidy/models/immutable.py similarity index 80% rename from mopidy/models/immutable.py rename to src/mopidy/models/immutable.py index fbf29ede68..f6ad57038a 100644 --- a/mopidy/models/immutable.py +++ b/src/mopidy/models/immutable.py @@ -1,16 +1,21 @@ +from __future__ import annotations + import copy import itertools import weakref +from collections.abc import Generator +from typing import Any, ClassVar, Generic, TypeVar from mopidy.models.fields import Field +T = TypeVar("T", bound="type") + # Registered models for automatic deserialization _models = {} class ImmutableObject: - """ - Superclass for immutable objects whose fields can only be modified via the + """Superclass for immutable objects whose fields can only be modified via the constructor. This version of this class has been retained to avoid breaking any clients @@ -26,7 +31,7 @@ class ImmutableObject: # slots as they will still get an instance dict. __slots__ = ["__weakref__"] - def __init__(self, *args, **kwargs): + def __init__(self, *_args, **kwargs): for key, value in kwargs.items(): if not self._is_valid_field(key): raise TypeError( @@ -55,13 +60,13 @@ def _set_field(self, name, value): else: self.__dict__[name] = value - def _items(self): - return self.__dict__.items() + def _items(self) -> Generator[tuple[str, Any], Any, None]: + yield from self.__dict__.items() def __repr__(self): kwarg_pairs = [] for key, value in sorted(self._items()): - if isinstance(value, (frozenset, tuple)): + if isinstance(value, frozenset | tuple): if not value: continue value = list(value) @@ -88,8 +93,7 @@ def __ne__(self, other): return not self.__eq__(other) def replace(self, **kwargs): - """ - Replace the fields in the model and return a new instance + """Replace the fields in the model and return a new instance. 
Examples:: @@ -105,9 +109,7 @@ def replace(self, **kwargs): other = copy.copy(self) for key, value in kwargs.items(): if not self._is_valid_field(key): - raise TypeError( - f"replace() got an unexpected keyword argument {key!r}" - ) + raise TypeError(f"replace() got an unexpected keyword argument {key!r}") other._set_field(key, value) return other @@ -115,7 +117,7 @@ def serialize(self): data = {} data["__model__"] = self.__class__.__name__ for key, value in self._items(): - if isinstance(value, (set, frozenset, list, tuple)): + if isinstance(value, set | frozenset | list | tuple): value = [ v.serialize() if isinstance(v, ImmutableObject) else v for v in value @@ -127,11 +129,17 @@ def serialize(self): return data -class _ValidatedImmutableObjectMeta(type): - +class _ValidatedImmutableObjectMeta(type, Generic[T]): """Helper that initializes fields, slots and memoizes instance creation.""" - def __new__(cls, name, bases, attrs): + _instances: dict[weakref.ReferenceType[_ValidatedImmutableObjectMeta[T]], T] = {} # noqa: RUF012 + + def __new__( + cls: type[_ValidatedImmutableObjectMeta], + name: str, + bases: tuple[type, ...], + attrs: dict[str, Any], + ) -> _ValidatedImmutableObjectMeta: fields = {} for base in bases: # Copy parent fields over to our state @@ -144,18 +152,20 @@ def __new__(cls, name, bases, attrs): attrs["_fields"] = fields attrs["_instances"] = weakref.WeakValueDictionary() - attrs["__slots__"] = list(attrs.get("__slots__", [])) + list( - fields.values() - ) + attrs["__slots__"] = list(attrs.get("__slots__", [])) + list(fields.values()) - clsc = super().__new__(cls, name, bases, attrs) + clsc: _ValidatedImmutableObjectMeta = super().__new__(cls, name, bases, attrs) if clsc.__name__ != "ValidatedImmutableObject": _models[clsc.__name__] = clsc return clsc - def __call__(cls, *args, **kwargs): # noqa: N805 + def __call__( + cls, + *args: Any, + **kwargs: Any, + ) -> T: instance = super().__call__(*args, **kwargs) return cls._instances.setdefault(weakref.ref(instance), instance) @@ -163,8 +173,7 @@ def __call__(cls, *args, **kwargs): # noqa: N805 class ValidatedImmutableObject( ImmutableObject, metaclass=_ValidatedImmutableObjectMeta ): - """ - Superclass for immutable objects whose fields can only be modified via the + """Superclass for immutable objects whose fields can only be modified via the constructor. Fields should be :class:`Field` instances to ensure type safety in our models. @@ -173,6 +182,8 @@ class ValidatedImmutableObject( give you the same instance twice. """ + _fields: ClassVar[dict[str, Any]] + _instances: ClassVar[weakref.WeakValueDictionary] __slots__ = ["_hash"] def __hash__(self): @@ -187,14 +198,13 @@ def _is_valid_field(self, name): def _set_field(self, name, value): object.__setattr__(self, name, value) - def _items(self): + def _items(self) -> Generator[tuple[str, Any], Any, None]: for field, key in self._fields.items(): if hasattr(self, key): yield field, getattr(self, key) def replace(self, **kwargs): - """ - Replace the fields in the model and return a new instance + """Replace the fields in the model and return a new instance. Examples:: diff --git a/mopidy/models/serialize.py b/src/mopidy/models/serialize.py similarity index 72% rename from mopidy/models/serialize.py rename to src/mopidy/models/serialize.py index 066f3ff6ea..96e8c50fd2 100644 --- a/mopidy/models/serialize.py +++ b/src/mopidy/models/serialize.py @@ -4,9 +4,7 @@ class ModelJSONEncoder(json.JSONEncoder): - - """ - Automatically serialize Mopidy models to JSON. 
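Two behaviours worth a quick illustration at this point: the instance memoization promised by the ValidatedImmutableObject docstring above, and the JSON round trip provided by ModelJSONEncoder and model_json_decoder (below). A sketch only, using Artist as an example model with an invented field value:

    import json

    from mopidy.models import Artist, ModelJSONEncoder, model_json_decoder

    low = Artist(name="Low")
    assert low is Artist(name="Low")  # equal field values yield the memoized instance

    payload = json.dumps(low, cls=ModelJSONEncoder)
    assert json.loads(payload, object_hook=model_json_decoder) == low
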
+ """Automatically serialize Mopidy models to JSON. Usage:: @@ -16,15 +14,14 @@ class ModelJSONEncoder(json.JSONEncoder): """ - def default(self, obj): - if isinstance(obj, immutable.ImmutableObject): - return obj.serialize() - return json.JSONEncoder.default(self, obj) + def default(self, o): + if isinstance(o, immutable.ImmutableObject): + return o.serialize() + return json.JSONEncoder.default(self, o) def model_json_decoder(dct): - """ - Automatically deserialize Mopidy models from JSON. + """Automatically deserialize Mopidy models from JSON. Usage:: diff --git a/mopidy/py.typed b/src/mopidy/py.typed similarity index 100% rename from mopidy/py.typed rename to src/mopidy/py.typed diff --git a/mopidy/softwaremixer/__init__.py b/src/mopidy/softwaremixer/__init__.py similarity index 66% rename from mopidy/softwaremixer/__init__.py rename to src/mopidy/softwaremixer/__init__.py index 6966b4e078..c5a5b25c8e 100644 --- a/mopidy/softwaremixer/__init__.py +++ b/src/mopidy/softwaremixer/__init__.py @@ -1,22 +1,19 @@ -import os +from pathlib import Path import mopidy from mopidy import config, ext class Extension(ext.Extension): - dist_name = "Mopidy-SoftwareMixer" ext_name = "softwaremixer" version = mopidy.__version__ def get_default_config(self): - conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") - return config.read(conf_file) + return config.read(Path(__file__).parent / "ext.conf") def get_config_schema(self): - schema = super().get_config_schema() - return schema + return super().get_config_schema() def setup(self, registry): from .mixer import SoftwareMixer diff --git a/mopidy/softwaremixer/ext.conf b/src/mopidy/softwaremixer/ext.conf similarity index 100% rename from mopidy/softwaremixer/ext.conf rename to src/mopidy/softwaremixer/ext.conf diff --git a/mopidy/softwaremixer/mixer.py b/src/mopidy/softwaremixer/mixer.py similarity index 89% rename from mopidy/softwaremixer/mixer.py rename to src/mopidy/softwaremixer/mixer.py index 43f4055a01..980ecbb828 100644 --- a/mopidy/softwaremixer/mixer.py +++ b/src/mopidy/softwaremixer/mixer.py @@ -1,6 +1,7 @@ import logging import pykka +from pykka.typing import proxy_method from mopidy import mixer @@ -8,7 +9,6 @@ class SoftwareMixer(pykka.ThreadingActor, mixer.Mixer): - name = "software" def __init__(self, config): @@ -56,3 +56,8 @@ def set_mute(self, mute): return False self._audio_mixer.set_mute(mute) return True + + +class SoftwareMixerProxy(mixer.MixerProxy): + setup = proxy_method(SoftwareMixer.setup) + teardown = proxy_method(SoftwareMixer.teardown) diff --git a/mopidy/stream/__init__.py b/src/mopidy/stream/__init__.py similarity index 84% rename from mopidy/stream/__init__.py rename to src/mopidy/stream/__init__.py index aaeaad1869..657fb80b95 100644 --- a/mopidy/stream/__init__.py +++ b/src/mopidy/stream/__init__.py @@ -1,18 +1,16 @@ -import os +from pathlib import Path import mopidy from mopidy import config, ext class Extension(ext.Extension): - dist_name = "Mopidy-Stream" ext_name = "stream" version = mopidy.__version__ def get_default_config(self): - conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") - return config.read(conf_file) + return config.read(Path(__file__).parent / "ext.conf") def get_config_schema(self): schema = super().get_config_schema() diff --git a/mopidy/stream/actor.py b/src/mopidy/stream/actor.py similarity index 76% rename from mopidy/stream/actor.py rename to src/mopidy/stream/actor.py index 8c993fbd18..c5627e140b 100644 --- a/mopidy/stream/actor.py +++ b/src/mopidy/stream/actor.py @@ 
-2,7 +2,7 @@ import logging import re import time -import urllib +import urllib.parse import pykka @@ -11,12 +11,13 @@ from mopidy.audio import scan, tags from mopidy.internal import http, playlists from mopidy.models import Track +from mopidy.types import Uri, UriScheme logger = logging.getLogger(__name__) class StreamBackend(pykka.ThreadingActor, backend.Backend): - def __init__(self, config, audio): + def __init__(self, config, audio) -> None: super().__init__() self._scanner = scan.Scanner( @@ -25,9 +26,7 @@ def __init__(self, config, audio): self._session = http.get_requests_session( proxy_config=config["proxy"], - user_agent=( - f"{stream.Extension.dist_name}/{stream.Extension.version}" - ), + user_agent=(f"{stream.Extension.dist_name}/{stream.Extension.version}"), ) blacklist = config["stream"]["metadata_blacklist"] @@ -41,21 +40,21 @@ def __init__(self, config, audio): self.playback = StreamPlaybackProvider(audio=audio, backend=self) self.playlists = None - self.uri_schemes = audio_lib.supported_uri_schemes( - config["stream"]["protocols"] - ) - - if "file" in self.uri_schemes and config["file"]["enabled"]: + uri_schemes = audio_lib.supported_uri_schemes(config["stream"]["protocols"]) + if UriScheme("file") in StreamBackend.uri_schemes and config["file"]["enabled"]: logger.warning( 'The stream/protocols config value includes the "file" ' 'protocol. "file" playback is now handled by Mopidy-File. ' "Please remove it from the stream/protocols config." ) - self.uri_schemes -= {"file"} + uri_schemes -= {UriScheme("file")} + StreamBackend.uri_schemes = sorted(uri_schemes) class StreamLibraryProvider(backend.LibraryProvider): - def lookup(self, uri): + backend: StreamBackend + + def lookup(self, uri: Uri) -> list[Track]: if urllib.parse.urlsplit(uri).scheme not in self.backend.uri_schemes: return [] @@ -82,7 +81,9 @@ def lookup(self, uri): class StreamPlaybackProvider(backend.PlaybackProvider): - def translate_uri(self, uri): + backend: StreamBackend + + def translate_uri(self, uri: Uri) -> Uri | None: if urllib.parse.urlsplit(uri).scheme not in self.backend.uri_schemes: return None @@ -99,15 +100,17 @@ def translate_uri(self, uri): return unwrapped_uri -# TODO: cleanup the return value of this. -def _unwrap_stream(uri, timeout, scanner, requests_session): - """ - Get a stream URI from a playlist URI, ``uri``. +def _unwrap_stream( # noqa: PLR0911 # TODO: cleanup the return value of this. + uri: Uri, + timeout: float, + scanner: scan.Scanner, + requests_session, +) -> tuple[Uri | None, scan._Result | None]: + """Get a stream URI from a playlist URI, ``uri``. Unwraps nested playlists until something that's not a playlist is found or the ``timeout`` is reached. 
""" - original_uri = uri seen_uris = set() deadline = time.time() + timeout @@ -115,13 +118,12 @@ def _unwrap_stream(uri, timeout, scanner, requests_session): while time.time() < deadline: if uri in seen_uris: logger.info( - "Unwrapping stream from URI (%s) failed: " - "playlist referenced itself", + "Unwrapping stream from URI (%s) failed: playlist referenced itself", uri, ) return None, None - else: - seen_uris.add(uri) + + seen_uris.add(uri) logger.debug("Unwrapping stream from URI: %s", uri) @@ -129,8 +131,7 @@ def _unwrap_stream(uri, timeout, scanner, requests_session): scan_timeout = deadline - time.time() if scan_timeout < 0: logger.info( - "Unwrapping stream from URI (%s) failed: " - "timed out in %sms", + "Unwrapping stream from URI (%s) failed: timed out in %sms", uri, timeout, ) @@ -147,9 +148,7 @@ def _unwrap_stream(uri, timeout, scanner, requests_session): and not scan_result.mime.startswith("application/") ) if scan_result.playable or has_interesting_mime: - logger.debug( - "Unwrapped potential %s stream: %s", scan_result.mime, uri - ) + logger.debug("Unwrapped potential %s stream: %s", scan_result.mime, uri) return uri, scan_result download_timeout = deadline - time.time() @@ -160,14 +159,11 @@ def _unwrap_stream(uri, timeout, scanner, requests_session): timeout, ) return None, None - content = http.download( - requests_session, uri, timeout=download_timeout / 1000 - ) + content = http.download(requests_session, uri, timeout=download_timeout / 1000) if content is None: logger.info( - "Unwrapping stream from URI (%s) failed: " - "error downloading URI %s", + "Unwrapping stream from URI (%s) failed: error downloading URI %s", original_uri, uri, ) @@ -184,4 +180,6 @@ def _unwrap_stream(uri, timeout, scanner, requests_session): # TODO Test streams and return first that seems to be playable new_uri = uris[0] logger.debug("Parsed playlist (%s) and found new URI: %s", uri, new_uri) - uri = urllib.parse.urljoin(uri, new_uri) + uri = Uri(urllib.parse.urljoin(uri, new_uri)) + + return None, None diff --git a/mopidy/stream/ext.conf b/src/mopidy/stream/ext.conf similarity index 100% rename from mopidy/stream/ext.conf rename to src/mopidy/stream/ext.conf diff --git a/src/mopidy/types.py b/src/mopidy/types.py new file mode 100644 index 0000000000..30271f95ff --- /dev/null +++ b/src/mopidy/types.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from collections.abc import Iterable +from typing import TYPE_CHECKING, Literal, NewType, TypeVar + +if TYPE_CHECKING: + from typing import TypeAlias + +F = TypeVar("F") +QueryValue: TypeAlias = str | int +Query: TypeAlias = dict[F, Iterable[QueryValue]] + +# Types for distinct queries +DistinctField: TypeAlias = Literal[ + "uri", + "track_name", + "album", + "artist", + "albumartist", + "composer", + "performer", + "track_no", + "genre", + "date", + "comment", + "disc_no", + "musicbrainz_albumid", + "musicbrainz_artistid", + "musicbrainz_trackid", +] + +# Types for search queries +SearchField: TypeAlias = DistinctField | Literal["any"] +SearchQuery: TypeAlias = dict[SearchField, Iterable[QueryValue]] + +# Types for tracklist filtering +TracklistField: TypeAlias = Literal[ + "tlid", + "uri", + "name", + "genre", + "comment", + "musicbrainz_id", +] + +# Superset of all fields that can be used in a query +QueryField: TypeAlias = DistinctField | SearchField | TracklistField + +# URI types +Uri = NewType("Uri", str) +UriScheme = NewType("UriScheme", str) + +# Integer types +Percentage = NewType("Percentage", int) +DurationMs = 
NewType("DurationMs", int) diff --git a/mopidy/zeroconf.py b/src/mopidy/zeroconf.py similarity index 76% rename from mopidy/zeroconf.py rename to src/mopidy/zeroconf.py index 9b57c99a11..9051b9e960 100644 --- a/mopidy/zeroconf.py +++ b/src/mopidy/zeroconf.py @@ -4,7 +4,7 @@ logger = logging.getLogger(__name__) try: - import dbus + import dbus # pyright: ignore[reportMissingImports] except ImportError: dbus = None @@ -13,15 +13,12 @@ _AVAHI_PUBLISHFLAGS_NONE = 0 -def _is_loopback_address(host): - return ( - host.startswith("127.") - or host.startswith("::ffff:127.") - or host == "::1" - ) +def _is_loopback_address(host: str) -> bool: + return host.startswith(("127.", "::ffff:127.")) or host == "::1" -def _convert_text_list_to_dbus_format(text_list): +def _convert_text_list_to_dbus_format(text_list: list[str]): + assert dbus array = dbus.Array(signature="ay") for text in text_list: array.append([dbus.Byte(ord(c)) for c in text]) @@ -29,7 +26,6 @@ def _convert_text_list_to_dbus_format(text_list): class Zeroconf: - """Publish a network service with Zeroconf. Currently, this only works on Linux using Avahi via D-Bus. @@ -44,7 +40,15 @@ class Zeroconf: :type text: list of str """ - def __init__(self, name, stype, port, domain="", host="", text=None): + def __init__( # noqa: PLR0913 + self, + name: str, + stype: str, + port: int, + domain: str = "", + host: str = "", + text: list[str] | None = None, + ) -> None: self.stype = stype self.port = port self.domain = domain @@ -71,22 +75,19 @@ def __init__(self, name, stype, port, domain="", host="", text=None): except dbus.exceptions.DBusException as e: logger.debug("%s: Server failed: %s", self, e) - def __str__(self): + def __str__(self) -> str: return ( f"Zeroconf service {self.name!r} " f"({self.stype} at [{self.host}]:{self.port:d})" ) - def publish(self): + def publish(self) -> bool: # noqa: PLR0911 """Publish the service. Call when your service starts. """ - if _is_loopback_address(self.host): - logger.debug( - "%s: Publish on loopback interface is not supported.", self - ) + logger.debug("%s: Publish on loopback interface is not supported.", self) return False if not dbus: @@ -103,9 +104,7 @@ def publish(self): try: if not self.bus.name_has_owner("org.freedesktop.Avahi"): - logger.debug( - "%s: Avahi service not running; publish failed.", self - ) + logger.debug("%s: Avahi service not running; publish failed.", self) return False self.group = dbus.Interface( @@ -129,22 +128,24 @@ def publish(self): self.group.Commit() logger.debug("%s: Published", self) - return True except dbus.exceptions.DBusException as e: logger.debug("%s: Publish failed: %s", self, e) return False + else: + return True - def unpublish(self): + def unpublish(self) -> None: """Unpublish the service. Call when your service shuts down. 
""" + if not dbus or not self.group: + return - if self.group: - try: - self.group.Reset() - logger.debug("%s: Unpublished", self) - except dbus.exceptions.DBusException as e: - logger.debug("%s: Unpublish failed: %s", self, e) - finally: - self.group = None + try: + self.group.Reset() + logger.debug("%s: Unpublished", self) + except dbus.exceptions.DBusException as e: + logger.debug("%s: Unpublish failed: %s", self, e) + finally: + self.group = None diff --git a/tests/__init__.py b/tests/__init__.py index f2ae3f40cb..c34a58250b 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -14,7 +14,7 @@ def __eq__(self, rhs): try: return isinstance(rhs, self.klass) except TypeError: - return type(rhs) == type(self.klass) # noqa + return type(rhs) == type(self.klass) # noqa: E721 def __ne__(self, rhs): return not self.__eq__(rhs) diff --git a/tests/audio/test_actor.py b/tests/audio/test_actor.py index 27bc5c73fd..215e62a50d 100644 --- a/tests/audio/test_actor.py +++ b/tests/audio/test_actor.py @@ -1,9 +1,9 @@ import threading import unittest +from typing import ClassVar from unittest import mock import pykka - from mopidy import audio from mopidy.audio.constants import PlaybackState from mopidy.internal import path @@ -17,14 +17,14 @@ class BaseTest(unittest.TestCase): - uris = [ + uris: ClassVar[list[str]] = [ path.path_to_uri(path_to_data_dir("song1.wav")), path.path_to_uri(path_to_data_dir("song2.wav")), ] audio_class = audio.Audio - def setUp(self): # noqa: N802 + def setUp(self): config = { "audio": { "buffer_time": None, @@ -38,7 +38,7 @@ def setUp(self): # noqa: N802 self.song_uri = path.path_to_uri(path_to_data_dir("song1.wav")) self.audio = self.audio_class.start(config=config, mixer=None).proxy() - def tearDown(self): # noqa + def tearDown(self): pykka.ActorRegistry.stop_all() def possibly_trigger_fake_playback_error(self, uri): @@ -47,6 +47,9 @@ def possibly_trigger_fake_playback_error(self, uri): def possibly_trigger_fake_about_to_finish(self): pass + def possibly_trigger_fake_source_setup(self): + pass + class DummyMixin: audio_class = dummy_audio.DummyAudio @@ -59,6 +62,11 @@ def possibly_trigger_fake_about_to_finish(self): if callback: callback() + def possibly_trigger_fake_source_setup(self): + callback = self.audio.get_source_setup_callback().get() + if callback: + callback() + class AudioTest(BaseTest): def test_start_playback_existing_file(self): @@ -137,18 +145,18 @@ def clear_events(self): class AudioEventTest(BaseTest): - def setUp(self): # noqa: N802 + def setUp(self): super().setUp() self.audio.enable_sync_handler().get() self.listener = DummyAudioListener.start().proxy() - def tearDown(self): # noqa: N802 + def tearDown(self): super().tearDown() - def assertEvent(self, event, **kwargs): # noqa: N802 + def assert_event(self, event, **kwargs): assert (event, kwargs) in self.listener.get_events().get() - def assertNotEvent(self, event, **kwargs): # noqa: N802 + def assert_not_event(self, event, **kwargs): assert (event, kwargs) not in self.listener.get_events().get() # TODO: test without uri set, with bad uri and gapless... 
@@ -162,7 +170,7 @@ def test_state_change_stopped_to_playing_event(self): self.audio.start_playback() self.audio.wait_for_state_change().get() - self.assertEvent( + self.assert_event( "state_changed", old_state=PlaybackState.STOPPED, new_state=PlaybackState.PLAYING, @@ -175,7 +183,7 @@ def test_state_change_stopped_to_paused_event(self): self.audio.pause_playback() self.audio.wait_for_state_change().get() - self.assertEvent( + self.assert_event( "state_changed", old_state=PlaybackState.STOPPED, new_state=PlaybackState.PAUSED, @@ -192,7 +200,7 @@ def test_state_change_paused_to_playing_event(self): self.audio.start_playback() self.audio.wait_for_state_change().get() - self.assertEvent( + self.assert_event( "state_changed", old_state=PlaybackState.PAUSED, new_state=PlaybackState.PLAYING, @@ -209,7 +217,7 @@ def test_state_change_paused_to_stopped_event(self): self.audio.stop_playback() self.audio.wait_for_state_change().get() - self.assertEvent( + self.assert_event( "state_changed", old_state=PlaybackState.PAUSED, new_state=PlaybackState.STOPPED, @@ -226,7 +234,7 @@ def test_state_change_playing_to_paused_event(self): self.audio.pause_playback() self.audio.wait_for_state_change().get() - self.assertEvent( + self.assert_event( "state_changed", old_state=PlaybackState.PLAYING, new_state=PlaybackState.PAUSED, @@ -243,7 +251,7 @@ def test_state_change_playing_to_stopped_event(self): self.audio.stop_playback() self.audio.wait_for_state_change().get() - self.assertEvent( + self.assert_event( "state_changed", old_state=PlaybackState.PLAYING, new_state=PlaybackState.STOPPED, @@ -259,7 +267,7 @@ def test_stream_changed_event_on_playing(self): # Since we are going from stopped to playing, the state change is # enough to ensure the stream changed. self.audio.wait_for_state_change().get() - self.assertEvent("stream_changed", uri=self.uris[0]) + self.assert_event("stream_changed", uri=self.uris[0]) def test_stream_changed_event_on_multiple_changes(self): self.audio.prepare_change() @@ -268,14 +276,14 @@ def test_stream_changed_event_on_multiple_changes(self): self.audio.start_playback() self.audio.wait_for_state_change().get() - self.assertEvent("stream_changed", uri=self.uris[0]) + self.assert_event("stream_changed", uri=self.uris[0]) self.audio.prepare_change() self.audio.set_uri(self.uris[1]) self.audio.pause_playback() self.audio.wait_for_state_change().get() - self.assertEvent("stream_changed", uri=self.uris[1]) + self.assert_event("stream_changed", uri=self.uris[1]) def test_stream_changed_event_on_playing_to_paused(self): self.audio.prepare_change() @@ -284,13 +292,13 @@ def test_stream_changed_event_on_playing_to_paused(self): self.audio.start_playback() self.audio.wait_for_state_change().get() - self.assertEvent("stream_changed", uri=self.uris[0]) + self.assert_event("stream_changed", uri=self.uris[0]) self.listener.clear_events() self.audio.pause_playback() self.audio.wait_for_state_change().get() - self.assertNotEvent("stream_changed", uri=self.uris[0]) + self.assert_not_event("stream_changed", uri=self.uris[0]) def test_stream_changed_event_on_paused_to_stopped(self): self.audio.prepare_change() @@ -302,7 +310,7 @@ def test_stream_changed_event_on_paused_to_stopped(self): self.audio.stop_playback() self.audio.wait_for_state_change().get() - self.assertEvent("stream_changed", uri=None) + self.assert_event("stream_changed", uri=None) def test_position_changed_on_pause(self): self.audio.prepare_change() @@ -311,7 +319,7 @@ def test_position_changed_on_pause(self): 
self.audio.wait_for_state_change() self.audio.wait_for_state_change().get() - self.assertEvent("position_changed", position=0) + self.assert_event("position_changed", position=0) def test_stream_changed_event_on_paused_to_playing(self): self.audio.prepare_change() @@ -320,13 +328,13 @@ def test_stream_changed_event_on_paused_to_playing(self): self.audio.pause_playback() self.audio.wait_for_state_change().get() - self.assertEvent("stream_changed", uri=self.uris[0]) + self.assert_event("stream_changed", uri=self.uris[0]) self.listener.clear_events() self.audio.start_playback() self.audio.wait_for_state_change().get() - self.assertNotEvent("stream_changed", uri=self.uris[0]) + self.assert_not_event("stream_changed", uri=self.uris[0]) def test_position_changed_on_play(self): self.audio.prepare_change() @@ -335,7 +343,7 @@ def test_position_changed_on_play(self): self.audio.wait_for_state_change() self.audio.wait_for_state_change().get() - self.assertEvent("position_changed", position=0) + self.assert_event("position_changed", position=0) def test_position_changed_on_seek_while_stopped(self): self.audio.prepare_change() @@ -343,7 +351,7 @@ def test_position_changed_on_seek_while_stopped(self): self.audio.set_position(2000) self.audio.wait_for_state_change().get() - self.assertNotEvent("position_changed", position=0) + self.assert_not_event("position_changed", position=0) def test_position_changed_on_seek_after_play(self): self.audio.prepare_change() @@ -355,7 +363,7 @@ def test_position_changed_on_seek_after_play(self): self.audio.set_position(2000) self.audio.wait_for_state_change().get() - self.assertEvent("position_changed", position=2000) + self.assert_event("position_changed", position=2000) def test_position_changed_on_seek_after_pause(self): self.audio.prepare_change() @@ -367,7 +375,7 @@ def test_position_changed_on_seek_after_pause(self): self.audio.set_position(2000) self.audio.wait_for_state_change().get() - self.assertEvent("position_changed", position=2000) + self.assert_event("position_changed", position=2000) def test_tags_changed_on_playback(self): self.audio.prepare_change() @@ -375,7 +383,7 @@ def test_tags_changed_on_playback(self): self.audio.start_playback() self.audio.wait_for_state_change().get() - self.assertEvent("tags_changed", tags=mock.ANY) + self.assert_event("tags_changed", tags=mock.ANY) # Unlike the other events, having the state changed done is not # enough to ensure our event is called. So we setup a threading @@ -393,7 +401,7 @@ def test_stream_changed_event_on_paused(self): if not event.wait(timeout=1.0): self.fail("Stream changed not reached within deadline") - self.assertEvent("stream_changed", uri=self.uris[0]) + self.assert_event("stream_changed", uri=self.uris[0]) def test_reached_end_of_stream_event(self): event = self.listener.wait("reached_end_of_stream").get() @@ -432,8 +440,8 @@ def callback(): self.fail("EOS not received") # Check that both uris got played - self.assertEvent("stream_changed", uri=self.uris[0]) - self.assertEvent("stream_changed", uri=self.uris[1]) + self.assert_event("stream_changed", uri=self.uris[0]) + self.assert_event("stream_changed", uri=self.uris[1]) # Check that events counts check out. 
keys = [k for k, v in self.listener.get_events().get()] @@ -444,6 +452,19 @@ def callback(): # TODO: test tag states within gaples + def test_source_setup(self): + mock_callback = mock.Mock() + + self.audio.prepare_change() + self.audio.set_source_setup_callback(mock_callback).get() + self.audio.set_uri(self.uris[0]) + self.audio.start_playback() + + self.possibly_trigger_fake_source_setup() + self.audio.wait_for_state_change().get() + + mock_callback.assert_called_once() + # TODO: this does not belong in this testcase def test_current_tags_are_blank_to_begin_with(self): assert not self.audio.get_current_tags().get() @@ -488,7 +509,6 @@ def callback(): class AudioDummyEventTest(DummyMixin, AudioEventTest): - """Exercise the AudioEventTest against our mock audio classes.""" @@ -514,18 +534,18 @@ def test_invalid_output_raises_error(self): class AudioStateTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.audio = audio.Audio(config=None, mixer=None) def test_state_starts_as_stopped(self): - assert audio.PlaybackState.STOPPED == self.audio.state + assert self.audio.state == audio.PlaybackState.STOPPED def test_state_does_not_change_when_in_gst_ready_state(self): self.audio._handler.on_playbin_state_changed( Gst.State.NULL, Gst.State.READY, Gst.State.VOID_PENDING ) - assert audio.PlaybackState.STOPPED == self.audio.state + assert self.audio.state == audio.PlaybackState.STOPPED def test_state_changes_from_stopped_to_playing_on_play(self): self.audio._handler.on_playbin_state_changed( @@ -538,7 +558,7 @@ def test_state_changes_from_stopped_to_playing_on_play(self): Gst.State.PAUSED, Gst.State.PLAYING, Gst.State.VOID_PENDING ) - assert audio.PlaybackState.PLAYING == self.audio.state + assert self.audio.state == audio.PlaybackState.PLAYING def test_state_changes_from_playing_to_paused_on_pause(self): self.audio.state = audio.PlaybackState.PLAYING @@ -547,7 +567,7 @@ def test_state_changes_from_playing_to_paused_on_pause(self): Gst.State.PLAYING, Gst.State.PAUSED, Gst.State.VOID_PENDING ) - assert audio.PlaybackState.PAUSED == self.audio.state + assert self.audio.state == audio.PlaybackState.PAUSED def test_state_changes_from_playing_to_stopped_on_stop(self): self.audio.state = audio.PlaybackState.PLAYING @@ -562,11 +582,11 @@ def test_state_changes_from_playing_to_stopped_on_stop(self): # self.audio._handler.on_playbin_state_changed( # Gst.State.READY, Gst.State.NULL, Gst.State.VOID_PENDING) - assert audio.PlaybackState.STOPPED == self.audio.state + assert self.audio.state == audio.PlaybackState.STOPPED class AudioBufferingTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.audio = audio.Audio(config=None, mixer=None) self.audio._playbin = mock.Mock(spec=["set_state"]) @@ -621,14 +641,12 @@ def test_change_to_stopped_while_buffering(self): class AudioLiveTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): config = {"proxy": {}} self.audio = audio.Audio(config=config, mixer=None) self.audio._playbin = mock.Mock(spec=["set_property"]) self.source = mock.MagicMock() - # Avoid appsrc.configure() - self.source.get_factory.get_name = mock.Mock(return_value="not_appsrc") self.source.props = mock.Mock(spec=["is_live"]) def test_not_live_mode(self): @@ -645,20 +663,9 @@ def test_live_mode(self): self.source.set_live.assert_called_with(True) - def test_not_live_mode_after_set_appsrc(self): - self.audio._live_stream = True - - # Embrace appsrc.configure() - self.source.get_factory.get_name.return_value = "appsrc" - - 
self.audio.set_appsrc("") - self.audio._on_source_setup("dummy", self.source) - - self.source.set_live.assert_not_called() - class DownloadBufferingTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.audio = audio.Audio(config=None, mixer=None) self.audio._playbin = mock.Mock(spec=["set_property"]) @@ -671,7 +678,7 @@ def test_download_flag_is_passed_to_playbin_if_download_buffering_is_enabled( playbin.set_property.assert_has_calls([mock.call("flags", 0x02 | 0x80)]) - def test_download_flag_is_not_passed_to_playbin_if_download_buffering_is_not_enabled( # noqa: B950 + def test_download_flag_is_not_passed_to_playbin_if_download_buffering_is_disabled( self, ): playbin = self.audio._playbin @@ -680,11 +687,25 @@ def test_download_flag_is_not_passed_to_playbin_if_download_buffering_is_not_ena playbin.set_property.assert_has_calls([mock.call("flags", 0x02)]) - def test_download_flag_is_not_passed_to_playbin_if_set_appsrc( # noqa: B950 - self, - ): - playbin = self.audio._playbin - self.audio.set_appsrc("") +class SourceSetupCallbackTest(unittest.TestCase): + def setUp(self): + config = {"proxy": {}} + self.audio = audio.Audio(config=config, mixer=None) + self.audio._playbin = mock.Mock(spec=["set_property"]) - playbin.set_property.assert_has_calls([mock.call("flags", 0x02)]) + self.source = mock.MagicMock() + + def test_source_setup_callback(self): + mock_callback = mock.MagicMock() + self.audio.set_source_setup_callback(mock_callback) + + self.audio._on_source_setup("dummy", self.source) + + mock_callback.assert_called_once_with(self.source) + + self.audio.set_source_setup_callback(None) + + self.audio._on_source_setup("dummy", self.source) + + mock_callback.assert_called_once() diff --git a/tests/audio/test_listener.py b/tests/audio/test_listener.py index afc7a25ae4..0c42c95441 100644 --- a/tests/audio/test_listener.py +++ b/tests/audio/test_listener.py @@ -5,7 +5,7 @@ class AudioListenerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.listener = audio.AudioListener() def test_on_event_forwards_to_specific_handler(self): diff --git a/tests/audio/test_scan.py b/tests/audio/test_scan.py index 86856f16ec..538bf01d2a 100644 --- a/tests/audio/test_scan.py +++ b/tests/audio/test_scan.py @@ -8,7 +8,7 @@ class ScannerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.errors = {} self.result = {} @@ -42,7 +42,7 @@ def check_if_missing_plugin(self): def test_tags_is_set(self): self.scan(self.find("scanner/simple")) - assert list(self.result.values())[0].tags + assert next(iter(self.result.values())).tags def test_errors_is_not_set(self): self.scan(self.find("scanner/simple")) @@ -58,8 +58,8 @@ def test_duration_is_set(self): ogg = path_to_data_dir("scanner/simple/song1.ogg") mp3 = path_to_data_dir("scanner/simple/song1.mp3") - assert self.result[mp3].duration == 4680 - assert self.result[ogg].duration == 4680 + assert self.result[mp3].duration == 4608 + assert self.result[ogg].duration == 4704 def test_artist_is_set(self): self.scan(self.find("scanner/simple")) @@ -91,7 +91,7 @@ def test_nonexistant_dir_does_not_fail(self): def test_other_media_is_ignored(self): self.scan(self.find("scanner/image")) - assert not list(self.result.values())[0].playable + assert not next(iter(self.result.values())).playable def test_log_file_that_gst_thinks_is_mpeg_1_is_ignored(self): self.scan([path_to_data_dir("scanner/example.log")]) diff --git a/tests/audio/test_tags.py b/tests/audio/test_tags.py index e56c46aa4e..071fa495ae 
100644 --- a/tests/audio/test_tags.py +++ b/tests/audio/test_tags.py @@ -26,7 +26,7 @@ def make_taglist(self, tag, values): taglist = Gst.TagList.new_empty() for value in values: - if isinstance(value, (GLib.Date, Gst.DateTime)): + if isinstance(value, GLib.Date | Gst.DateTime): taglist.add_value(Gst.TagMergeMode.APPEND, tag, value) continue @@ -96,7 +96,7 @@ def test_integer_tag(self): # TODO: current test is trying to test everything at once with a complete tags # set, instead we might want to try with a minimal one making testing easier. class TagsToTrackTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.tags = { "album": ["album"], "track-number": [1], @@ -120,9 +120,7 @@ def setUp(self): # noqa: N802 "bitrate": [1000], } - artist = Artist( - name="artist", musicbrainz_id="artistid", sortname="sortname" - ) + artist = Artist(name="artist", musicbrainz_id="artistid", sortname="sortname") composer = Artist(name="composer") performer = Artist(name="performer") albumartist = Artist(name="albumartist", musicbrainz_id="albumartistid") @@ -209,9 +207,7 @@ def test_multiple_track_genre(self): def test_missing_track_date(self): del self.tags["date"] self.check( - self.track.replace( - album=self.track.album.replace(date=None), date=None - ) + self.track.replace(album=self.track.album.replace(date=None), date=None) ) def test_multiple_track_date(self): @@ -242,7 +238,7 @@ def test_multiple_track_artist_name(self): def test_missing_track_artist_musicbrainz_id(self): del self.tags["musicbrainz-artistid"] - artist = list(self.track.artists)[0].replace(musicbrainz_id=None) + artist = next(iter(self.track.artists)).replace(musicbrainz_id=None) self.check(self.track.replace(artists=[artist])) def test_multiple_track_artist_musicbrainz_id(self): @@ -315,7 +311,7 @@ def test_multiple_album_artist_name(self): def test_missing_album_artist_musicbrainz_id(self): del self.tags["musicbrainz-albumartistid"] - albumartist = list(self.track.album.artists)[0] + albumartist = next(iter(self.track.album.artists)) albumartist = albumartist.replace(musicbrainz_id=None) album = self.track.album.replace(artists=[albumartist]) self.check(self.track.replace(album=album)) diff --git a/tests/audio/test_utils.py b/tests/audio/test_utils.py deleted file mode 100644 index c32824f7ce..0000000000 --- a/tests/audio/test_utils.py +++ /dev/null @@ -1,20 +0,0 @@ -import pytest - -from mopidy.audio import utils -from mopidy.internal.gi import Gst - - -class TestCreateBuffer: - def test_creates_buffer(self): - buf = utils.create_buffer(b"123", timestamp=0, duration=1000000) - - assert isinstance(buf, Gst.Buffer) - assert buf.pts == 0 - assert buf.duration == 1000000 - assert buf.get_size() == len(b"123") - - def test_fails_if_data_has_zero_length(self): - with pytest.raises(ValueError) as excinfo: - utils.create_buffer(b"", timestamp=0, duration=1000000) - - assert "Cannot create buffer without data" in str(excinfo.value) diff --git a/tests/backend/test_backend.py b/tests/backend/test_backend.py index 80569e50f9..f521ecc8a6 100644 --- a/tests/backend/test_backend.py +++ b/tests/backend/test_backend.py @@ -1,5 +1,6 @@ import unittest +import pytest from mopidy import backend from tests import dummy_backend @@ -13,13 +14,13 @@ def test_default_get_images_impl(self): class PlaylistsTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.provider = backend.PlaylistsProvider(backend=None) def test_as_list_default_impl(self): - with self.assertRaises(NotImplementedError): + with 
pytest.raises(NotImplementedError): self.provider.as_list() def test_get_items_default_impl(self): - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.provider.get_items("some uri") diff --git a/tests/backend/test_listener.py b/tests/backend/test_listener.py index b81354fa4d..374c1fc4da 100644 --- a/tests/backend/test_listener.py +++ b/tests/backend/test_listener.py @@ -5,7 +5,7 @@ class BackendListenerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.listener = backend.BackendListener() def test_on_event_forwards_to_specific_handler(self): diff --git a/tests/config/test_config.py b/tests/config/test_config.py index 311500ac79..c55982e44c 100644 --- a/tests/config/test_config.py +++ b/tests/config/test_config.py @@ -97,9 +97,9 @@ def test_load_file_with_error(self): class ValidateTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.schema = config.ConfigSchema("foo") - self.schema["bar"] = config.ConfigValue() + self.schema["bar"] = config.String() def test_empty_config_no_schemas(self): conf, errors = config._validate({}, []) @@ -163,7 +163,7 @@ def test_config_single_schema_config_error(self): class PreProcessorTest(unittest.TestCase): - maxDiff = None # Show entire diff. # noqa: N815 + maxDiff = None # Show entire diff. def test_empty_config(self): result = config._preprocess("") @@ -192,9 +192,7 @@ def test_initial_comment_inline_handling(self): def test_inline_semicolon_comment(self): result = config._preprocess("[section]\nfoo = bar ; baz") - assert ( - result == "[__COMMENTS__]\n[section]\nfoo = bar\n__INLINE0__ = baz" - ) + assert result == "[__COMMENTS__]\n[section]\nfoo = bar\n__INLINE0__ = baz" def test_no_inline_hash_comment(self): result = config._preprocess("[section]\nfoo = bar # baz") @@ -218,29 +216,25 @@ def test_conversion(self): class PostProcessorTest(unittest.TestCase): - maxDiff = None # Show entire diff. # noqa: N815 + maxDiff = None # Show entire diff. 
def test_empty_config(self): result = config._postprocess("[__COMMENTS__]") assert result == "" def test_plain_section(self): - result = config._postprocess( - "[__COMMENTS__]\n" "[section]\n" "foo = bar" - ) + result = config._postprocess("[__COMMENTS__]\n[section]\nfoo = bar") assert result == "[section]\nfoo = bar" def test_initial_comments(self): - result = config._postprocess( - "[__COMMENTS__]\n" "__SEMICOLON0__ = foobar" - ) + result = config._postprocess("[__COMMENTS__]\n__SEMICOLON0__ = foobar") assert result == "; foobar" - result = config._postprocess("[__COMMENTS__]\n" "__HASH0__ = foobar") + result = config._postprocess("[__COMMENTS__]\n__HASH0__ = foobar") assert result == "# foobar" result = config._postprocess( - "[__COMMENTS__]\n" "__SEMICOLON0__ = foo\n" "__HASH1__ = bar" + "[__COMMENTS__]\n__SEMICOLON0__ = foo\n__HASH1__ = bar" ) assert result == "; foo\n# bar" @@ -255,7 +249,7 @@ def test_initial_comment_inline_handling(self): def test_inline_semicolon_comment(self): result = config._postprocess( - "[__COMMENTS__]\n" "[section]\n" "foo = bar\n" "__INLINE0__ = baz" + "[__COMMENTS__]\n[section]\nfoo = bar\n__INLINE0__ = baz" ) assert result == "[section]\nfoo = bar ; baz" @@ -264,9 +258,7 @@ def test_no_inline_hash_comment(self): assert result == "[__COMMENTS__]\n[section]\nfoo = bar # baz" def test_section_extra_text(self): - result = config._postprocess( - "[__COMMENTS__]\n" "[section]\n" "__SECTION0__ = foobar" - ) + result = config._postprocess("[__COMMENTS__]\n[section]\n__SECTION0__ = foobar") assert result == "[section] foobar" def test_section_extra_text_inline_semicolon(self): diff --git a/tests/config/test_schemas.py b/tests/config/test_schemas.py index f339d06d48..30a87ed4dd 100644 --- a/tests/config/test_schemas.py +++ b/tests/config/test_schemas.py @@ -8,7 +8,7 @@ class ConfigSchemaTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.schema = schemas.ConfigSchema("test") self.schema["foo"] = mock.Mock() self.schema["bar"] = mock.Mock() @@ -88,8 +88,8 @@ def test_conversion(self): schema = schemas.MapConfigSchema("test", types.LogLevel()) result, errors = schema.deserialize({"foo.bar": "DEBUG", "baz": "INFO"}) - assert logging.DEBUG == result["foo.bar"] - assert logging.INFO == result["baz"] + assert result["foo.bar"] == logging.DEBUG + assert result["baz"] == logging.INFO class DidYouMeanTest(unittest.TestCase): diff --git a/tests/config/test_types.py b/tests/config/test_types.py index d763bc113c..2c4edc1797 100644 --- a/tests/config/test_types.py +++ b/tests/config/test_types.py @@ -1,15 +1,17 @@ +import codecs import logging +import re import socket +from typing import ClassVar from unittest import mock import pytest - from mopidy.config import types from mopidy.internal import log @pytest.mark.parametrize( - "value, expected", + ("value", "expected"), [ # bytes are coded from UTF-8 and string-escaped: (b"abc", "abc"), @@ -28,7 +30,7 @@ def test_decode(value, expected): @pytest.mark.parametrize( - "value, expected", + ("value", "expected"), [ # unicode strings are string-escaped and encoded as UTF-8: ("abc", "abc"), @@ -52,37 +54,6 @@ def test_encode_decode_invalid_utf8(): assert result == data.decode(errors="surrogateescape") -class TestConfigValue: - def test_deserialize_decodes_bytes(self): - cv = types.ConfigValue() - - result = cv.deserialize(b"abc") - - assert isinstance(result, str) - - def test_serialize_conversion_to_string(self): - cv = types.ConfigValue() - - result = cv.serialize(object()) - - assert 
isinstance(result, str) - - def test_serialize_none(self): - cv = types.ConfigValue() - - result = cv.serialize(None) - - assert isinstance(result, str) - assert result == "" - - def test_serialize_supports_display(self): - cv = types.ConfigValue() - - result = cv.serialize(object(), display=True) - - assert isinstance(result, str) - - class TestDeprecated: def test_deserialize_returns_deprecated_value(self): cv = types.Deprecated() @@ -107,6 +78,7 @@ def test_deserialize_conversion_success(self): assert result == "foo" assert isinstance(result, str) + assert not isinstance(result, types._TransformedValue) def test_deserialize_decodes_utf8(self): cv = types.String() @@ -196,6 +168,46 @@ def test_deserialize_enforces_choices_optional(self): with pytest.raises(ValueError): cv.deserialize(b"foobar") + @pytest.mark.parametrize( + ("original", "transformed"), + ( + ("abc", "abc"), + ("ABC", "abc"), + ("aBc", "abc"), + ("123", "123"), + ("abc123def456", "abc123def456"), + ("ABC123def456GHI789", "abc123def456ghi789"), + ), + ) + def test_deserialize_utilises_transformer(self, original: str, transformed: str): + cv = types.String(transformer=lambda value: value.lower()) + + result = cv.deserialize(original) + assert isinstance(result, str) + assert isinstance(result, types._TransformedValue) + assert result == transformed + assert result.original == original + + @pytest.mark.parametrize( + ("original", "transformed"), + ( + ("abc", "abc"), + ("ABC", "abc"), + ("aBc", "abc"), + ("123", "123"), + ("abc123def456", "abc123def456"), + ("ABC123def456GHI789", "abc123def456ghi789"), + ), + ) + def test_serialize_transformed_value(self, original: str, transformed: str): + cv = types.String() + transformed_value = types._TransformedValue(original, transformed) + + result = cv.serialize(transformed_value) + assert isinstance(result, str) + assert not isinstance(result, types._TransformedValue) + assert result == original + class TestSecret: def test_deserialize_decodes_utf8(self): @@ -204,6 +216,7 @@ def test_deserialize_decodes_utf8(self): result = cv.deserialize("æøå".encode()) assert isinstance(result, str) + assert not isinstance(result, types._TransformedValue) assert result == "æøå" def test_deserialize_enforces_required(self): @@ -218,6 +231,18 @@ def test_deserialize_respects_optional(self): assert cv.deserialize(b"") is None assert cv.deserialize(b" ") is None + def test_deserialize_utilises_transformer(self): + cv = types.Secret( + transformer=lambda value: codecs.decode(value, encoding="rot13") + ) + + result = cv.deserialize("zbcvql") + + assert isinstance(result, str) + assert isinstance(result, types._TransformedValue) + assert result == "mopidy" + assert result.original == "zbcvql" + def test_serialize_none(self): cv = types.Secret() @@ -226,6 +251,16 @@ def test_serialize_none(self): assert isinstance(result, str) assert result == "" + def test_serialize_transformed_value(self): + cv = types.Secret() + transformed_value = types._TransformedValue("zbcvql", "mopidy") + + result = cv.serialize(transformed_value) + + assert isinstance(result, str) + assert not isinstance(result, types._TransformedValue) + assert result == "zbcvql" + def test_serialize_for_display_masks_value(self): cv = types.Secret() @@ -242,6 +277,16 @@ def test_serialize_none_for_display(self): assert isinstance(result, str) assert result == "" + def test_serialize_transformed_value_for_display_masks_value(self): + cv = types.Secret() + transformed_value = types._TransformedValue("zbcvql", "mopidy") + + result = 
cv.serialize(transformed_value, display=True) + + assert isinstance(result, str) + assert not isinstance(result, types._TransformedValue) + assert result == "********" + class TestInteger: def test_deserialize_conversion_success(self): @@ -296,6 +341,55 @@ def test_deserialize_enforces_maximum(self): cv.deserialize("15") +class TestFloat: + def test_deserialize_conversion_success(self): + cv = types.Float() + + assert cv.deserialize("123") == 123.0 + assert cv.deserialize("0") == 0.0 + assert cv.deserialize("-10") == -10.0 + assert cv.deserialize("3.14") == 3.14 + assert cv.deserialize("123.45") == 123.45 + assert cv.deserialize("-456.78") == -456.78 + + def test_deserialize_conversion_failure(self): + cv = types.Float() + + errmsg = re.escape("could not convert string to float") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("asd") + + def test_deserialize_enforces_required(self): + cv = types.Float() + + errmsg = "must be set" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("") + + def test_deserialize_respects_optional(self): + cv = types.Float(optional=True) + + assert cv.deserialize("") is None + + def test_deserialize_enforces_minimum(self): + cv = types.Float(minimum=10) + + assert cv.deserialize("10.1") == 10.1 + + errmsg = re.escape("must be larger than") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("9.9") + + def test_deserialize_enforces_maximum(self): + cv = types.Float(maximum=10) + + assert cv.deserialize("9.9") == 9.9 + + errmsg = re.escape("must be smaller than") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("10.1") + + class TestBoolean: def test_deserialize_conversion_success(self): cv = types.Boolean() @@ -367,9 +461,528 @@ def test_serialize_invalid_values(self): cv.serialize("1") +class TestPair: + def test_deserialize_conversion_success(self): + cv = types.Pair() + + result = cv.deserialize("foo|bar") + assert result == ("foo", "bar") + + result = cv.deserialize(" foo|bar") + assert result == ("foo", "bar") + + result = cv.deserialize("foo|bar ") + assert result == ("foo", "bar") + + result = cv.deserialize(" fo o | bar ") + assert result == ("fo o", "bar") + + result = cv.deserialize("foo|bar|baz") + assert result == ("foo", "bar|baz") + + def test_deserialize_decodes_utf8(self): + cv = types.Pair() + + result = cv.deserialize("æ|å".encode()) + assert result == ("æ", "å") + + result = cv.deserialize("æ | ø\n".encode()) + assert result == ("æ", "ø") + + result = cv.deserialize("æ ø| å".encode()) + assert result == ("æ ø", "å") + + result = cv.deserialize(" æ | øå \n".encode()) + assert result == ("æ", "øå") + + def test_deserialize_enforces_required(self): + cv = types.Pair() + + errmsg = re.escape("must be set") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("") + + def test_deserialize_respects_optional(self): + cv = types.Pair(optional=True) + + assert cv.deserialize("") is None + assert cv.deserialize(" ") is None + + def test_deserialize_enforces_required_separator(self): + cv = types.Pair() + + errmsg = "^" + re.escape("Config value must include '|' separator: abc") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc") + + def test_deserialize_respects_optional_separator(self): + cv = types.Pair(optional_pair=True) + + result = cv.deserialize("abc") + assert result == ("abc", "abc") + + result = cv.deserialize("abc|def") + assert result == ("abc", "def") + + @pytest.mark.parametrize("sep", ("!", "@", "#", "$", "%", "^", "&", "*", "/", "\\")) + def 
test_deserialize_respects_custom_separator(self, sep: str): + cv = types.Pair(separator=sep) + + result = cv.deserialize(f"abc{sep}def") + assert result == ("abc", "def") + + result = cv.deserialize(f"abc|def{sep}ghi|jkl") + assert result == ("abc|def", "ghi|jkl") + + result = cv.deserialize(f"abc{sep}def{sep}ghi") + assert result == ("abc", f"def{sep}ghi") + + result = cv.deserialize(f"ab|cd{sep}ef|gh{sep}ij|kl") + assert result == ("ab|cd", f"ef|gh{sep}ij|kl") + + result = cv.deserialize(f"|abcd|{sep}efgh|") + assert result == ("|abcd|", "efgh|") + + errmsg = ( + "^" + + re.escape(f"Config value must include {sep!r} separator: abc|def") + + "$" + ) + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def") + + @pytest.mark.parametrize("sep", ("!", "@", "#", "$", "%", "^", "&", "*", "/", "\\")) + def test_deserialize_respects_optional_custom_separator(self, sep: str): + cv = types.Pair(optional_pair=True, separator=sep) + + result = cv.deserialize(f"abc{sep}def") + assert result == ("abc", "def") + + result = cv.deserialize("abcdef") + assert result == ("abcdef", "abcdef") + + result = cv.deserialize("abc|def") + assert result == ("abc|def", "abc|def") + + result = cv.deserialize(f"|abc{sep}def|") + assert result == ("|abc", "def|") + + @pytest.mark.parametrize("optional", (True, False)) + @pytest.mark.parametrize("optional_pair", (True, False)) + def test_deserialize_enforces_required_pair_values( + self, optional: bool, optional_pair: bool + ): + cv = types.Pair(optional=optional, optional_pair=optional_pair) + + errmsg = re.escape("must be set") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("|def") + + @pytest.mark.parametrize("optional", (True, False)) + @pytest.mark.parametrize("optional_pair", (True, False)) + @pytest.mark.parametrize("sep", ("!", "@", "#", "$", "%", "^", "&", "*", "/", "\\")) + def test_deserialize_enforces_required_pair_values_with_custom_separator( + self, optional: bool, optional_pair: bool, sep: str + ): + cv = types.Pair(optional=optional, optional_pair=optional_pair, separator=sep) + + errmsg = re.escape("must be set") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize(f"abc{sep}") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize(f"{sep}def") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize(f"abc|def{sep}") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize(f"{sep}ghi|jkl") + + def test_deserialize_with_custom_subtypes(self): + cv = types.Pair(subtypes=(types.String(), types.Integer())) + result = cv.deserialize("abc|10") + assert result == ("abc", 10) + + cv = types.Pair(subtypes=(types.Float(), types.Boolean())) + result = cv.deserialize("3.14|true") + assert result == (3.14, True) + + cv = types.Pair(subtypes=(types.Path(), types.String())) + result = cv.deserialize("/dev/null | empty") + assert result == ("/dev/null", "empty") + + with mock.patch("socket.getaddrinfo") as getaddrinfo_mock: + cv = types.Pair(subtypes=(types.Hostname(), types.Port())) + result = cv.deserialize("localhost|6680") + assert result == ("localhost", 6680) + getaddrinfo_mock.assert_called_once_with("localhost", None) + + def test_deserialize_with_custom_subtypes_enforces_required(self): + cv = types.Pair(subtypes=(types.Integer(), types.Integer())) + + errmsg = re.escape("must be set") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("") + + def 
test_deserialize_with_custom_subtypes_respects_optional(self): + cv = types.Pair(optional=True, subtypes=(types.Float(), types.Float())) + + assert cv.deserialize("") is None + + def test_deserialize_with_custom_subtypes_enforces_required_separator(self): + errmsg = "^" + re.escape("Config value must include '|' separator: ") + + cv = types.Pair(subtypes=(types.String(), types.Secret())) + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc") + + cv = types.Pair(subtypes=(types.String(), types.Integer())) + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("123") + + def test_deserialize_with_custom_subtypes_respects_optional_separator(self): + cv = types.Pair(optional_pair=True, subtypes=(types.Integer(), types.Integer())) + result = cv.deserialize("42") + assert result == (42, 42) + + cv = types.Pair(optional_pair=True, subtypes=(types.Path(), types.String())) + result = cv.deserialize("/dev/null") + assert result == ("/dev/null", "/dev/null") + + cv = types.Pair(optional_pair=True, subtypes=(types.Port(), types.Port())) + result = cv.deserialize("443") + assert result == (443, 443) + + def test_deserialize_with_custom_subtypes_optional_separator_mixed_types( + self, + ): + cv = types.Pair(optional_pair=True, subtypes=(types.String(), types.Integer())) + + errmsg = re.escape("invalid literal for int() with base 10") + + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc") + + def test_deserialize_with_optional_custom_subtypes(self): + cv = types.Pair(subtypes=(types.String(), types.String(optional=True))) + result = cv.deserialize("abc|") + assert result == ("abc", None) + + cv = types.Pair(subtypes=(types.String(optional=True), types.String())) + result = cv.deserialize("|def") + assert result == (None, "def") + + cv = types.Pair( + subtypes=(types.String(optional=True), types.String(optional=True)) + ) + result = cv.deserialize("|") + assert result == (None, None) + + def test_serialize(self): + cv = types.Pair() + result = cv.serialize(("abc", "def")) + assert result == "abc|def" + result = cv.serialize(("abc", None)) + assert result == "abc|" + result = cv.serialize((None, "abc")) + assert result == "|abc" + + cv = types.Pair(subtypes=(types.String(), types.Integer())) + result = cv.serialize(("abc", 42)) + assert result == "abc|42" + result = cv.serialize(("abc", None)) + assert result == "abc|" + result = cv.serialize((None, 42)) + assert result == "|42" + + cv = types.Pair(subtypes=(types.String(), types.Path())) + result = cv.serialize(("null", "/dev/null")) + assert result == "null|/dev/null" + result = cv.serialize(("tmp", types._ExpandedPath("/tmp", "/tmp"))) + assert result == "tmp|/tmp" + result = cv.serialize(("null", None)) + assert result == "null|" + result = cv.serialize((None, types._ExpandedPath("/dev/null", "/dev/null"))) + assert result == "|/dev/null" + + @pytest.mark.parametrize("sep", ("!", "@", "#", "$", "%", "^", "&", "*", "/", "\\")) + def test_serialize_with_custom_separator(self, sep: str): + cv = types.Pair(separator=sep) + result = cv.serialize(("abc", "def")) + assert result == f"abc{sep}def" + result = cv.serialize(("abc", None)) + assert result == f"abc{sep}" + result = cv.serialize((None, "abc")) + assert result == f"{sep}abc" + + cv = types.Pair(separator=sep, subtypes=(types.String(), types.Integer())) + result = cv.serialize(("abc", 42)) + assert result == f"abc{sep}42" + result = cv.serialize(("abc", None)) + assert result == f"abc{sep}" + result = cv.serialize((None, 42)) + assert result == f"{sep}42" 
+ + cv = types.Pair(separator=sep, subtypes=(types.String(), types.Path())) + result = cv.serialize(("null", "/dev/null")) + assert result == f"null{sep}/dev/null" + result = cv.serialize(("tmp", types._ExpandedPath("/tmp", "/tmp"))) + assert result == f"tmp{sep}/tmp" + result = cv.serialize(("null", None)) + assert result == f"null{sep}" + result = cv.serialize((None, types._ExpandedPath("/dev/null", "/dev/null"))) + assert result == f"{sep}/dev/null" + + def test_serialize_returns_single_value_with_optional_pair(self): + cv = types.Pair(optional_pair=True) + + result = cv.serialize(("abc", "abc")) + assert result == "abc" + + result = cv.serialize(("abc", "def")) + assert result == "abc|def" + + result = cv.serialize(("abc", "abc"), display=True) + assert result == "abc|abc" + + result = cv.serialize(("abc", "def"), display=True) + assert result == "abc|def" + + def test_deserialize_nested_pair_success(self): + cv = types.Pair(subtypes=(types.Integer(), types.Pair())) + result = cv.deserialize("50|def|ghi") + assert result == (50, ("def", "ghi")) + + cv = types.Pair( + separator="#", + subtypes=( + types.String(), + types.Pair(subtypes=(types.Integer(), types.Integer())), + ), + ) + result = cv.deserialize("xyz#4|-5") + assert result == ("xyz", (4, -5)) + + cv = types.Pair( + subtypes=( + types.Pair(separator="*", subtypes=(types.Float(), types.Float())), + types.String(), + ), + ) + result = cv.deserialize("42*2.5|abc") + assert result == ((42, 2.5), "abc") + + cv = types.Pair( + subtypes=( + types.Pair(separator="#"), + types.Pair(), + ), + ) + result = cv.deserialize("abc#def|ghi|jkl") + assert result == (("abc", "def"), ("ghi", "jkl")) + + def test_deserialize_nested_pair_fail(self): + cv = types.Pair( + subtypes=( + types.Pair(), + types.String(), + ), + ) + errmsg = "^" + re.escape("Config value must include '|' separator: abc") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def|ghi") + + cv = types.Pair( + optional_pair=True, + subtypes=( + types.Pair(), + types.String(), + ), + ) + errmsg = "^" + re.escape("Config value must include '|' separator: abc") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def|ghi") + + cv = types.Pair( + optional_pair=True, + subtypes=( + types.String(), + types.Pair(), + ), + ) + errmsg = "^" + re.escape("Config value must include '|' separator: def") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def") + + cv = types.Pair( + subtypes=( + types.Pair(), + types.Pair(separator="#"), + ), + ) + errmsg = "^" + re.escape("Config value must include '|' separator: abc") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def|ghi#jkl") + + def test_deserialize_nested_pair_optional(self): + cv = types.Pair( + subtypes=( + types.Pair(optional=True), + types.Pair(), + ) + ) + result = cv.deserialize("|abc|def") + assert result == (None, ("abc", "def")) + + cv = types.Pair( + subtypes=( + types.Pair(), + types.Pair(optional=True), + ), + ) + errmsg = "^" + re.escape("Config value must include '|' separator: abc") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def|") + + cv = types.Pair( + subtypes=( + types.Pair(separator="#"), + types.Pair(optional=True), + ), + ) + result = cv.deserialize("abc#def|") + assert result == (("abc", "def"), None) + + cv = types.Pair( + separator="#", + subtypes=( + types.Pair(), + types.Pair(optional=True), + ), + ) + result = cv.deserialize("mno|xyz#") + assert result == (("mno", "xyz"), None) 
+ + cv = types.Pair( + subtypes=( + types.Pair(optional=True), + types.Pair(optional=True), + ), + ) + result = cv.deserialize("|") + assert result == (None, None) + + def test_deserialize_nested_pair_optional_pair(self): + cv = types.Pair( + subtypes=( + types.Pair(optional_pair=True), + types.String(), + ), + ) + result = cv.deserialize("abc|def|ghi") + assert result == (("abc", "abc"), "def|ghi") + + cv = types.Pair( + subtypes=( + types.String(), + types.Pair(optional_pair=True), + ) + ) + result = cv.deserialize("abc|def") + assert result == ("abc", ("def", "def")) + + cv = types.Pair( + subtypes=( + types.Pair(optional_pair=True), + types.Pair(), + ), + ) + result = cv.deserialize("abc|def|ghi") + assert result == (("abc", "abc"), ("def", "ghi")) + + cv = types.Pair( + subtypes=( + types.Pair(), + types.Pair(optional_pair=True), + ), + ) + errmsg = "^" + re.escape("Config value must include '|' separator: abc") + "$" + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("abc|def|ghi") + + cv = types.Pair( + subtypes=( + types.Pair(separator="#"), + types.Pair(optional_pair=True), + ), + ) + result = cv.deserialize("abc#def|ghi") + assert result == (("abc", "def"), ("ghi", "ghi")) + + cv = types.Pair( + subtypes=( + types.Pair(optional_pair=True), + types.Pair(optional_pair=True), + ), + ) + result = cv.deserialize("abc|def") + assert result == (("abc", "abc"), ("def", "def")) + + errmsg = re.escape("must be set") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("|") + + cv = types.Pair( + optional_pair=True, + subtypes=( + types.Pair(optional_pair=True), + types.Pair(optional_pair=True), + ), + ) + result = cv.deserialize("abc") + assert result == (("abc", "abc"), ("abc", "abc")) + + def test_serialize_nested_pair(self): + cv = types.Pair(subtypes=(types.String(), types.Pair())) + result = cv.serialize(("abc", ("def", "ghi"))) + assert result == "abc|def|ghi" + + cv = types.Pair( + subtypes=( + types.Pair(separator="#"), + types.String(), + ), + ) + result = cv.serialize((("abc", "def"), "ghi")) + assert result == "abc#def|ghi" + + cv = types.Pair( + separator="#", + subtypes=(types.Pair(), types.Pair()), + ) + result = cv.serialize((("abc", "def"), ("ghi", "jkl"))) + assert result == "abc|def#ghi|jkl" + + cv = types.Pair( + optional_pair=True, + subtypes=( + types.Pair(optional_pair=True), + types.Pair(optional_pair=True), + ), + ) + result = cv.serialize((("abc", "abc"), ("abc", "abc"))) + assert result == "abc" + result = cv.serialize((("abc", "abc"), ("abc", "abc")), display=True) + assert result == "abc|abc|abc|abc" + + class TestList: - # TODO: add test_deserialize_ignores_blank - # TODO: add test_serialize_ignores_blank # TODO: add test_deserialize_handles_escapes def test_deserialize_conversion_success(self): @@ -381,12 +994,30 @@ def test_deserialize_conversion_success(self): result = cv.deserialize(b" foo,bar\nbar\nbaz") assert result == ("foo,bar", "bar", "baz") + def test_deserialize_conversion_success_unique(self): + cv = types.List(unique=True) + + result = cv.deserialize("foo, bar, baz ") + assert result == {"foo", "bar", "baz"} + + result = cv.deserialize("foo,bar,foo,baz,foo") + assert result == {"foo", "bar", "baz"} + + result = cv.deserialize(" foo,bar\nbar\nbaz") + assert result == {"foo,bar", "bar", "baz"} + def test_deserialize_creates_tuples(self): cv = types.List(optional=True) assert isinstance(cv.deserialize(b"foo,bar,baz"), tuple) assert isinstance(cv.deserialize(b""), tuple) + def test_deserialize_creates_frozensets(self): + cv = 
types.List(optional=True, unique=True) + + assert isinstance(cv.deserialize("foo,bar,baz"), frozenset) + assert isinstance(cv.deserialize(""), frozenset) + def test_deserialize_decodes_utf8(self): cv = types.List() @@ -416,7 +1047,23 @@ def test_deserialize_respects_optional(self): assert cv.deserialize(b"") == () - def test_serialize(self): + @pytest.mark.parametrize( + "value", + ( + "foo,,bar,,baz", + "foo, ,bar, , baz", + "foo, ,bar, , baz, ", + "foo\n\nbar\n\nbaz\n", + "foo \n bar \n \n baz", + ), + ) + def test_deserialize_ignores_blanks(self, value): + cv = types.List() + + result = cv.deserialize(value) + assert result == ("foo", "bar", "baz") + + def test_serialize_tuples(self): cv = types.List() result = cv.serialize(("foo", "bar", "baz")) @@ -424,6 +1071,16 @@ def test_serialize(self): assert isinstance(result, str) assert result == "\n foo\n bar\n baz" + def test_serialize_sets(self): + cv = types.List(unique=True) + + result = cv.serialize({"foo", "bar", "baz"}) + + assert isinstance(result, str) + assert "\n foo" in result + assert "\n bar" in result + assert "\n baz" in result + def test_serialize_none(self): cv = types.List() @@ -432,6 +1089,97 @@ def test_serialize_none(self): assert isinstance(result, str) assert result == "" + @pytest.mark.parametrize( + "value", + ( + ("foo", "", "bar", "baz"), + ("foo", "bar", "", "", "baz"), + ), + ) + def test_serialize_ignores_blanks(self, value): + cv = types.List() + + result = cv.serialize(value) + + assert result == "\n foo\n bar\n baz" + + def test_serialize_ignores_blanks_sets(self): + cv = types.List(unique=True) + + result = cv.serialize({"foo", "", "bar", "baz"}) + assert "\n foo" in result + assert "\n bar" in result + assert "\n baz" in result + assert "\n \n" not in result + assert not result.endswith("\n ") + + def test_deserialize_with_custom_subtype(self): + cv = types.List(subtype=types.Integer()) + expected = (1, 2, 3) + assert cv.deserialize("1, 2, 3") == expected + assert cv.deserialize("1\n2\n3") == expected + assert cv.deserialize("\n 1\n 2\n 3") == expected + + cv = types.List(subtype=types.Pair()) + expected = (("a", "x"), ("b", "y"), ("c", "z")) + assert cv.deserialize("a|x,b|y,c|z") == expected + assert cv.deserialize("a|x\nb|y\nc|z") == expected + assert cv.deserialize("\n a|x\n b|y\n c|z") == expected + + cv = types.List( + subtype=types.Pair(subtypes=(types.Integer(), types.Integer())), + ) + expected = ((7, 1), (8, 2), (9, 3)) + assert cv.deserialize("7|1,8|2,9|3") == expected + assert cv.deserialize("7|1\n8|2\n9|3") == expected + assert cv.deserialize("\n 7|1\n 8|2\n 9|3") == expected + + with mock.patch("socket.getaddrinfo") as getaddrinfo_mock: + cv = types.List( + subtype=types.Pair(subtypes=(types.Hostname(), types.Port())), + ) + expected = (("localhost", 8080), ("example.com", 443)) + assert cv.deserialize("localhost|8080,example.com|443") == expected + assert cv.deserialize("localhost|8080\nexample.com|443") == expected + call_localhost = mock.call("localhost", None) + call_examplecom = mock.call("example.com", None) + assert getaddrinfo_mock.mock_calls == [ + call_localhost, + call_examplecom, + call_localhost, + call_examplecom, + ] + + def test_deserialize_with_custom_subtype_enforces_required(self): + cv = types.List(subtype=types.Float()) + + errmsg = re.escape("must be set") + with pytest.raises(ValueError, match=errmsg): + cv.deserialize("") + + def test_deserialize_with_custom_subtype_respects_optional(self): + cv = types.List(optional=True, subtype=types.Float()) + + assert 
cv.deserialize("") == () + + def test_serialize_with_custom_subtype(self): + cv = types.List(subtype=types.Integer()) + result = cv.serialize((1, 2, 3)) + assert result == "\n 1\n 2\n 3" + + cv = types.List(subtype=types.Pair()) + result = cv.serialize((("a", "x"), ("b", "y"), ("c", "z"))) + assert result == "\n a|x\n b|y\n c|z" + + cv = types.List( + subtype=types.Pair( + separator="#", + subtypes=(types.Integer(), types.Integer()), + ), + ) + result = cv.serialize(((7, 1), (8, 2), (9, 3))) + assert result == "\n 7#1\n 8#2\n 9#3" + class TestLogColor: def test_deserialize(self): @@ -461,7 +1209,7 @@ def test_serialize_ignores_unknown_color(self): class TestLogLevel: - levels = { + levels: ClassVar[dict[str, int]] = { "critical": logging.CRITICAL, "error": logging.ERROR, "warning": logging.WARNING, @@ -622,3 +1370,8 @@ def test_serialize_supports_unicode_string(self): cv = types.Path() assert cv.serialize("æøå") == "æøå" + + def test_serialize_none(self): + cv = types.Path() + + assert cv.serialize(None) == "" diff --git a/tests/config/test_validator.py b/tests/config/test_validator.py index d87854d2a3..7032c97949 100644 --- a/tests/config/test_validator.py +++ b/tests/config/test_validator.py @@ -1,9 +1,8 @@ -import unittest - +import pytest from mopidy.config import validators -class ValidateChoiceTest(unittest.TestCase): +class ValidateChoiceTest: def test_no_choices_passes(self): validators.validate_choice("foo", None) @@ -12,17 +11,18 @@ def test_valid_value_passes(self): validators.validate_choice(1, [1, 2, 3]) def test_empty_choices_fails(self): - self.assertRaises(ValueError, validators.validate_choice, "foo", []) + with pytest.raises(ValueError): + validators.validate_choice("foo", []) def test_invalid_value_fails(self): words = ["foo", "bar", "baz"] - self.assertRaises( - ValueError, validators.validate_choice, "foobar", words - ) - self.assertRaises(ValueError, validators.validate_choice, 5, [1, 2, 3]) + with pytest.raises(ValueError): + validators.validate_choice("foobar", words) + with pytest.raises(ValueError): + validators.validate_choice(5, [1, 2, 3]) -class ValidateMinimumTest(unittest.TestCase): +class ValidateMinimumTest: def test_no_minimum_passes(self): validators.validate_minimum(10, None) @@ -30,13 +30,15 @@ def test_valid_value_passes(self): validators.validate_minimum(10, 5) def test_to_small_value_fails(self): - self.assertRaises(ValueError, validators.validate_minimum, 10, 20) + with pytest.raises(ValueError): + validators.validate_minimum(10, 20) def test_to_small_value_fails_with_zero_as_minimum(self): - self.assertRaises(ValueError, validators.validate_minimum, -1, 0) + with pytest.raises(ValueError): + validators.validate_minimum(-1, 0) -class ValidateMaximumTest(unittest.TestCase): +class ValidateMaximumTest: def test_no_maximum_passes(self): validators.validate_maximum(5, None) @@ -44,13 +46,15 @@ def test_valid_value_passes(self): validators.validate_maximum(5, 10) def test_to_large_value_fails(self): - self.assertRaises(ValueError, validators.validate_maximum, 10, 5) + with pytest.raises(ValueError): + validators.validate_maximum(10, 5) def test_to_large_value_fails_with_zero_as_maximum(self): - self.assertRaises(ValueError, validators.validate_maximum, 5, 0) + with pytest.raises(ValueError): + validators.validate_maximum(5, 0) -class ValidateRequiredTest(unittest.TestCase): +class ValidateRequiredTest: def test_passes_when_false(self): validators.validate_required("foo", False) validators.validate_required("", False) @@ -63,5 +67,7 @@ def 
test_passes_when_required_and_set(self): validators.validate_required([1], True) def test_blocks_when_required_and_emtpy(self): - self.assertRaises(ValueError, validators.validate_required, "", True) - self.assertRaises(ValueError, validators.validate_required, [], True) + with pytest.raises(ValueError): + validators.validate_required("", True) + with pytest.raises(ValueError): + validators.validate_required([], True) diff --git a/tests/core/test_actor.py b/tests/core/test_actor.py index d27fb8f942..6c9552d6a1 100644 --- a/tests/core/test_actor.py +++ b/tests/core/test_actor.py @@ -4,12 +4,12 @@ import unittest from unittest import mock -import pykka - import mopidy +import pykka +import pytest from mopidy.audio import PlaybackState from mopidy.core import Core, CoreListener -from mopidy.internal import models, storage, versioning +from mopidy.internal import models, storage from mopidy.models import Track from tests import dummy_mixer @@ -52,7 +52,7 @@ def make_backend_mock( class CoreActorTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.backend1 = make_backend_mock( "B1", uri_schemes=["dummy1"], @@ -70,9 +70,13 @@ def setUp(self): # noqa: N802 has_playlists=True, ) - self.core = Core(mixer=None, backends=[self.backend1, self.backend2]) + self.core = Core( + config={}, + mixer=None, + backends=[self.backend1, self.backend2], + ) - def tearDown(self): # noqa: N802 + def tearDown(self): pykka.ActorRegistry.stop_all() def test_uri_schemes_has_uris_from_all_backends(self): @@ -102,7 +106,9 @@ def test_exclude_backend_from_sublists_on_error_when_first(self): ) core = Core( - mixer=None, backends=[backend3, self.backend1, self.backend2] + config={}, + mixer=None, + backends=[backend3, self.backend1, self.backend2], ) assert core.backends == [self.backend1, self.backend2] @@ -122,7 +128,9 @@ def test_exclude_backend_from_sublists_on_error_when_not_first(self): ) core = Core( - mixer=None, backends=[self.backend1, backend3, self.backend2] + config={}, + mixer=None, + backends=[self.backend1, backend3, self.backend2], ) assert core.backends == [self.backend1, self.backend2] @@ -134,17 +142,18 @@ def test_exclude_backend_from_sublists_on_error_when_not_first(self): def test_backends_with_colliding_uri_schemes_fails(self): self.backend2.uri_schemes.get.return_value = ["dummy1", "dummy2"] - self.assertRaisesRegex( + with pytest.raises( AssertionError, - "Cannot add URI scheme 'dummy1' for B2, " - "it is already handled by B1", - Core, - mixer=None, - backends=[self.backend1, self.backend2], - ) + match="Cannot add URI scheme 'dummy1' for B2, it is already handled by B1", + ): + Core( + config={}, + mixer=None, + backends=[self.backend1, self.backend2], + ) def test_version(self): - assert self.core.get_version() == versioning.get_version() + assert self.core.get_version() == mopidy.__version__ @mock.patch("mopidy.core.playback.listener.CoreListener", spec=CoreListener) def test_state_changed(self, listener_mock): @@ -174,9 +183,13 @@ def setUp(self): } self.mixer = dummy_mixer.create_proxy() - self.core = Core(config=config, mixer=self.mixer, backends=[]) + self.core = Core( + config=config, + mixer=self.mixer, + backends=[], + ) - def tearDown(self): # noqa: N802 + def tearDown(self): pykka.ActorRegistry.stop_all() shutil.rmtree(self.temp_dir) @@ -240,9 +253,7 @@ def test_load_state_with_data(self): ), ] ), - playback=models.PlaybackState( - tlid=12, state="paused", time_position=432 - ), + playback=models.PlaybackState(tlid=12, state="paused", time_position=432), 
mixer=models.MixerState(mute=True, volume=12), ) storage.dump(self.state_file, data) diff --git a/tests/core/test_events.py b/tests/core/test_events.py index 828a196347..308f0cc799 100644 --- a/tests/core/test_events.py +++ b/tests/core/test_events.py @@ -2,7 +2,6 @@ from unittest import mock import pykka - from mopidy import core from mopidy.internal import deprecation from mopidy.models import Track @@ -12,7 +11,7 @@ @mock.patch.object(core.CoreListener, "send") class BackendEventsTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): config = {"core": {"max_tracklist_length": 10000}} self.backend = dummy_backend.create_proxy() @@ -24,7 +23,7 @@ def setUp(self): # noqa: N802 with deprecation.ignore(): self.core = core.Core.start(config, backends=[self.backend]).proxy() - def tearDown(self): # noqa: N802 + def tearDown(self): pykka.ActorRegistry.stop_all() def test_forwards_backend_playlists_loaded_event_to_frontends(self, send): diff --git a/tests/core/test_history.py b/tests/core/test_history.py index 4a09dd2328..eaa4ea5006 100644 --- a/tests/core/test_history.py +++ b/tests/core/test_history.py @@ -1,12 +1,13 @@ import unittest +import pytest from mopidy.core import HistoryController from mopidy.internal.models import HistoryState, HistoryTrack from mopidy.models import Artist, Ref, Track class PlaybackHistoryTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.tracks = [ Track( uri="dummy1:a", @@ -30,7 +31,7 @@ def test_add_track(self): assert self.history.get_length() == 3 def test_non_tracks_are_rejected(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.history._add_track(object()) assert self.history.get_length() == 0 @@ -54,7 +55,7 @@ def test_track_artist_no_name(self): class CoreHistorySaveLoadStateTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.tracks = [ Track(uri="dummy1:a", name="foober"), Track(uri="dummy2:a", name="foo"), @@ -105,7 +106,7 @@ def test_load(self): assert hist[3] == (56, self.refs[1]) def test_load_invalid_type(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.history._load_state(11, None) def test_load_none(self): diff --git a/tests/core/test_library.py b/tests/core/test_library.py index 674979c96a..88a5ee7795 100644 --- a/tests/core/test_library.py +++ b/tests/core/test_library.py @@ -1,13 +1,14 @@ import unittest from unittest import mock +import pytest from mopidy import backend, core from mopidy.internal import validation from mopidy.models import Image, Ref, SearchResult, Track class BaseCoreLibraryTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): dummy1_root = Ref.directory(uri="dummy1:directory", name="dummy1") self.backend1 = mock.Mock() self.backend1.uri_schemes.get.return_value = ["dummy1"] @@ -36,7 +37,9 @@ def setUp(self): # noqa: N802 self.backend3.has_library_browse.return_value.get.return_value = False self.core = core.Core( - mixer=None, backends=[self.backend1, self.backend2, self.backend3] + config={}, + mixer=None, + backends=[self.backend1, self.backend2, self.backend3], ) @@ -339,11 +342,11 @@ def test_checks_field_is_valid(self, check_choice_mock): ) def test_any_field_raises_valueerror(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.core.library.get_distinct("any") def test_unknown_tag_in_query_raises_valueerror(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.core.library.get_distinct("album", 
{"track": ["a"]}) def test_track_name_field_maps_to_track_for_backwards_compatibility(self): @@ -402,23 +405,23 @@ def test_wrong_result_types_removed_and_logged(self, logger_mock): class LegacyFindExactToSearchLibraryTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.backend = mock.Mock() self.backend.actor_ref.actor_class.__name__ = "DummyBackend" self.backend.uri_schemes.get.return_value = ["dummy"] self.backend.library = mock.Mock(spec=backend.LibraryProvider) - self.core = core.Core(mixer=None, backends=[self.backend]) + self.core = core.Core(config={}, mixer=None, backends=[self.backend]) def test_core_search_call_backend_search_with_exact(self): self.core.library.search(query={"any": ["a"]}) self.backend.library.search.assert_called_once_with( - query=dict(any=["a"]), uris=None, exact=False + query={"any": ["a"]}, uris=None, exact=False ) def test_core_search_with_exact_call_backend_search_with_exact(self): self.core.library.search(query={"any": ["a"]}, exact=True) self.backend.library.search.assert_called_once_with( - query=dict(any=["a"]), uris=None, exact=True + query={"any": ["a"]}, uris=None, exact=True ) def test_core_search_with_handles_legacy_backend(self): @@ -428,7 +431,7 @@ def test_core_search_with_handles_legacy_backend(self): class MockBackendCoreLibraryBase(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): dummy_root = Ref.directory(uri="dummy:directory", name="dummy") self.library = mock.Mock(spec=backend.LibraryProvider) @@ -439,7 +442,7 @@ def setUp(self): # noqa: N802 self.backend.uri_schemes.get.return_value = ["dummy"] self.backend.library = self.library - self.core = core.Core(mixer=None, backends=[self.backend]) + self.core = core.Core(config={}, mixer=None, backends=[self.backend]) @mock.patch("mopidy.core.library.logger") @@ -584,7 +587,7 @@ def test_backend_raises_lookuperror(self, logger): # TODO: is this behavior desired? Do we need to continue handling # LookupError case specially. 
self.library.search.return_value.get.side_effect = LookupError - with self.assertRaises(LookupError): + with pytest.raises(LookupError): self.core.library.search(query={"any": ["foo"]}) def test_backend_returns_none(self, logger): diff --git a/tests/core/test_listener.py b/tests/core/test_listener.py index 330f1edabb..d41194288a 100644 --- a/tests/core/test_listener.py +++ b/tests/core/test_listener.py @@ -6,15 +6,13 @@ class CoreListenerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.listener = CoreListener() def test_on_event_forwards_to_specific_handler(self): self.listener.track_playback_paused = mock.Mock() - self.listener.on_event( - "track_playback_paused", track=TlTrack(), position=0 - ) + self.listener.on_event("track_playback_paused", track=TlTrack(), position=0) self.listener.track_playback_paused.assert_called_with( track=TlTrack(), position=0 diff --git a/tests/core/test_mixer.py b/tests/core/test_mixer.py index 601a0d6b77..4b5d4d91d7 100644 --- a/tests/core/test_mixer.py +++ b/tests/core/test_mixer.py @@ -2,7 +2,7 @@ from unittest import mock import pykka - +import pytest from mopidy import core, mixer from mopidy.internal.models import MixerState @@ -10,9 +10,13 @@ class CoreMixerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.mixer = mock.Mock(spec=mixer.Mixer) - self.core = core.Core(mixer=self.mixer, backends=[]) + self.core = core.Core( + config={}, + mixer=self.mixer, + backends=[], + ) def test_get_volume(self): self.mixer.get_volume.return_value.get.return_value = 30 @@ -40,8 +44,12 @@ def test_set_mute(self): class CoreNoneMixerTest(unittest.TestCase): - def setUp(self): # noqa: N802 - self.core = core.Core(mixer=None, backends=[]) + def setUp(self): + self.core = core.Core( + config={}, + mixer=None, + backends=[], + ) def test_get_volume_return_none_because_it_is_unknown(self): assert self.core.mixer.get_volume() is None @@ -58,11 +66,15 @@ def test_set_mute_return_false_because_it_failed(self): @mock.patch.object(mixer.MixerListener, "send") class CoreMixerListenerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.mixer = dummy_mixer.create_proxy() - self.core = core.Core(mixer=self.mixer, backends=[]) + self.core = core.Core( + config={}, + mixer=self.mixer, + backends=[], + ) - def tearDown(self): # noqa: N802 + def tearDown(self): pykka.ActorRegistry.stop_all() def test_forwards_mixer_volume_changed_event_to_frontends(self, send): @@ -79,8 +91,12 @@ def test_forwards_mixer_mute_changed_event_to_frontends(self, send): @mock.patch.object(mixer.MixerListener, "send") class CoreNoneMixerListenerTest(unittest.TestCase): - def setUp(self): # noqa: N802 - self.core = core.Core(mixer=None, backends=[]) + def setUp(self): + self.core = core.Core( + config={}, + mixer=None, + backends=[], + ) def test_forwards_mixer_volume_changed_event_to_frontends(self, send): assert self.core.mixer.set_volume(volume=60) is False @@ -92,10 +108,14 @@ def test_forwards_mixer_mute_changed_event_to_frontends(self, send): class MockBackendCoreMixerBase(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.mixer = mock.Mock() self.mixer.actor_ref.actor_class.__name__ = "DummyMixer" - self.core = core.Core(mixer=self.mixer, backends=[]) + self.core = core.Core( + config={}, + mixer=self.mixer, + backends=[], + ) class GetVolumeBadBackendTest(MockBackendCoreMixerBase): @@ -147,9 +167,13 @@ def test_backend_returns_wrong_type(self): class 
CoreMixerSaveLoadStateTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.mixer = dummy_mixer.create_proxy() - self.core = core.Core(mixer=self.mixer, backends=[]) + self.core = core.Core( + config={}, + mixer=self.mixer, + backends=[], + ) def test_save_mute(self): volume = 32 @@ -183,7 +207,7 @@ def test_load_not_covered(self): target = MixerState(volume=56, mute=False) coverage = ["other"] self.core.mixer._load_state(target, coverage) - assert 21 == self.core.mixer.get_volume() + assert self.core.mixer.get_volume() == 21 assert self.core.mixer.get_mute() is True def test_load_mute_on(self): @@ -203,7 +227,7 @@ def test_load_mute_off(self): assert self.core.mixer.get_mute() is False def test_load_invalid_type(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.core.mixer._load_state(11, None) def test_load_none(self): diff --git a/tests/core/test_playback.py b/tests/core/test_playback.py index a21ae6d6bb..b2e9a6328c 100644 --- a/tests/core/test_playback.py +++ b/tests/core/test_playback.py @@ -1,8 +1,8 @@ +from typing import ClassVar from unittest import mock import pykka import pytest - from mopidy import backend, core from mopidy.internal import deprecation from mopidy.internal.models import PlaybackState @@ -33,7 +33,7 @@ def _translate_uri_call_limit(self, uri): if "limit_never" in uri: # unplayable return None - elif "limit_one" in uri: + if "limit_one" in uri: # one time playable if self._call_onetime: return None @@ -43,12 +43,11 @@ def _translate_uri_call_limit(self, uri): def translate_uri(self, uri): if "error" in uri: raise Exception(uri) - elif "unplayable" in uri: + if "unplayable" in uri: return None - elif "limit" in uri: + if "limit" in uri: return self._translate_uri_call_limit(uri) - else: - return uri + return uri class MyTestBackend(dummy_backend.DummyBackend): @@ -58,8 +57,10 @@ def __init__(self, config, audio): class BaseTest: - config = {"core": {"max_tracklist_length": 10000}} - tracks = [ + config: ClassVar[dict[str, dict[str, int]]] = { + "core": {"max_tracklist_length": 10000} + } + tracks: ClassVar[list[Track]] = [ Track(uri="dummy:a", length=1234, name="foo"), Track(uri="dummy:b", length=1234), Track(uri="dummy:c", length=1234), @@ -67,18 +68,14 @@ class BaseTest: def setup_method(self, method): self.audio = dummy_audio.create_proxy(config=self.config, mixer=None) - self.backend = MyTestBackend.start( - audio=self.audio, config=self.config - ).proxy() + self.backend = MyTestBackend.start(audio=self.audio, config=self.config).proxy() self.core = core.Core( audio=self.audio, backends=[self.backend], config=self.config ) self.playback = self.core.playback # We don't have a core actor running, so call about to finish directly. 
- self.audio.set_about_to_finish_callback( - self.playback._on_about_to_finish - ) + self.audio.set_about_to_finish_callback(self.playback._on_about_to_finish) with deprecation.ignore(): self.core.tracklist.add(self.tracks) @@ -114,7 +111,7 @@ class TestPlayHandling(BaseTest): def test_get_current_tl_track_play(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() assert self.core.playback.get_current_tl_track() == tl_tracks[0] @@ -122,7 +119,7 @@ def test_get_current_tl_track_play(self): def test_get_current_track_play(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() assert self.core.playback.get_current_track() == self.tracks[0] @@ -130,7 +127,7 @@ def test_get_current_track_play(self): def test_get_current_tlid_play(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() assert self.core.playback.get_current_tlid() == tl_tracks[0].tlid @@ -141,7 +138,7 @@ def test_play_skips_to_next_on_unplayable_track(self): self.audio.trigger_fake_playback_failure(tl_tracks[0].track.uri) - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() current_tl_track = self.core.playback.get_current_tl_track() @@ -152,7 +149,7 @@ def test_resume_skips_to_next_on_unplayable_track(self): resuming playback.""" tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.core.playback.pause() self.audio.trigger_fake_playback_failure(tl_tracks[1].track.uri) @@ -164,22 +161,11 @@ def test_resume_skips_to_next_on_unplayable_track(self): current_tl_track = self.core.playback.get_current_tl_track() assert tl_tracks[2] == current_tl_track - def test_play_tlid(self): - tl_tracks = self.core.tracklist.get_tl_tracks() - - self.core.playback.play(tlid=tl_tracks[1].tlid) - self.replay_events() - - current_tl_track = self.core.playback.get_current_tl_track() - assert tl_tracks[1] == current_tl_track - def test_default_is_live_behaviour_is_not_live(self): assert not self.backend.playback.is_live(self.tracks[0].uri).get() def test_download_buffering_is_not_enabled_by_default(self): - assert not self.backend.playback.should_download( - self.tracks[0].uri - ).get() + assert not self.backend.playback.should_download(self.tracks[0].uri).get() class TestNextHandling(BaseTest): @@ -214,7 +200,7 @@ def test_get_current_track_next(self): assert current_track == self.tracks[1] @pytest.mark.parametrize( - "repeat, random, single, consume, index, result", + ("repeat", "random", "single", "consume", "index", "result"), [ (False, False, False, False, 0, 1), (False, False, False, False, 2, None), @@ -222,12 +208,10 @@ def test_get_current_track_next(self): (True, False, False, False, 2, 0), ], ) - def test_next_all_modes( - self, repeat, random, single, consume, index, result - ): + def test_next_all_modes(self, repeat, random, single, consume, index, result): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[index]) + self.core.playback.play(tl_tracks[index].tlid) self.replay_events() self.core.tracklist.set_repeat(repeat) self.core.tracklist.set_random(random) @@ -240,14 +224,12 @@ def test_next_all_modes( if result is None: assert 
self.core.playback.get_current_tl_track() is None else: - assert ( - self.core.playback.get_current_tl_track() == tl_tracks[result] - ) + assert self.core.playback.get_current_tl_track() == tl_tracks[result] def test_next_keeps_finished_track_in_tracklist(self): tl_track = self.core.tracklist.get_tl_tracks()[0] - self.core.playback.play(tl_track) + self.core.playback.play(tl_track.tlid) self.replay_events() self.core.playback.next() @@ -258,7 +240,7 @@ def test_next_keeps_finished_track_in_tracklist(self): def test_next_skips_over_unplayable_track(self): tl_tracks = self.core.tracklist.get_tl_tracks() self.audio.trigger_fake_playback_failure(tl_tracks[1].track.uri) - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.next() @@ -303,7 +285,7 @@ class TestPreviousHandling(BaseTest): def test_get_current_tl_track_prev(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.core.playback.previous() self.replay_events() @@ -312,14 +294,14 @@ def test_get_current_tl_track_prev(self): def test_get_current_track_prev(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.core.playback.previous() self.replay_events() assert self.core.playback.get_current_track() == self.tracks[0] @pytest.mark.parametrize( - "repeat, random, single, consume, index, result", + ("repeat", "random", "single", "consume", "index", "result"), [ (False, False, False, False, 0, None), (False, False, False, False, 1, 0), @@ -327,12 +309,10 @@ def test_get_current_track_prev(self): (True, False, False, False, 1, 1), # FIXME: #1694 ], ) - def test_previous_all_modes( - self, repeat, random, single, consume, index, result - ): + def test_previous_all_modes(self, repeat, random, single, consume, index, result): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[index]) + self.core.playback.play(tl_tracks[index].tlid) self.core.tracklist.set_repeat(repeat) self.core.tracklist.set_random(random) self.core.tracklist.set_single(single) @@ -344,14 +324,12 @@ def test_previous_all_modes( if result is None: assert self.core.playback.get_current_tl_track() is None else: - assert ( - self.core.playback.get_current_tl_track() == tl_tracks[result] - ) + assert self.core.playback.get_current_tl_track() == tl_tracks[result] def test_previous_keeps_finished_track_in_tracklist(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.core.playback.previous() self.replay_events() @@ -361,7 +339,7 @@ def test_previous_keeps_finished_track_in_tracklist(self): def test_previous_keeps_finished_track_even_in_consume_mode(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.core.tracklist.set_consume(True) self.core.playback.previous() @@ -372,7 +350,7 @@ def test_previous_keeps_finished_track_even_in_consume_mode(self): def test_previous_skips_over_unplayable_track(self): tl_tracks = self.core.tracklist.get_tl_tracks() self.audio.trigger_fake_playback_failure(tl_tracks[1].track.uri) - self.core.playback.play(tl_tracks[2]) + self.core.playback.play(tl_tracks[2].tlid) self.replay_events() self.core.playback.previous() @@ -388,7 +366,7 @@ def test_previous_skips_over_change_track_error(self): 
tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[2]) + self.core.playback.play(tl_tracks[2].tlid) self.replay_events() self.core.playback.previous() @@ -404,7 +382,7 @@ def test_previous_skips_over_change_track_unplayable(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[2]) + self.core.playback.play(tl_tracks[2].tlid) self.replay_events() self.core.playback.previous() @@ -417,7 +395,7 @@ class TestOnAboutToFinish(BaseTest): def test_on_about_to_finish_keeps_finished_track_in_tracklist(self): tl_track = self.core.tracklist.get_tl_tracks()[0] - self.core.playback.play(tl_track) + self.core.playback.play(tl_track.tlid) self.trigger_about_to_finish() assert tl_track in self.core.tracklist.get_tl_tracks() @@ -430,7 +408,7 @@ def test_on_about_to_finish_skips_over_change_track_error(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.trigger_about_to_finish() @@ -445,7 +423,7 @@ def test_on_about_to_finish_skips_over_change_track_unplayable(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.trigger_about_to_finish() @@ -457,7 +435,7 @@ class TestConsumeHandling(BaseTest): def test_next_in_consume_mode_removes_finished_track(self): tl_track = self.core.tracklist.get_tl_tracks()[0] - self.core.playback.play(tl_track) + self.core.playback.play(tl_track.tlid) self.core.tracklist.set_consume(True) self.replay_events() @@ -471,7 +449,7 @@ def test_next_in_consume_mode_removes_unplayable_track(self): unplayable_tl_track = self.core.tracklist.get_tl_tracks()[-1] self.audio.trigger_fake_playback_failure(unplayable_tl_track.track.uri) - self.core.playback.play(last_playable_tl_track) + self.core.playback.play(last_playable_tl_track.tlid) self.core.tracklist.set_consume(True) self.core.playback.next() @@ -482,7 +460,7 @@ def test_next_in_consume_mode_removes_unplayable_track(self): def test_on_about_to_finish_in_consume_mode_removes_finished_track(self): tl_track = self.core.tracklist.get_tl_tracks()[0] - self.core.playback.play(tl_track) + self.core.playback.play(tl_track.tlid) self.core.tracklist.set_consume(True) self.trigger_about_to_finish() @@ -548,17 +526,14 @@ def test_current_tl_track_after_end_of_stream(self): assert self.playback.get_current_tl_track() is None -@mock.patch( - "mopidy.core.playback.listener.CoreListener", spec=core.CoreListener -) +@mock.patch("mopidy.core.playback.listener.CoreListener", spec=core.CoreListener) class TestEventEmission(BaseTest): - - maxDiff = None # noqa: N815 + maxDiff = None def test_play_when_stopped_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() assert listener_mock.send.mock_calls == [ @@ -573,14 +548,14 @@ def test_play_when_stopped_emits_events(self, listener_mock): def test_play_when_paused_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.pause() self.replay_events() listener_mock.reset_mock() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.replay_events() assert listener_mock.send.mock_calls == [ @@ -600,11 +575,11 
@@ def test_play_when_paused_emits_events(self, listener_mock): def test_play_when_playing_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() listener_mock.reset_mock() - self.core.playback.play(tl_tracks[2]) + self.core.playback.play(tl_tracks[2].tlid) self.replay_events() assert listener_mock.send.mock_calls == [ @@ -624,7 +599,7 @@ def test_play_when_playing_emits_events(self, listener_mock): def test_pause_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.seek(1000) @@ -648,7 +623,7 @@ def test_pause_emits_events(self, listener_mock): def test_resume_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.pause() @@ -673,7 +648,7 @@ def test_resume_emits_events(self, listener_mock): def test_stop_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.seek(1000) self.replay_events() @@ -698,7 +673,7 @@ def test_stop_emits_events(self, listener_mock): def test_next_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.seek(1000) self.replay_events() @@ -721,13 +696,11 @@ def test_next_emits_events(self, listener_mock): mock.call("track_playback_started", tl_track=tl_tracks[1]), ] - def test_next_emits_events_when_consume_mode_is_enabled( - self, listener_mock - ): + def test_next_emits_events_when_consume_mode_is_enabled(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() self.core.tracklist.set_consume(True) - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.seek(1000) self.replay_events() @@ -754,7 +727,7 @@ def test_next_emits_events_when_consume_mode_is_enabled( def test_gapless_track_change_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() listener_mock.reset_mock() @@ -777,7 +750,7 @@ def test_gapless_track_change_emits_events(self, listener_mock): def test_seek_emits_seeked_event(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() listener_mock.reset_mock() @@ -789,7 +762,7 @@ def test_seek_emits_seeked_event(self, listener_mock): def test_seek_past_end_of_track_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() listener_mock.reset_mock() @@ -813,7 +786,7 @@ def test_seek_past_end_of_track_emits_events(self, listener_mock): def test_seek_race_condition_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) 
self.trigger_about_to_finish(replay_until="stream_changed") self.replay_events() listener_mock.reset_mock() @@ -832,7 +805,7 @@ def test_seek_race_condition_emits_events(self, listener_mock): def test_previous_emits_events(self, listener_mock): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.replay_events() listener_mock.reset_mock() @@ -855,8 +828,7 @@ def test_previous_emits_events(self, listener_mock): class TestUnplayableURI(BaseTest): - - tracks = [ + tracks: ClassVar[list[Track]] = [ Track(uri="unplayable://"), Track(uri="dummy:b"), ] @@ -906,11 +878,11 @@ class TestSeek(BaseTest): def test_seek_normalizes_negative_positions_to_zero(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.seek(-100) # Dummy audio doesn't progress time. - assert 0 == self.core.playback.get_time_position() + assert self.core.playback.get_time_position() == 0 def test_seek_fails_for_track_without_duration(self): track = self.tracks[0].replace(length=None) @@ -922,12 +894,12 @@ def test_seek_fails_for_track_without_duration(self): self.replay_events() assert not self.core.playback.seek(1000) - assert 0 == self.core.playback.get_time_position() + assert self.core.playback.get_time_position() == 0 def test_seek_play_stay_playing(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.seek(1000) @@ -936,7 +908,7 @@ def test_seek_play_stay_playing(self): def test_seek_paused_stay_paused(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.core.playback.pause() @@ -948,7 +920,7 @@ def test_seek_paused_stay_paused(self): def test_seek_race_condition_after_about_to_finish(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() self.trigger_about_to_finish(replay_until="stream_changed") @@ -1076,7 +1048,7 @@ def trigger_stream_changed(self): self.core.stream_changed(uri=None) def test_play_selects_dummy1_backend(self): - self.core.playback.play(self.tl_tracks[0]) + self.core.playback.play(self.tl_tracks[0].tlid) self.trigger_stream_changed() self.playback1.prepare_change.assert_called_once_with() @@ -1085,7 +1057,7 @@ def test_play_selects_dummy1_backend(self): assert not self.playback2.play.called def test_play_selects_dummy2_backend(self): - self.core.playback.play(self.tl_tracks[1]) + self.core.playback.play(self.tl_tracks[1].tlid) self.trigger_stream_changed() assert not self.playback1.play.called @@ -1094,7 +1066,7 @@ def test_play_selects_dummy2_backend(self): self.playback2.play.assert_called_once_with() def test_pause_selects_dummy1_backend(self): - self.core.playback.play(self.tl_tracks[0]) + self.core.playback.play(self.tl_tracks[0].tlid) self.trigger_stream_changed() self.core.playback.pause() @@ -1103,7 +1075,7 @@ def test_pause_selects_dummy1_backend(self): assert not self.playback2.pause.called def test_pause_selects_dummy2_backend(self): - self.core.playback.play(self.tl_tracks[1]) + self.core.playback.play(self.tl_tracks[1].tlid) self.trigger_stream_changed() self.core.playback.pause() @@ -1112,7 +1084,7 @@ def test_pause_selects_dummy2_backend(self): 
self.playback2.pause.assert_called_once_with() def test_resume_selects_dummy1_backend(self): - self.core.playback.play(self.tl_tracks[0]) + self.core.playback.play(self.tl_tracks[0].tlid) self.trigger_stream_changed() self.core.playback.pause() @@ -1122,7 +1094,7 @@ def test_resume_selects_dummy1_backend(self): assert not self.playback2.resume.called def test_resume_selects_dummy2_backend(self): - self.core.playback.play(self.tl_tracks[1]) + self.core.playback.play(self.tl_tracks[1].tlid) self.trigger_stream_changed() self.core.playback.pause() @@ -1132,7 +1104,7 @@ def test_resume_selects_dummy2_backend(self): self.playback2.resume.assert_called_once_with() def test_stop_selects_dummy1_backend(self): - self.core.playback.play(self.tl_tracks[0]) + self.core.playback.play(self.tl_tracks[0].tlid) self.trigger_stream_changed() self.core.playback.stop() @@ -1142,7 +1114,7 @@ def test_stop_selects_dummy1_backend(self): assert not self.playback2.stop.called def test_stop_selects_dummy2_backend(self): - self.core.playback.play(self.tl_tracks[1]) + self.core.playback.play(self.tl_tracks[1].tlid) self.trigger_stream_changed() self.core.playback.stop() @@ -1152,7 +1124,7 @@ def test_stop_selects_dummy2_backend(self): self.playback2.stop.assert_called_once_with() def test_seek_selects_dummy1_backend(self): - self.core.playback.play(self.tl_tracks[0]) + self.core.playback.play(self.tl_tracks[0].tlid) self.trigger_stream_changed() self.core.playback.seek(10000) @@ -1161,7 +1133,7 @@ def test_seek_selects_dummy1_backend(self): assert not self.playback2.seek.called def test_seek_selects_dummy2_backend(self): - self.core.playback.play(self.tl_tracks[1]) + self.core.playback.play(self.tl_tracks[1].tlid) self.trigger_stream_changed() self.core.playback.seek(10000) @@ -1170,7 +1142,7 @@ def test_seek_selects_dummy2_backend(self): self.playback2.seek.assert_called_once_with(10000) def test_time_position_selects_dummy1_backend(self): - self.core.playback.play(self.tl_tracks[0]) + self.core.playback.play(self.tl_tracks[0].tlid) self.trigger_stream_changed() self.core.playback.get_time_position() @@ -1179,7 +1151,7 @@ def test_time_position_selects_dummy1_backend(self): assert not self.playback2.get_time_position.called def test_time_position_selects_dummy2_backend(self): - self.core.playback.play(self.tl_tracks[1]) + self.core.playback.play(self.tl_tracks[1].tlid) self.trigger_stream_changed() self.core.playback.get_time_position() @@ -1237,12 +1209,10 @@ class TestCorePlaybackSaveLoadState(BaseTest): def test_save(self): tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.replay_events() - state = PlaybackState( - time_position=0, state="playing", tlid=tl_tracks[1].tlid - ) + state = PlaybackState(time_position=0, state="playing", tlid=tl_tracks[1].tlid) value = self.core.playback._save_state() assert state == value @@ -1252,16 +1222,14 @@ def test_load(self): self.core.playback.stop() self.replay_events() - assert "stopped" == self.core.playback.get_state() + assert self.core.playback.get_state() == "stopped" - state = PlaybackState( - time_position=0, state="playing", tlid=tl_tracks[2].tlid - ) + state = PlaybackState(time_position=0, state="playing", tlid=tl_tracks[2].tlid) coverage = ["play-last"] self.core.playback._load_state(state, coverage) self.replay_events() - assert "playing" == self.core.playback.get_state() + assert self.core.playback.get_state() == "playing" assert tl_tracks[2] == 
self.core.playback.get_current_tl_track() def test_load_not_covered(self): @@ -1269,16 +1237,14 @@ def test_load_not_covered(self): self.core.playback.stop() self.replay_events() - assert "stopped" == self.core.playback.get_state() + assert self.core.playback.get_state() == "stopped" - state = PlaybackState( - time_position=0, state="playing", tlid=tl_tracks[2].tlid - ) + state = PlaybackState(time_position=0, state="playing", tlid=tl_tracks[2].tlid) coverage = ["other"] self.core.playback._load_state(state, coverage) self.replay_events() - assert "stopped" == self.core.playback.get_state() + assert self.core.playback.get_state() == "stopped" assert self.core.playback.get_current_tl_track() is None def test_load_invalid_type(self): @@ -1290,7 +1256,7 @@ def test_load_none(self): class TestBug1352Regression(BaseTest): - tracks = [ + tracks: ClassVar[list[Track]] = [ Track(uri="dummy:a", length=40000), Track(uri="dummy:b", length=40000), ] @@ -1317,13 +1283,12 @@ def test_next_when_paused_updates_history(self): class TestEndlessLoop(BaseTest): - - tracks_play = [ + tracks_play: ClassVar[list[Track]] = [ Track(uri="dummy:limit_never:a"), Track(uri="dummy:limit_never:b"), ] - tracks_other = [ + tracks_other: ClassVar[list[Track]] = [ Track(uri="dummy:limit_never:a"), Track(uri="dummy:limit_one"), Track(uri="dummy:limit_never:b"), @@ -1338,7 +1303,7 @@ def test_play(self): self.core.tracklist.set_repeat(True) tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[0]) + self.core.playback.play(tl_tracks[0].tlid) self.replay_events() assert not self.backend.playback.is_call_limit_reached().get() @@ -1352,7 +1317,7 @@ def test_next(self): self.core.tracklist.set_repeat(True) tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.replay_events() self.core.playback.next() @@ -1369,7 +1334,7 @@ def test_previous(self): self.core.tracklist.set_repeat(True) tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.replay_events() self.core.playback.previous() @@ -1386,7 +1351,7 @@ def test_on_about_to_finish(self): self.core.tracklist.set_repeat(True) tl_tracks = self.core.tracklist.get_tl_tracks() - self.core.playback.play(tl_tracks[1]) + self.core.playback.play(tl_tracks[1].tlid) self.replay_events() self.trigger_about_to_finish() diff --git a/tests/core/test_playlists.py b/tests/core/test_playlists.py index 14a1f1a7a0..622ea41405 100644 --- a/tests/core/test_playlists.py +++ b/tests/core/test_playlists.py @@ -6,7 +6,7 @@ class BasePlaylistsTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.plr1a = Ref.playlist(name="A", uri="dummy1:pl:a") self.plr1b = Ref.playlist(name="B", uri="dummy1:pl:b") self.plr2a = Ref.playlist(name="A", uri="dummy2:pl:a") @@ -48,7 +48,9 @@ def setUp(self): # noqa: N802 self.backend3.playlists = None self.core = core.Core( - mixer=None, backends=[self.backend3, self.backend1, self.backend2] + config={}, + mixer=None, + backends=[self.backend3, self.backend1, self.backend2], ) @@ -257,7 +259,7 @@ def test_get_uri_schemes(self): class MockBackendCorePlaylistsBase(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.playlists = mock.Mock(spec=backend.PlaylistsProvider) self.backend = mock.Mock() @@ -265,7 +267,11 @@ def setUp(self): # noqa: N802 self.backend.uri_schemes.get.return_value = ["dummy"] self.backend.playlists = self.playlists 
- self.core = core.Core(mixer=None, backends=[self.backend]) + self.core = core.Core( + config={}, + mixer=None, + backends=[self.backend], + ) @mock.patch("mopidy.core.playlists.logger") diff --git a/tests/core/test_tracklist.py b/tests/core/test_tracklist.py index 9b9c115ea0..cfdc8b3dcb 100644 --- a/tests/core/test_tracklist.py +++ b/tests/core/test_tracklist.py @@ -1,13 +1,14 @@ import unittest from unittest import mock +import pytest from mopidy import backend, core from mopidy.internal.models import TracklistState from mopidy.models import TlTrack, Track class TracklistTest(unittest.TestCase): - def setUp(self): # noqa: + def setUp(self): config = {"core": {"max_tracklist_length": 10000}} self.tracks = [ @@ -28,9 +29,7 @@ def lookup(uri): self.backend.library = self.library self.core = core.Core(config, mixer=None, backends=[self.backend]) - self.tl_tracks = self.core.tracklist.add( - uris=[t.uri for t in self.tracks] - ) + self.tl_tracks = self.core.tracklist.add(uris=[t.uri for t in self.tracks]) def test_add_by_uri_looks_up_uri_in_library(self): self.library.lookup.reset_mock() @@ -39,7 +38,7 @@ def test_add_by_uri_looks_up_uri_in_library(self): tl_tracks = self.core.tracklist.add(uris=["dummy1:a"]) self.library.lookup.assert_called_once_with("dummy1:a") - assert 1 == len(tl_tracks) + assert len(tl_tracks) == 1 assert self.tracks[0] == tl_tracks[0].track assert tl_tracks == self.core.tracklist.get_tl_tracks()[(-1):] @@ -56,62 +55,55 @@ def test_add_by_uris_looks_up_uris_in_library(self): mock.call("dummy1:c"), ] ) - assert 3 == len(tl_tracks) + assert len(tl_tracks) == 3 assert self.tracks[0] == tl_tracks[0].track assert self.tracks[1] == tl_tracks[1].track assert self.tracks[2] == tl_tracks[2].track - assert ( - tl_tracks - == self.core.tracklist.get_tl_tracks()[(-len(tl_tracks)) :] - ) + assert tl_tracks == self.core.tracklist.get_tl_tracks()[(-len(tl_tracks)) :] def test_remove_removes_tl_tracks_matching_query(self): tl_tracks = self.core.tracklist.remove({"name": ["foo"]}) - assert 2 == len(tl_tracks) + assert len(tl_tracks) == 2 self.assertListEqual(self.tl_tracks[:2], tl_tracks) - assert 1 == self.core.tracklist.get_length() - self.assertListEqual( - self.tl_tracks[2:], self.core.tracklist.get_tl_tracks() - ) + assert self.core.tracklist.get_length() == 1 + self.assertListEqual(self.tl_tracks[2:], self.core.tracklist.get_tl_tracks()) def test_remove_works_with_dict_instead_of_kwargs(self): tl_tracks = self.core.tracklist.remove({"name": ["foo"]}) - assert 2 == len(tl_tracks) + assert len(tl_tracks) == 2 self.assertListEqual(self.tl_tracks[:2], tl_tracks) - assert 1 == self.core.tracklist.get_length() - self.assertListEqual( - self.tl_tracks[2:], self.core.tracklist.get_tl_tracks() - ) + assert self.core.tracklist.get_length() == 1 + self.assertListEqual(self.tl_tracks[2:], self.core.tracklist.get_tl_tracks()) def test_filter_returns_tl_tracks_matching_query(self): tl_tracks = self.core.tracklist.filter({"name": ["foo"]}) - assert 2 == len(tl_tracks) + assert len(tl_tracks) == 2 self.assertListEqual(self.tl_tracks[:2], tl_tracks) def test_filter_works_with_dict_instead_of_kwargs(self): tl_tracks = self.core.tracklist.filter({"name": ["foo"]}) - assert 2 == len(tl_tracks) + assert len(tl_tracks) == 2 self.assertListEqual(self.tl_tracks[:2], tl_tracks) def test_filter_fails_if_values_isnt_iterable(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.core.tracklist.filter({"tlid": 3}) def test_filter_fails_if_values_is_a_string(self): - with 
self.assertRaises(ValueError): + with pytest.raises(ValueError): self.core.tracklist.filter({"uri": "a"}) # TODO Extract tracklist tests from the local backend tests class TracklistIndexTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): config = {"core": {"max_tracklist_length": 10000}} self.tracks = [ @@ -129,14 +121,12 @@ def lookup(uris): self.core.playback = mock.Mock(spec=core.PlaybackController) - self.tl_tracks = self.core.tracklist.add( - uris=[t.uri for t in self.tracks] - ) + self.tl_tracks = self.core.tracklist.add(uris=[t.uri for t in self.tracks]) def test_index_returns_index_of_track(self): - assert 0 == self.core.tracklist.index(self.tl_tracks[0]) - assert 1 == self.core.tracklist.index(self.tl_tracks[1]) - assert 2 == self.core.tracklist.index(self.tl_tracks[2]) + assert self.core.tracklist.index(self.tl_tracks[0]) == 0 + assert self.core.tracklist.index(self.tl_tracks[1]) == 1 + assert self.core.tracklist.index(self.tl_tracks[2]) == 2 def test_index_returns_none_if_item_not_found(self): tl_track = TlTrack(0, Track()) @@ -146,14 +136,14 @@ def test_index_returns_none_if_called_with_none(self): assert self.core.tracklist.index(None) is None def test_index_errors_out_for_invalid_tltrack(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.core.tracklist.index("abc") def test_index_return_index_when_called_with_tlids(self): tl_tracks = self.tl_tracks - assert 0 == self.core.tracklist.index(tlid=tl_tracks[0].tlid) - assert 1 == self.core.tracklist.index(tlid=tl_tracks[1].tlid) - assert 2 == self.core.tracklist.index(tlid=tl_tracks[2].tlid) + assert self.core.tracklist.index(tlid=tl_tracks[0].tlid) == 0 + assert self.core.tracklist.index(tlid=tl_tracks[1].tlid) == 1 + assert self.core.tracklist.index(tlid=tl_tracks[2].tlid) == 2 def test_index_returns_none_if_tlid_not_found(self): assert self.core.tracklist.index(tlid=123) is None @@ -162,7 +152,7 @@ def test_index_returns_none_if_called_with_tlid_none(self): assert self.core.tracklist.index(tlid=None) is None def test_index_errors_out_for_invalid_tlid(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): self.core.tracklist.index(tlid=-1) def test_index_without_args_returns_current_tl_track_index(self): @@ -174,13 +164,13 @@ def test_index_without_args_returns_current_tl_track_index(self): ] assert self.core.tracklist.index() is None - assert 0 == self.core.tracklist.index() - assert 1 == self.core.tracklist.index() - assert 2 == self.core.tracklist.index() + assert self.core.tracklist.index() == 0 + assert self.core.tracklist.index() == 1 + assert self.core.tracklist.index() == 2 class TracklistSaveLoadStateTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): config = {"core": {"max_tracklist_length": 10000}} self.tracks = [ @@ -237,15 +227,15 @@ def test_load(self): assert self.core.tracklist.get_repeat() is True assert self.core.tracklist.get_single() is True assert self.core.tracklist.get_random() is False - assert 12 == self.core.tracklist._next_tlid - assert 4 == self.core.tracklist.get_length() + assert self.core.tracklist._next_tlid == 12 + assert self.core.tracklist.get_length() == 4 assert self.tl_tracks == self.core.tracklist.get_tl_tracks() assert self.core.tracklist.get_version() > old_version # after load, adding more tracks must be possible self.core.tracklist.add(uris=[self.tracks[1].uri]) - assert 13 == self.core.tracklist._next_tlid - assert 5 == self.core.tracklist.get_length() + assert 
self.core.tracklist._next_tlid == 13 + assert self.core.tracklist.get_length() == 5 def test_load_mode_only(self): old_version = self.core.tracklist.get_version() @@ -263,8 +253,8 @@ def test_load_mode_only(self): assert self.core.tracklist.get_repeat() is True assert self.core.tracklist.get_single() is True assert self.core.tracklist.get_random() is False - assert 1 == self.core.tracklist._next_tlid - assert 0 == self.core.tracklist.get_length() + assert self.core.tracklist._next_tlid == 1 + assert self.core.tracklist.get_length() == 0 assert [] == self.core.tracklist.get_tl_tracks() assert self.core.tracklist.get_version() == old_version @@ -284,13 +274,13 @@ def test_load_tracklist_only(self): assert self.core.tracklist.get_repeat() is False assert self.core.tracklist.get_single() is False assert self.core.tracklist.get_random() is False - assert 12 == self.core.tracklist._next_tlid - assert 4 == self.core.tracklist.get_length() + assert self.core.tracklist._next_tlid == 12 + assert self.core.tracklist.get_length() == 4 assert self.tl_tracks == self.core.tracklist.get_tl_tracks() assert self.core.tracklist.get_version() > old_version def test_load_invalid_type(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): self.core.tracklist._load_state(11, None) def test_load_none(self): diff --git a/tests/data/.skip_this_file b/tests/data/.skip_this_file new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/data/scanner/simple/song1.mp3 b/tests/data/scanner/simple/song1.mp3 index ad5aa37a97..ff4273106c 100644 Binary files a/tests/data/scanner/simple/song1.mp3 and b/tests/data/scanner/simple/song1.mp3 differ diff --git a/tests/data/scanner/simple/song1.ogg b/tests/data/scanner/simple/song1.ogg index 18f5a2d914..6cf4742e22 100644 Binary files a/tests/data/scanner/simple/song1.ogg and b/tests/data/scanner/simple/song1.ogg differ diff --git a/tests/data/symlink.m3u b/tests/data/symlink.m3u new file mode 120000 index 0000000000..3fb525407f --- /dev/null +++ b/tests/data/symlink.m3u @@ -0,0 +1 @@ +one.m3u \ No newline at end of file diff --git a/tests/data/symlink_no.m3u b/tests/data/symlink_no.m3u new file mode 120000 index 0000000000..33c882c251 --- /dev/null +++ b/tests/data/symlink_no.m3u @@ -0,0 +1 @@ +../file/test_browse.py \ No newline at end of file diff --git a/tests/dummy_audio.py b/tests/dummy_audio.py index 8501f6fef4..b231060114 100644 --- a/tests/dummy_audio.py +++ b/tests/dummy_audio.py @@ -4,9 +4,7 @@ tests of the core and backends. 
""" - import pykka - from mopidy import audio @@ -21,7 +19,8 @@ def __init__(self, config=None, mixer=None): self.state = audio.PlaybackState.STOPPED self._volume = 0 self._position = 0 - self._callback = None + self._source_setup_callback = None + self._about_to_finish_callback = None self._uri = None self._stream_changed = False self._live_stream = False @@ -36,12 +35,6 @@ def set_uri(self, uri, live_stream=False, download=False): self._live_stream = live_stream self._tags = {} - def set_appsrc(self, *args, **kwargs): - pass - - def emit_data(self, buffer_): - pass - def get_position(self): return self._position @@ -58,6 +51,7 @@ def pause_playback(self): def prepare_change(self): self._uri = None + self._source_setup_callback = None return True def stop_playback(self): @@ -70,14 +64,14 @@ def set_volume(self, volume): self._volume = volume return True - def set_metadata(self, track): - pass - def get_current_tags(self): return self._tags + def set_source_setup_callback(self, callback): + self._source_setup_callback = callback + def set_about_to_finish_callback(self, callback): - self._callback = callback + self._about_to_finish_callback = callback def enable_sync_handler(self): pass @@ -121,14 +115,22 @@ def trigger_fake_tags_changed(self, tags): self._tags.update(tags) audio.AudioListener.send("tags_changed", tags=self._tags.keys()) + def get_source_setup_callback(self): + # This needs to be called from outside the actor or we lock up. + def wrapper(): + if self._source_setup_callback: + self._source_setup_callback() + + return wrapper + def get_about_to_finish_callback(self): # This needs to be called from outside the actor or we lock up. def wrapper(): - if self._callback: + if self._about_to_finish_callback: self.prepare_change() - self._callback() + self._about_to_finish_callback() - if not self._uri or not self._callback: + if not self._uri or not self._about_to_finish_callback: self._tags = {} audio.AudioListener.send("reached_end_of_stream") else: diff --git a/tests/dummy_backend.py b/tests/dummy_backend.py index 0ee3d2ddea..356b52e46e 100644 --- a/tests/dummy_backend.py +++ b/tests/dummy_backend.py @@ -4,9 +4,7 @@ used in tests of the frontends. 
""" - import pykka - from mopidy import backend from mopidy.models import Playlist, Ref, SearchResult @@ -109,14 +107,12 @@ def set_allow_save(self, enabled): self._allow_save = enabled def as_list(self): - return [ - Ref.playlist(uri=pl.uri, name=pl.name) for pl in self._playlists - ] + return [Ref.playlist(uri=pl.uri, name=pl.name) for pl in self._playlists] def get_items(self, uri): playlist = self.lookup(uri) if playlist is None: - return + return None return [Ref.track(uri=t.uri, name=t.name) for t in playlist.tracks] def lookup(self, uri): @@ -124,6 +120,7 @@ def lookup(self, uri): for playlist in self._playlists: if playlist.uri == uri: return playlist + return None def refresh(self): pass diff --git a/tests/dummy_mixer.py b/tests/dummy_mixer.py index 313341236d..caadf24b73 100644 --- a/tests/dummy_mixer.py +++ b/tests/dummy_mixer.py @@ -1,5 +1,4 @@ import pykka - from mopidy import mixer diff --git a/tests/file/conftest.py b/tests/file/conftest.py index d0bd777dfe..7e36b7188f 100644 --- a/tests/file/conftest.py +++ b/tests/file/conftest.py @@ -1,15 +1,35 @@ +from unittest import mock + import pytest +from mopidy.file import backend + +from tests import path_to_data_dir + + +@pytest.fixture() +def media_dirs(): + return [str(path_to_data_dir(""))] + +@pytest.fixture() +def follow_symlinks(): + return False -@pytest.fixture -def file_config(): - return {"file": {}} +@pytest.fixture() +def config(media_dirs, follow_symlinks): + return { + "proxy": {}, + "file": { + "show_dotfiles": False, + "media_dirs": media_dirs, + "excluded_file_extensions": [".conf"], + "follow_symlinks": follow_symlinks, + "metadata_timeout": 1000, + }, + } -@pytest.fixture -def file_library(file_config): - # Import library, thus scanner, thus gobject as late as possible to avoid - # hard to track import errors during conftest setup. 
- from mopidy.file import library - return library.FileLibraryProvider(backend=None, config=file_config) +@pytest.fixture() +def provider(config): + return backend.FileBackend(audio=mock.Mock(), config=config).library diff --git a/tests/file/test_browse.py b/tests/file/test_browse.py index 470e517470..83a282bb9e 100644 --- a/tests/file/test_browse.py +++ b/tests/file/test_browse.py @@ -1,44 +1,46 @@ -# TODO Test browse() -from unittest import mock - import pytest - -from mopidy.file import backend from mopidy.internal import path from tests import path_to_data_dir -@pytest.fixture -def config(): - return { - "proxy": {}, - "file": { - "show_dotfiles": False, - "media_dirs": [str(path_to_data_dir(""))], - "excluded_file_extensions": [], - "follow_symlinks": False, - "metadata_timeout": 1000, - }, - } - - -@pytest.fixture -def audio(): - return mock.Mock() - - -@pytest.fixture -def track_uri(): - return path.path_to_uri(path_to_data_dir("song1.wav")) - - -def test_file_browse(config, audio, track_uri, caplog): - provider = backend.FileBackend(audio=audio, config=config).library - result = provider.browse(track_uri) - - assert len(result) == 0 - assert any(map(lambda record: record.levelname == "ERROR", caplog.records)) - assert any( - map(lambda record: "Rejected attempt" in record.message, caplog.records) - ) +@pytest.mark.parametrize("follow_symlinks", [True, False]) +@pytest.mark.parametrize( + ("uri", "levelname"), + [ + ("file:root", None), + ("not_in_data_path", "WARNING"), + (path.path_to_uri(path_to_data_dir("song1.wav")), "ERROR"), + (path.path_to_uri(path_to_data_dir("")), None), + ], +) +def test_file_browse(provider, uri, levelname, caplog): + result = provider.browse(uri) + assert isinstance(result, list) + if levelname: + assert len(result) == 0 + record = caplog.records[0] + assert record.levelname == levelname + assert "Rejected attempt" in record.message + return + + assert len(result) >= 1 + + +@pytest.mark.parametrize( + ("media_dirs", "expected"), + [ + ([str(path_to_data_dir(""))], False), + ([str(path_to_data_dir("")), str(path_to_data_dir(""))], True), + ([], None), + ([str(path_to_data_dir("song1.wav"))], None), + (["|" + str(path_to_data_dir(""))], False), + ], +) +def test_file_root_directory(provider, expected): + ref = provider.root_directory + if expected is None: + assert not ref + return + assert ref.name == "Files" + assert (ref.uri == "file:root") == expected diff --git a/tests/file/test_file.py b/tests/file/test_file.py new file mode 100644 index 0000000000..e627fa1249 --- /dev/null +++ b/tests/file/test_file.py @@ -0,0 +1,18 @@ +"""Test file (except browse and lookup)""" + +from mopidy import ext +from mopidy.file import Extension +from mopidy.file.backend import FileBackend + + +def test_file_init(): + """Test class Extension in __init__.""" + for extension_data in ext.load_extensions(): + extension = extension_data.extension + if isinstance(extension, Extension): + assert extension.dist_name == "Mopidy-File" + registry = ext.Registry() + extension.setup(registry) + assert registry["backend"][0] == FileBackend + return + raise AssertionError("Mopidy-File not loaded!") diff --git a/tests/file/test_lookup.py b/tests/file/test_lookup.py index 01ae5db05f..8fda1ef658 100644 --- a/tests/file/test_lookup.py +++ b/tests/file/test_lookup.py @@ -1,40 +1,14 @@ from unittest import mock import pytest - -from mopidy.file import backend +from mopidy import exceptions from mopidy.internal import path from tests import path_to_data_dir -@pytest.fixture -def config(): - 
return { - "proxy": {}, - "file": { - "show_dotfiles": False, - "media_dirs": [], - "excluded_file_extensions": [], - "follow_symlinks": False, - "metadata_timeout": 1000, - }, - } - - -@pytest.fixture -def audio(): - return mock.Mock() - - -@pytest.fixture -def track_uri(): - return path.path_to_uri(path_to_data_dir("song1.wav")) - - -def test_lookup(config, audio, track_uri): - provider = backend.FileBackend(audio=audio, config=config).library - +@pytest.mark.parametrize("track_uri", [path.path_to_uri(path_to_data_dir("song1.wav"))]) +def test_lookup(provider, track_uri): result = provider.lookup(track_uri) assert len(result) == 1 @@ -42,3 +16,13 @@ def test_lookup(config, audio, track_uri): assert track.uri == track_uri assert track.length == 4406 assert track.name == "song1.wav" + + with mock.patch( + "mopidy.file.library.tags.convert_tags_to_track", + side_effect=exceptions.ScannerError("test"), + ): + result = provider.lookup(track_uri) + assert len(result) == 1 + track = result[0] + assert track.uri == track_uri + assert track.name == "song1.wav" diff --git a/tests/http/test_extension.py b/tests/http/test_extension.py new file mode 100644 index 0000000000..1bb41dc13a --- /dev/null +++ b/tests/http/test_extension.py @@ -0,0 +1,35 @@ +from mopidy import config as config_lib +from mopidy.http import Extension + + +def test_get_default_config(): + ext = Extension() + + config = ext.get_default_config() + + assert "[http]" in config + assert "enabled = true" in config + + +def test_get_config_schema(): + ext = Extension() + + schema = ext.get_config_schema() + + assert "enabled" in schema + assert "hostname" in schema + assert "port" in schema + assert "zeroconf" in schema + assert "allowed_origins" in schema + assert "csrf_protection" in schema + assert "default_app" in schema + + +def test_default_config_is_valid(): + ext = Extension() + + config = ext.get_default_config() + schema = ext.get_config_schema() + _, errors = config_lib.load([], [schema], [config], []) + + assert errors.get("http") is None diff --git a/tests/http/test_handlers.py b/tests/http/test_handlers.py index 4e8a91563b..4fc4b5d700 100644 --- a/tests/http/test_handlers.py +++ b/tests/http/test_handlers.py @@ -1,12 +1,13 @@ -import os import unittest +from pathlib import Path from unittest import mock +import mopidy +import pytest +import tornado.httpclient import tornado.testing import tornado.web import tornado.websocket - -import mopidy from mopidy.http import handlers @@ -18,7 +19,7 @@ def get_app(self): r"/(.*)", handlers.StaticFileHandler, { - "path": os.path.dirname(__file__), + "path": Path(__file__).parent, "default_filename": "test_handlers.py", }, ) @@ -28,14 +29,14 @@ def get_app(self): def test_static_handler(self): response = self.fetch("/test_handlers.py", method="GET") - assert 200 == response.code + assert response.code == 200 assert response.headers["X-Mopidy-Version"] == mopidy.__version__ assert response.headers["Cache-Control"] == "no-cache" def test_static_default_filename(self): response = self.fetch("/", method="GET") - assert 200 == response.code + assert response.code == 200 assert response.headers["X-Mopidy-Version"] == mopidy.__version__ assert response.headers["Cache-Control"] == "no-cache" @@ -50,16 +51,20 @@ def get_app(self): handlers.WebSocketHandler, { "core": self.core, - "allowed_origins": [], + "allowed_origins": frozenset(), "csrf_protection": True, }, ) ] ) - def connection(self): - url = self.get_url("/ws").replace("http", "ws") - return tornado.websocket.websocket_connect(url) + 
def connection(self, **kwargs): + conn_kwargs = { + "url": self.get_url("/ws").replace("http", "ws"), + } + conn_kwargs.update(kwargs) + request = tornado.httpclient.HTTPRequest(**conn_kwargs) + return tornado.websocket.websocket_connect(request) @tornado.testing.gen_test def test_invalid_json_rpc_request_doesnt_crash_handler(self): @@ -93,6 +98,143 @@ def test_broadcast_to_client_without_ws_connection_present(self): client.ws_connection = None handlers.WebSocketHandler.broadcast("message", self.io_loop) + @tornado.testing.gen_test + def test_good_origin(self): + headers = {"Origin": "http://localhost", "Host": "localhost"} + conn = yield self.connection(headers=headers) + assert conn + + @tornado.testing.gen_test + def test_bad_origin(self): + headers = {"Origin": "http://foobar", "Host": "localhost"} + with pytest.raises(tornado.httpclient.HTTPClientError) as exc_info: + _ = yield self.connection(headers=headers) + assert exc_info.value.code == 403 + + +class JsonRpcHandlerTestBase(tornado.testing.AsyncHTTPTestCase): + csrf_protection = True + + def setUp(self): + super().setUp() + self.headers = {"Host": "localhost:6680"} + + def get_app(self): + self.core = mock.Mock() + return tornado.web.Application( + [ + ( + r"/rpc", + handlers.JsonRpcHandler, + { + "core": self.core, + "allowed_origins": set(), + "csrf_protection": self.csrf_protection, + }, + ) + ] + ) + + def assert_extra_response_headers(self, headers): + assert headers["Cache-Control"] == "no-cache" + assert headers["X-Mopidy-Version"] == mopidy.__version__ + assert headers["Accept"] == "application/json" + assert headers["Content-Type"] == "application/json; utf-8" + + def get_cors_response_headers(self): + yield ( + "Access-Control-Allow-Origin", + self.headers.get("Origin"), + ) + yield ( + "Access-Control-Allow-Headers", + "Content-Type", + ) + + def test_head(self): + response = self.fetch("/rpc", method="HEAD") + + assert response.code == 200 + self.assert_extra_response_headers(response.headers) + + +class JsonRpcHandlerTestCSRFEnabled(JsonRpcHandlerTestBase): + def test_options_sets_cors_headers(self): + self.headers.update({"Origin": "http://localhost:6680"}) + response = self.fetch("/rpc", method="OPTIONS", headers=self.headers) + + assert response.code == 204 + for k, v in self.get_cors_response_headers(): + assert response.headers[k] == v + + def test_options_bad_origin_forbidden(self): + self.headers.update({"Origin": "http://foo:6680"}) + response = self.fetch("/rpc", method="OPTIONS", headers=self.headers) + + assert response.code == 403 + assert response.reason == "Access denied for origin http://foo:6680" + for k, _ in self.get_cors_response_headers(): + assert k not in response.headers + + def test_options_no_origin_forbidden(self): + response = self.fetch("/rpc", method="OPTIONS", headers=self.headers) + + assert response.code == 403 + assert response.reason == "Access denied for origin None" + for k, _ in self.get_cors_response_headers(): + assert k not in response.headers + + def test_post_no_content_type_unsupported(self): + response = self.fetch("/rpc", method="POST", body="hi", headers=self.headers) + + assert response.code == 415 + for k, _ in self.get_cors_response_headers(): + assert k not in response.headers + + def test_post_wrong_content_type_unsupported(self): + self.headers.update({"Content-Type": "application/cats"}) + response = self.fetch("/rpc", method="POST", body="hi", headers=self.headers) + + assert response.code == 415 + assert response.reason == "Content-Type must be application/json" 
+ for k, _ in self.get_cors_response_headers(): + assert k not in response.headers + + def test_post_no_origin_ok_but_doesnt_set_cors_headers(self): + self.headers.update({"Content-Type": "application/json"}) + response = self.fetch("/rpc", method="POST", body="hi", headers=self.headers) + + assert response.code == 200 + for k, _ in self.get_cors_response_headers(): + assert k not in response.headers + + def test_post_with_origin_ok_sets_cors_headers(self): + self.headers.update( + {"Content-Type": "application/json", "Origin": "http://foobar:6680"} + ) + response = self.fetch("/rpc", method="POST", body="hi", headers=self.headers) + + assert response.code == 200 + self.assert_extra_response_headers(response.headers) + for k, v in self.get_cors_response_headers(): + assert response.headers[k] == v + + +class JsonRpcHandlerTestCSRFDisabled(JsonRpcHandlerTestBase): + csrf_protection = False + + def test_options_no_origin_success(self): + response = self.fetch("/rpc", method="OPTIONS", headers=self.headers) + + assert response.code == 204 + + def test_post_no_content_type_ok(self): + response = self.fetch("/rpc", method="POST", body="hi", headers=self.headers) + + assert response.code == 200 + for k, _ in self.get_cors_response_headers(): + assert k not in response.headers + class CheckOriginTests(unittest.TestCase): def setUp(self): @@ -128,6 +270,4 @@ def test_different_port_blocked(self): def test_extra_origin_allowed(self): self.allowed.add("other:6680") - assert handlers.check_origin( - "http://other:6680", self.headers, self.allowed - ) + assert handlers.check_origin("http://other:6680", self.headers, self.allowed) diff --git a/tests/http/test_server.py b/tests/http/test_server.py index b89b347058..c8ed0211a1 100644 --- a/tests/http/test_server.py +++ b/tests/http/test_server.py @@ -1,13 +1,12 @@ -import os import shutil import tempfile import urllib +from pathlib import Path from unittest import mock +import mopidy import tornado.testing import tornado.wsgi - -import mopidy from mopidy.http import actor, handlers @@ -18,7 +17,7 @@ def get_config(self): "hostname": "127.0.0.1", "port": 6680, "zeroconf": "", - "allowed_origins": [], + "allowed_origins": frozenset(), "csrf_protection": True, "default_app": "mopidy", } @@ -29,14 +28,15 @@ def get_app(self): core.get_version = mock.MagicMock(name="get_version") core.get_version.return_value = mopidy.__version__ - testapps = [dict(name="testapp")] - teststatics = [dict(name="teststatic")] + testapps = [{"name": "testapp"}] + teststatics = [{"name": "teststatic"}] apps = [ { "name": "mopidy", "factory": handlers.make_mopidy_app_factory( - testapps, teststatics + apps=testapps, + statics=teststatics, ), } ] @@ -89,12 +89,12 @@ class MopidyWebSocketHandlerTest(HttpServerTest): def test_should_return_ws(self): response = self.fetch("/mopidy/ws", method="GET") - assert 'Can "Upgrade" only to "WebSocket".' == response.body.decode() + assert response.body.decode() == 'Can "Upgrade" only to "WebSocket".' def test_should_return_ws_old(self): response = self.fetch("/mopidy/ws/", method="GET") - assert 'Can "Upgrade" only to "WebSocket".' == response.body.decode() + assert response.body.decode() == 'Can "Upgrade" only to "WebSocket".' 
class MopidyRPCHandlerTest(HttpServerTest): @@ -157,58 +157,6 @@ def test_should_return_mopidy_version(self): "result": mopidy.__version__, } == tornado.escape.json_decode(response.body) - def test_should_return_extra_headers(self): - response = self.fetch("/mopidy/rpc", method="HEAD") - - assert "Accept" in response.headers - assert "X-Mopidy-Version" in response.headers - assert "Cache-Control" in response.headers - assert "Content-Type" in response.headers - - def test_should_require_correct_content_type(self): - cmd = tornado.escape.json_encode( - { - "method": "core.get_version", - "params": [], - "jsonrpc": "2.0", - "id": 1, - } - ) - - response = self.fetch( - "/mopidy/rpc", - method="POST", - body=cmd, - headers={"Content-Type": "text/plain"}, - ) - - assert response.code == 415 - assert response.reason == "Content-Type must be application/json" - - def test_different_origin_returns_access_denied(self): - response = self.fetch( - "/mopidy/rpc", - method="OPTIONS", - headers={"Host": "me:6680", "Origin": "http://evil:666"}, - ) - - assert response.code == 403 - assert response.reason == "Access denied for origin http://evil:666" - - def test_same_origin_returns_cors_headers(self): - response = self.fetch( - "/mopidy/rpc", - method="OPTIONS", - headers={"Host": "me:6680", "Origin": "http://me:6680"}, - ) - - assert ( - response.headers["Access-Control-Allow-Origin"] == "http://me:6680" - ) - assert ( - response.headers["Access-Control-Allow-Headers"] == "Content-Type" - ) - class MopidyRPCHandlerNoCSRFProtectionTest(HttpServerTest): def get_config(self): @@ -275,10 +223,19 @@ def get_app(self): } core = mock.Mock() - statics = [dict(name="static", path=os.path.dirname(__file__))] + statics = [ + { + "name": "static", + "path": Path(__file__).parent, + } + ] http_server = actor.HttpServer( - config=config, core=core, sockets=[], apps=[], statics=statics + config=config, + core=core, + sockets=[], + apps=[], + statics=statics, ) return tornado.web.Application(http_server._get_request_handlers()) @@ -292,7 +249,7 @@ def test_without_slash_should_redirect(self): def test_can_serve_static_files(self): response = self.fetch("/static/test_server.py", method="GET") - assert 200 == response.code + assert response.code == 200 assert response.headers["X-Mopidy-Version"] == mopidy.__version__ assert response.headers["Cache-Control"] == "no-cache" @@ -366,7 +323,7 @@ def get_app(self): } core = mock.Mock() - apps = [dict(name="default_app", factory=default_webapp_factory)] + apps = [{"name": "default_app", "factory": default_webapp_factory}] http_server = actor.HttpServer( config=config, core=core, sockets=[], apps=apps, statics=[] @@ -380,9 +337,7 @@ def test_should_redirect_to_default_app(self): assert response.code == 302 assert response.headers["Location"] == "/default_app/" - response = self.fetch( - "/default_app/", method="GET", follow_redirects=True - ) + response = self.fetch("/default_app/", method="GET", follow_redirects=True) assert response.code == 200 assert "Hello from default webapp" in response.body.decode() @@ -400,10 +355,19 @@ def get_app(self): } core = mock.Mock() - statics = [dict(name="default_app", path=os.path.dirname(__file__))] + statics = [ + { + "name": "default_app", + "path": Path(__file__).parent, + } + ] http_server = actor.HttpServer( - config=config, core=core, sockets=[], apps=[], statics=statics + config=config, + core=core, + sockets=[], + apps=[], + statics=statics, ) return tornado.web.Application(http_server._get_request_handlers()) @@ -417,7 +381,7 @@ 
def test_should_redirect_to_default_app(self): class HttpServerWithInvalidDefaultApp(HttpServerTest): def get_config(self): - config = super(HttpServerWithInvalidDefaultApp, self).get_config() + config = super().get_config() config["http"]["default_app"] = "invalid_webclient" return config @@ -478,7 +442,12 @@ def get_app(self): } core = mock.Mock() - apps = [{"name": "cookie_secret", "factory": cookie_secret_app_factory}] + apps = [ + { + "name": "cookie_secret", + "factory": cookie_secret_app_factory, + } + ] http_server = actor.HttpServer( config=config, core=core, sockets=[], apps=apps, statics=[] @@ -492,13 +461,13 @@ def get_app(self): def test_main_access_without_login(self): response = self.fetch("/cookie_secret", method="GET") - assert 200 == response.code + assert response.code == 200 assert "Unknown user..." in response.body.decode() def test_accessing_login_form_get(self): response = self.fetch("/cookie_secret/login", method="GET") - assert 200 == response.code + assert response.code == 200 assert "This is a login form" in response.body.decode() def test_login(self): @@ -507,7 +476,7 @@ def test_login(self): response = self.fetch("/cookie_secret/login", method="POST", body=body) - assert 200 == response.code + assert response.code == 200 assert "Logged in" in response.body.decode() shutil.rmtree(self._dirpath) @@ -526,7 +495,11 @@ def test_get_secure_cookie(tmp_path): core = mock.Mock() http_server = actor.HttpServer( - config=config, core=core, sockets=[], apps=[], statics=[] + config=config, + core=core, + sockets=[], + apps=[], + statics=[], ) # first secret, generating diff --git a/tests/internal/test_deps.py b/tests/internal/test_deps.py index e30e95b728..57d48f9e69 100644 --- a/tests/internal/test_deps.py +++ b/tests/internal/test_deps.py @@ -1,103 +1,139 @@ import platform import sys -import unittest +from importlib import metadata +from pathlib import Path from unittest import mock -import pkg_resources - from mopidy.internal import deps from mopidy.internal.gi import Gst, gi -class DepsTest(unittest.TestCase): +class TestDeps: def test_format_dependency_list(self): adapters = [ - lambda: dict(name="Python", version="FooPython 2.7.3"), - lambda: dict(name="Platform", version="Loonix 4.0.1"), - lambda: dict( - name="Pykka", version="1.1", path="/foo/bar", other="Quux" + deps.DepInfo( + name="Python", + version="FooPython 3.12.3", + ), + deps.DepInfo( + name="Platform", + version="Loonix 7.0.1", + ), + deps.DepInfo( + name="pykka", + version="5.1", + path=Path("/foo/bar"), + other="Quux", + ), + deps.DepInfo( + name="foo", ), - lambda: dict(name="Foo"), - lambda: dict( - name="Mopidy", - version="0.13", + deps.DepInfo( + name="mopidy", + version="4.13", dependencies=[ - dict( + deps.DepInfo( name="pylast", version="0.5", - dependencies=[dict(name="setuptools", version="0.6")], + dependencies=[ + deps.DepInfo( + name="setuptools", + version="0.6", + ), + ], ) ], ), ] result = deps.format_dependency_list(adapters) - - assert "Python: FooPython 2.7.3" in result - - assert "Platform: Loonix 4.0.1" in result - - assert "Pykka: 1.1 from /foo/bar" in result + assert "Python: FooPython 3.12.3" in result + assert "Platform: Loonix 7.0.1" in result + assert "pykka: 5.1 from /foo/bar" in result assert "/baz.py" not in result assert "Detailed information: Quux" in result - - assert "Foo: not found" in result - - assert "Mopidy: 0.13" in result + assert "foo: not found" in result + assert "mopidy: 4.13" in result assert " pylast: 0.5" in result assert " setuptools: 0.6" in result + 
def test_format_dependency_list_real(self): + result = deps.format_dependency_list() + assert "Python 3." in result + assert "mopidy:" in result + assert "setuptools: 6" in result + def test_executable_info(self): result = deps.executable_info() - assert "Executable" == result["name"] - assert sys.argv[0] in result["version"] + assert result.name == "Executable" + assert result.version + assert sys.argv[0] in result.version def test_platform_info(self): result = deps.platform_info() - assert "Platform" == result["name"] - assert platform.platform() in result["version"] + assert result.name == "Platform" + assert result.version + assert platform.platform() in result.version def test_python_info(self): result = deps.python_info() - assert "Python" == result["name"] - assert platform.python_implementation() in result["version"] - assert platform.python_version() in result["version"] - assert "python" in result["path"] - assert "platform.py" not in result["path"] + assert result.name == "Python" + assert result.version + assert platform.python_implementation() in result.version + assert platform.python_version() in result.version + assert "python" in str(result.path) + assert "platform.py" not in str(result.path) def test_gstreamer_info(self): result = deps.gstreamer_info() - assert "GStreamer" == result["name"] - assert ".".join(map(str, Gst.version())) == result["version"] - assert "gi" in result["path"] - assert "__init__.py" not in result["path"] - assert "Python wrapper: python-gi" in result["other"] - assert gi.__version__ in result["other"] - assert "Relevant elements:" in result["other"] - - @mock.patch("pkg_resources.get_distribution") + assert result.name == "GStreamer" + assert result.version + assert ".".join(map(str, Gst.version())) == result.version + assert "gi" in str(result.path) + assert "__init__.py" not in str(result.path) + assert result.other + assert "Python wrapper: python-gi" in result.other + assert gi.__version__ in result.other + assert "Relevant elements:" in result.other + + def test_gstreamer_check_elements(self): + with mock.patch( + "mopidy.internal.deps._gstreamer_check_elements", + return_val=("test1", True), + ): + result = deps.gstreamer_info() + assert result.other + assert " none" in result.other + + @mock.patch.object(metadata, "distribution") def test_pkg_info(self, get_distribution_mock): - dist_setuptools = mock.Mock() - dist_setuptools.project_name = "setuptools" + dist_setuptools = mock.MagicMock() + dist_setuptools.name = "setuptools" dist_setuptools.version = "0.6" - dist_setuptools.location = "/tmp/example/setuptools" - dist_setuptools.requires.return_value = [] + dist_setuptools.locate_file = mock.MagicMock( + return_value=Path("/tmp/example/setuptools/main.py") + ) + dist_setuptools.requires = [] dist_pykka = mock.Mock() - dist_pykka.project_name = "Pykka" + dist_pykka.name = "Pykka" dist_pykka.version = "1.1" - dist_pykka.location = "/tmp/example/pykka" - dist_pykka.requires.return_value = [dist_setuptools] + dist_pykka.locate_file = mock.MagicMock( + return_value=Path("/tmp/example/pykka/main.py") + ) + dist_pykka.requires = [f"{dist_setuptools.name}==0.6"] dist_mopidy = mock.Mock() - dist_mopidy.project_name = "Mopidy" + dist_mopidy.name = "Mopidy" dist_mopidy.version = "0.13" - dist_mopidy.location = "/tmp/example/mopidy" - dist_mopidy.requires.return_value = [dist_pykka] + dist_mopidy.locate_file = mock.MagicMock( + return_value=Path("/tmp/example/mopidy/no_name.py") + ) + dist_mopidy.requires = [f"{dist_pykka.name}==1.1"] 
get_distribution_mock.side_effect = [ dist_mopidy, @@ -105,36 +141,33 @@ def test_pkg_info(self, get_distribution_mock): dist_setuptools, ] - result = deps.pkg_info() + mopidy_dep = deps.pkg_info(pkg_name="mopidy", seen_pkgs=set()) - assert "Mopidy" == result["name"] - assert "0.13" == result["version"] - assert "mopidy" in result["path"] + assert mopidy_dep.name == "mopidy" + assert mopidy_dep.version == "0.13" + assert "mopidy" in str(mopidy_dep.path) - dep_info_pykka = result["dependencies"][0] - assert "Pykka" == dep_info_pykka["name"] - assert "1.1" == dep_info_pykka["version"] + pykka_dep = mopidy_dep.dependencies[0] + assert pykka_dep.name == "pykka" + assert pykka_dep.version == "1.1" - dep_info_setuptools = dep_info_pykka["dependencies"][0] - assert "setuptools" == dep_info_setuptools["name"] - assert "0.6" == dep_info_setuptools["version"] + setuptools_dep = pykka_dep.dependencies[0] + assert setuptools_dep.name == "setuptools" + assert setuptools_dep.version == "0.6" - @mock.patch("pkg_resources.get_distribution") + @mock.patch.object(metadata, "distribution") def test_pkg_info_for_missing_dist(self, get_distribution_mock): - get_distribution_mock.side_effect = pkg_resources.DistributionNotFound - - result = deps.pkg_info() - - assert "Mopidy" == result["name"] - assert "version" not in result - assert "path" not in result + get_distribution_mock.side_effect = metadata.PackageNotFoundError("test") - @mock.patch("pkg_resources.get_distribution") - def test_pkg_info_for_wrong_dist_version(self, get_distribution_mock): - get_distribution_mock.side_effect = pkg_resources.VersionConflict + result = deps.pkg_info(pkg_name="mopidy", seen_pkgs=set()) - result = deps.pkg_info() + assert result.name == "mopidy" + assert result.version is None + assert result.path is None - assert "Mopidy" == result["name"] - assert "version" not in result - assert "path" not in result + def test_pkg_info_real(self): + result = deps.pkg_info( + pkg_name="mopidy", + seen_pkgs=set(), + ) + assert result diff --git a/tests/internal/test_http.py b/tests/internal/test_http.py index 3c01665f61..5e598f77c8 100644 --- a/tests/internal/test_http.py +++ b/tests/internal/test_http.py @@ -3,7 +3,6 @@ import pytest import requests import responses - from mopidy.internal import http TIMEOUT = 1000 @@ -11,12 +10,12 @@ BODY = "This is the contents of foo.txt." 
-@pytest.fixture +@pytest.fixture() def session(): return requests.Session() -@pytest.fixture +@pytest.fixture() def session_mock(): return mock.Mock(spec=requests.Session) diff --git a/tests/internal/test_jsonrpc.py b/tests/internal/test_jsonrpc.py index 8b650136f1..144ab8cb44 100644 --- a/tests/internal/test_jsonrpc.py +++ b/tests/internal/test_jsonrpc.py @@ -3,7 +3,7 @@ from unittest import mock import pykka - +import pytest from mopidy import core, models from mopidy.internal import deprecation, jsonrpc @@ -47,12 +47,15 @@ def fail(self): class JsonRpcTestBase(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.backend = dummy_backend.create_proxy() self.calc = Calculator() with deprecation.ignore(): - self.core = core.Core.start(backends=[self.backend]).proxy() + self.core = core.Core.start( + config={}, + backends=[self.backend], + ).proxy() self.jrw = jsonrpc.JsonRpcWrapper( objects={ @@ -67,13 +70,13 @@ def setUp(self): # noqa: N802 decoders=[models.model_json_decoder], ) - def tearDown(self): # noqa: N802 + def tearDown(self): pykka.ActorRegistry.stop_all() class JsonRpcSetupTest(JsonRpcTestBase): def test_empty_object_mounts_is_not_allowed(self): - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): jsonrpc.JsonRpcWrapper(objects={"": Calculator()}) @@ -95,9 +98,7 @@ def test_handle_json_decodes_mopidy_models(self): request = '{"foo": {"__model__": "Artist", "name": "bar"}}' self.jrw.handle_json(request) - self.jrw.handle_data.assert_called_once_with( - {"foo": models.Artist(name="bar")} - ) + self.jrw.handle_data.assert_called_once_with({"foo": models.Artist(name="bar")}) def test_handle_json_encodes_mopidy_models(self): self.jrw.handle_data = mock.Mock() @@ -417,9 +418,7 @@ def test_invalid_params_value_causes_invalid_request_error(self): error = response["error"] assert error["code"] == (-32600) assert error["message"] == "Invalid Request" - assert ( - error["data"] == "'params', if given, must be an array or an object" - ) + assert error["data"] == "'params', if given, must be an array or an object" def test_method_on_without_object_causes_unknown_method_error(self): request = { @@ -432,10 +431,7 @@ def test_method_on_without_object_causes_unknown_method_error(self): error = response["error"] assert error["code"] == (-32601) assert error["message"] == "Method not found" - assert ( - error["data"] - == "Could not find object mount in method name 'bogus'" - ) + assert error["data"] == "Could not find object mount in method name 'bogus'" def test_method_on_unknown_object_causes_unknown_method_error(self): request = { @@ -534,7 +530,7 @@ def test_batch_with_invalid_commands_causes_invalid_request_error(self): assert error["message"] == "Invalid Request" assert error["data"] == "Request must be an object" - def test_batch_of_both_successfull_and_failing_requests(self): + def test_batch_of_both_successful_and_failing_requests(self): request = [ # Call with positional params { @@ -585,7 +581,7 @@ def test_batch_of_both_successfull_and_failing_requests(self): class JsonRpcInspectorTest(JsonRpcTestBase): def test_empty_object_mounts_is_not_allowed(self): - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): jsonrpc.JsonRpcInspector(objects={"": Calculator}) def test_can_describe_method_on_root(self): @@ -603,8 +599,7 @@ def test_inspector_can_describe_an_object_with_methods(self): assert "calc.add" in methods assert ( - methods["calc.add"]["description"] - == "Returns the sum of the given numbers" + 
methods["calc.add"]["description"] == "Returns the sum of the given numbers" ) assert "calc.sub" in methods @@ -660,6 +655,4 @@ def test_inspector_can_describe_a_bunch_of_large_classes(self): assert len(methods["core.playlists.as_list"]["params"]) == 0 assert "core.tracklist.filter" in methods - assert ( - methods["core.tracklist.filter"]["params"][0]["name"] == "criteria" - ) + assert methods["core.tracklist.filter"]["params"][0]["name"] == "criteria" diff --git a/tests/internal/test_log.py b/tests/internal/test_log.py index d5b7d48692..88fcc3c0de 100644 --- a/tests/internal/test_log.py +++ b/tests/internal/test_log.py @@ -1,9 +1,8 @@ import pytest - from mopidy.internal import log -@pytest.fixture +@pytest.fixture() def config(): return { "verbosity": 2, diff --git a/tests/internal/test_models.py b/tests/internal/test_models.py index ab60d7a7fc..13bcc62528 100644 --- a/tests/internal/test_models.py +++ b/tests/internal/test_models.py @@ -1,6 +1,7 @@ import json import unittest +import pytest from mopidy.internal.models import ( HistoryState, HistoryTrack, @@ -8,13 +9,7 @@ PlaybackState, TracklistState, ) -from mopidy.models import ( - ModelJSONEncoder, - Ref, - TlTrack, - Track, - model_json_decoder, -) +from mopidy.models import ModelJSONEncoder, Ref, TlTrack, Track, model_json_decoder class HistoryTrackTest(unittest.TestCase): @@ -22,14 +17,14 @@ def test_track(self): track = Ref.track() result = HistoryTrack(track=track) assert result.track == track - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.track = None def test_timestamp(self): timestamp = 1234 result = HistoryTrack(timestamp=timestamp) assert result.timestamp == timestamp - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.timestamp = None def test_to_json_and_back(self): @@ -44,12 +39,12 @@ def test_history_list(self): history = (HistoryTrack(), HistoryTrack()) result = HistoryState(history=history) assert result.history == history - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.history = None def test_history_string_fail(self): history = "not_a_valid_history" - with self.assertRaises(TypeError): + with pytest.raises(TypeError): HistoryState(history=history) def test_to_json_and_back(self): @@ -64,26 +59,26 @@ def test_volume(self): volume = 37 result = MixerState(volume=volume) assert result.volume == volume - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.volume = None def test_volume_invalid(self): volume = 105 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): MixerState(volume=volume) def test_mute_false(self): mute = False result = MixerState(mute=mute) assert result.mute == mute - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.mute = None def test_mute_true(self): mute = True result = MixerState(mute=mute) assert result.mute == mute - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.mute = False def test_mute_default(self): @@ -102,43 +97,43 @@ def test_position(self): time_position = 123456 result = PlaybackState(time_position=time_position) assert result.time_position == time_position - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.time_position = None def test_position_invalid(self): time_position = -1 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): PlaybackState(time_position=time_position) def 
test_tl_track(self): tlid = 42 result = PlaybackState(tlid=tlid) assert result.tlid == tlid - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.tlid = None def test_tl_track_none(self): tlid = None result = PlaybackState(tlid=tlid) assert result.tlid == tlid - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.tl_track = None def test_tl_track_invalid(self): tl_track = Track() - with self.assertRaises(TypeError): + with pytest.raises(TypeError): PlaybackState(tlid=tl_track) def test_state(self): state = "playing" result = PlaybackState(state=state) assert result.state == state - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.state = None def test_state_invalid(self): state = "not_a_state" - with self.assertRaises(TypeError): + with pytest.raises(TypeError): PlaybackState(state=state) def test_to_json_and_back(self): @@ -153,64 +148,64 @@ def test_repeat_true(self): repeat = True result = TracklistState(repeat=repeat) assert result.repeat == repeat - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.repeat = None def test_repeat_false(self): repeat = False result = TracklistState(repeat=repeat) assert result.repeat == repeat - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.repeat = None def test_repeat_invalid(self): repeat = 33 - with self.assertRaises(TypeError): + with pytest.raises(TypeError): TracklistState(repeat=repeat) def test_consume_true(self): val = True result = TracklistState(consume=val) assert result.consume == val - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.repeat = None def test_random_true(self): val = True result = TracklistState(random=val) assert result.random == val - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.random = None def test_single_true(self): val = True result = TracklistState(single=val) assert result.single == val - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.single = None def test_next_tlid(self): val = 654 result = TracklistState(next_tlid=val) assert result.next_tlid == val - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.next_tlid = None def test_next_tlid_invalid(self): val = -1 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): TracklistState(next_tlid=val) def test_tracks(self): tracks = (TlTrack(), TlTrack()) result = TracklistState(tl_tracks=tracks) assert result.tl_tracks == tracks - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.tl_tracks = None def test_tracks_invalid(self): tracks = (Track(), Track()) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): TracklistState(tl_tracks=tracks) def test_to_json_and_back(self): diff --git a/tests/internal/test_network.py b/tests/internal/test_network.py index db38667b82..cca6c95a9f 100644 --- a/tests/internal/test_network.py +++ b/tests/internal/test_network.py @@ -12,7 +12,7 @@ def test_system_that_claims_no_ipv6_support(self): @patch("socket.has_ipv6", True) @patch("socket.socket") def test_system_with_broken_ipv6(self, socket_mock): - socket_mock.side_effect = IOError() + socket_mock.side_effect = OSError() assert not network.try_ipv6_socket() @patch("socket.has_ipv6", True) diff --git a/tests/internal/test_path.py b/tests/internal/test_path.py index 0e76294dd3..07f697b533 100644 --- 
a/tests/internal/test_path.py +++ b/tests/internal/test_path.py @@ -4,16 +4,15 @@ import unittest import pytest - from mopidy.internal import path from mopidy.internal.gi import GLib class GetOrCreateDirTest(unittest.TestCase): - def setUp(self): # noqa: N802 - self.parent = pathlib.Path(tempfile.mkdtemp()) + def setUp(self): + self.parent = pathlib.Path(tempfile.mkdtemp()).resolve() - def tearDown(self): # noqa: N802 + def tearDown(self): if self.parent.is_dir(): shutil.rmtree(str(self.parent)) @@ -54,14 +53,14 @@ def test_create_dir_with_name_of_existing_file_throws_oserror(self): def test_create_dir_with_none(self): with pytest.raises(TypeError): - path.get_or_create_dir(None) + path.get_or_create_dir(None) # pyright: ignore[reportArgumentType] class GetOrCreateFileTest(unittest.TestCase): - def setUp(self): # noqa: N802 - self.parent = pathlib.Path(tempfile.mkdtemp()) + def setUp(self): + self.parent = pathlib.Path(tempfile.mkdtemp()).resolve() - def tearDown(self): # noqa: N802 + def tearDown(self): if self.parent.is_dir(): shutil.rmtree(str(self.parent)) @@ -101,7 +100,7 @@ def test_create_file_with_name_of_existing_dir_throws_error(self): def test_create_file_with_none_filename_throws_type_error(self): with pytest.raises(TypeError): - path.get_or_create_file(None) + path.get_or_create_file(None) # pyright: ignore[reportArgumentType] def test_create_dir_without_mkdir(self): file_path = self.parent / "foo" / "bar" @@ -125,9 +124,8 @@ def test_create_dir_with_unicode_content(self): class GetUnixSocketPathTest(unittest.TestCase): def test_correctly_matched_socket_path(self): - assert ( - path.get_unix_socket_path("unix:/tmp/mopidy.socket") - == "/tmp/mopidy.socket" + assert path.get_unix_socket_path("unix:/tmp/mopidy.socket") == pathlib.Path( + "/tmp/mopidy.socket" ) def test_correctly_no_match_socket_path(self): @@ -177,12 +175,12 @@ class ExpandPathTest(unittest.TestCase): def test_empty_path(self): result = path.expand_path("") - assert result == pathlib.Path(".").resolve() + assert result == pathlib.Path().resolve() def test_absolute_path(self): result = path.expand_path("/tmp/foo") - assert result == pathlib.Path("/tmp/foo") + assert result == pathlib.Path("/tmp/foo").resolve() def test_home_dir_expansion(self): result = path.expand_path("~/foo") @@ -201,9 +199,12 @@ def test_xdg_subsititution(self): assert str(result) == expected def test_xdg_subsititution_unknown(self): - result = path.expand_path("/tmp/$XDG_INVALID_DIR/foo") + with pytest.raises(ValueError) as exc_info: + path.expand_path("/tmp/$XDG_INVALID_DIR/foo") - assert result is None + assert str(exc_info.value) == ( + "Unexpanded '$...' 
in path '/tmp/$XDG_INVALID_DIR/foo'" + ) def test_invalid_utf8_bytes(self): result = path.expand_path(b"ab\xc3\x12") diff --git a/tests/internal/test_playlists.py b/tests/internal/test_playlists.py index f3c1ff23f0..bc19cbaff1 100644 --- a/tests/internal/test_playlists.py +++ b/tests/internal/test_playlists.py @@ -1,5 +1,4 @@ import pytest - from mopidy.internal import playlists BAD = b"foobarbaz" @@ -85,7 +84,7 @@ @pytest.mark.parametrize( - "detect_fn, data", + ("detect_fn", "data"), [ (playlists.detect_extm3u_header, EXTM3U), (playlists.detect_pls_header, PLS), @@ -104,7 +103,6 @@ def test_detect_from_valid_header(detect_fn, data): playlists.detect_extm3u_header, playlists.detect_pls_header, playlists.detect_asx_header, - playlists.detect_asx_header, playlists.detect_xspf_header, ], ) @@ -113,7 +111,7 @@ def test_detect_from_invalid_header(detect_fn): @pytest.mark.parametrize( - "parse_fn, data", + ("parse_fn", "data"), [ (playlists.parse_extm3u, EXTM3U), (playlists.parse_pls, PLS), diff --git a/tests/internal/test_validation.py b/tests/internal/test_validation.py index fb29ce7930..259973f450 100644 --- a/tests/internal/test_validation.py +++ b/tests/internal/test_validation.py @@ -1,5 +1,4 @@ -from pytest import raises - +import pytest from mopidy import exceptions from mopidy.internal import validation @@ -10,15 +9,15 @@ def test_check_boolean_with_valid_values(): def test_check_boolean_with_other_values(): - for value in 1, 0, None, "", list(), tuple(): - with raises(exceptions.ValidationError): + for value in 1, 0, None, "", [], (): + with pytest.raises(exceptions.ValidationError): validation.check_boolean(value) def test_check_boolean_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_boolean(1234) - assert "Expected a boolean, not 1234" == str(excinfo.value) + assert str(excinfo.value) == "Expected a boolean, not 1234" def test_check_choice_with_valid_values(): @@ -28,14 +27,14 @@ def test_check_choice_with_valid_values(): def test_check_choice_with_invalid_values(): for value, choices in (5, (1, 2, 3)), ("xyz", ("abc", "def")): - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_choice(value, choices) def test_check_choice_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_choice(5, (1, 2, 3)) - assert "Expected one of (1, 2, 3), not 5" == str(excinfo.value) + assert str(excinfo.value) == "Expected one of (1, 2, 3), not 5" def test_check_instance_with_valid_choices(): @@ -45,14 +44,14 @@ def test_check_instance_with_valid_choices(): def test_check_instance_with_invalid_values(): for value, cls in (1, str), ("abc", int): - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_instance(value, cls) def test_check_instance_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_instance(1, dict) - assert "Expected a dict instance, not 1" == str(excinfo.value) + assert str(excinfo.value) == "Expected a dict instance, not 1" def test_check_instances_with_valid_values(): @@ -62,22 +61,22 @@ def test_check_instances_with_valid_values(): def test_check_instances_with_invalid_values(): - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): 
validation.check_instances("abc", str) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_instances(["abc", 123], str) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_instances(None, str) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_instances([None], str) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_instances(iter(["abc"]), str) def test_check_instances_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_instances([1], str) - assert "Expected a list of str, not [1]" == str(excinfo.value) + assert str(excinfo.value) == "Expected a list of str, not [1]" def test_check_query_valid_values(): @@ -86,37 +85,37 @@ def test_check_query_valid_values(): def test_check_query_random_iterables(): - for value in None, tuple(), list(), "abc": - with raises(exceptions.ValidationError): + for value in None, (), [], "abc": + with pytest.raises(exceptions.ValidationError): validation.check_query(value) def test_check_mapping_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_query([]) - assert "Expected a query dictionary, not []" == str(excinfo.value) + assert str(excinfo.value) == "Expected a query dictionary, not []" def test_check_query_invalid_fields(): for value in "wrong", "bar", "foo", "tlid": - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_query({value: []}) def test_check_field_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_query({"wrong": ["abc"]}) assert "Expected query field to be one of " in str(excinfo.value) def test_check_query_invalid_values(): for value in "", None, "foo", 123, [""], [None], iter(["abc"]): - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_query({"any": value}) def test_check_values_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_query({"any": "abc"}) assert 'Expected "any" to be list of strings, not' in str(excinfo.value) @@ -129,15 +128,15 @@ def test_check_uri_with_valid_values(): def test_check_uri_with_invalid_values(): # Note that tuple catches a potential bug with using "'foo' % arg" for # formatting. 
- for value in ("foobar", "htt p://example.com", None, 1234, tuple()): - with raises(exceptions.ValidationError): + for value in ("foobar", "htt p://example.com", None, 1234, ()): + with pytest.raises(exceptions.ValidationError): validation.check_uri(value) def test_check_uri_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_uri("testing") - assert "Expected a valid URI, not 'testing'" == str(excinfo.value) + assert str(excinfo.value) == "Expected a valid URI, not 'testing'" def test_check_uris_with_valid_values(): @@ -147,19 +146,19 @@ def test_check_uris_with_valid_values(): def test_check_uris_with_invalid_values(): - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_uris("foobar:") - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_uris(None) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_uris([None]) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_uris(["foobar:", "foobar"]) - with raises(exceptions.ValidationError): + with pytest.raises(exceptions.ValidationError): validation.check_uris(iter(["http://example.com"])) def test_check_uris_error_message(): - with raises(exceptions.ValidationError) as excinfo: + with pytest.raises(exceptions.ValidationError) as excinfo: validation.check_uris("testing") - assert "Expected a list of URIs, not 'testing'" == str(excinfo.value) + assert str(excinfo.value) == "Expected a list of URIs, not 'testing'" diff --git a/tests/internal/test_xdg.py b/tests/internal/test_xdg.py index 1ccb99c27a..b464cdeef7 100644 --- a/tests/internal/test_xdg.py +++ b/tests/internal/test_xdg.py @@ -1,13 +1,12 @@ import os -import pathlib +from pathlib import Path from unittest import mock import pytest - from mopidy.internal import xdg -@pytest.fixture +@pytest.fixture() def environ(): patcher = mock.patch.dict(os.environ, clear=True) yield patcher.start() @@ -15,51 +14,45 @@ def environ(): def test_cache_dir_default(environ): - assert xdg.get_dirs()["XDG_CACHE_DIR"] == ( - pathlib.Path("~/.cache").expanduser() - ) + assert xdg.get_dirs()["XDG_CACHE_DIR"] == Path("~/.cache").expanduser() def test_cache_dir_from_env(environ): os.environ["XDG_CACHE_HOME"] = "/foo/bar" - assert xdg.get_dirs()["XDG_CACHE_DIR"] == pathlib.Path("/foo/bar") + assert xdg.get_dirs()["XDG_CACHE_DIR"] == Path("/foo/bar") def test_config_dir_default(environ): - assert xdg.get_dirs()["XDG_CONFIG_DIR"] == ( - pathlib.Path("~/.config").expanduser() - ) + assert xdg.get_dirs()["XDG_CONFIG_DIR"] == Path("~/.config").expanduser() def test_config_dir_from_env(environ): os.environ["XDG_CONFIG_HOME"] = "/foo/bar" - assert xdg.get_dirs()["XDG_CONFIG_DIR"] == pathlib.Path("/foo/bar") + assert xdg.get_dirs()["XDG_CONFIG_DIR"] == Path("/foo/bar") def test_data_dir_default(environ): - assert xdg.get_dirs()["XDG_DATA_DIR"] == ( - pathlib.Path("~/.local/share").expanduser() - ) + assert xdg.get_dirs()["XDG_DATA_DIR"] == Path("~/.local/share").expanduser() def test_data_dir_from_env(environ): os.environ["XDG_DATA_HOME"] = "/foo/bar" - assert xdg.get_dirs()["XDG_DATA_DIR"] == pathlib.Path("/foo/bar") + assert xdg.get_dirs()["XDG_DATA_DIR"] == Path("/foo/bar") def test_user_dirs(environ, tmpdir): os.environ["XDG_CONFIG_HOME"] = str(tmpdir) - with open(os.path.join(str(tmpdir), 
"user-dirs.dirs"), "wb") as fh: + with (Path(tmpdir) / "user-dirs.dirs").open("wb") as fh: fh.write(b"# Some comments\n") fh.write(b'XDG_MUSIC_DIR="$HOME/Music2"\n') result = xdg.get_dirs() - assert result["XDG_MUSIC_DIR"] == pathlib.Path("~/Music2").expanduser() + assert result["XDG_MUSIC_DIR"] == Path("~/Music2").expanduser() assert "XDG_DOWNLOAD_DIR" not in result diff --git a/tests/m3u/test_playlists.py b/tests/m3u/test_playlists.py index b442a6aab9..11be34e6c6 100644 --- a/tests/m3u/test_playlists.py +++ b/tests/m3u/test_playlists.py @@ -3,9 +3,9 @@ import shutil import tempfile import unittest +from typing import Any, ClassVar import pykka - from mopidy import core from mopidy.m3u.backend import M3UBackend from mopidy.models import Playlist, Track @@ -16,7 +16,7 @@ class M3UPlaylistsProviderTest(unittest.TestCase): backend_class = M3UBackend - config = { + config: ClassVar[dict[str, dict[str, Any]]] = { "m3u": { "enabled": True, "base_dir": None, @@ -26,16 +26,16 @@ class M3UPlaylistsProviderTest(unittest.TestCase): } } - def setUp(self): # noqa: N802 + def setUp(self): self.config["m3u"]["playlists_dir"] = pathlib.Path(tempfile.mkdtemp()) self.playlists_dir = self.config["m3u"]["playlists_dir"] self.base_dir = self.config["m3u"]["base_dir"] or self.playlists_dir audio = dummy_audio.create_proxy() backend = M3UBackend.start(config=self.config, audio=audio).proxy() - self.core = core.Core(backends=[backend]) + self.core = core.Core(config=self.config, backends=[backend]) - def tearDown(self): # noqa: N802 + def tearDown(self): pykka.ActorRegistry.stop_all() if self.playlists_dir.exists(): @@ -47,13 +47,13 @@ def test_created_playlist_is_persisted(self): assert not path.exists() playlist = self.core.playlists.create("test") - assert "test" == playlist.name + assert playlist.name == "test" assert uri == playlist.uri assert path.exists() def test_create_sanitizes_playlist_name(self): playlist = self.core.playlists.create(" ../../test FOO baR ") - assert "..|..|test FOO baR" == playlist.name + assert playlist.name == "..|..|test FOO baR" path = self.playlists_dir / "..|..|test FOO baR.m3u" assert self.playlists_dir == path.parent assert path.exists() @@ -66,13 +66,13 @@ def test_saved_playlist_is_persisted(self): path2 = self.playlists_dir / "test2.m3u" playlist = self.core.playlists.create("test1") - assert "test1" == playlist.name + assert playlist.name == "test1" assert uri1 == playlist.uri assert path1.exists() assert not path2.exists() playlist = self.core.playlists.save(playlist.replace(name="test2")) - assert "test2" == playlist.name + assert playlist.name == "test2" assert uri2 == playlist.uri assert not path1.exists() assert path2.exists() @@ -84,7 +84,7 @@ def test_deleted_playlist_is_removed(self): assert not path.exists() playlist = self.core.playlists.create("test") - assert "test" == playlist.name + assert playlist.name == "test" assert uri == playlist.uri assert path.exists() @@ -169,7 +169,7 @@ def test_load_playlist_with_nonfilesystem_encoding_of_filename(self): assert len(self.core.playlists.as_list()) == 1 result = self.core.playlists.as_list() - assert "���" == result[0].name + assert result[0].name == "���" @unittest.SkipTest def test_playlists_dir_is_created(self): @@ -282,7 +282,7 @@ def test_playlist_with_unknown_track(self): assert len(self.core.playlists.as_list()) == 1 result = self.core.playlists.lookup("m3u:test.m3u") - assert "m3u:test.m3u" == result.uri + assert result.uri == "m3u:test.m3u" assert playlist.name == result.name assert track.uri == 
result.tracks[0].uri @@ -295,7 +295,7 @@ def test_playlist_with_absolute_path(self): assert len(self.core.playlists.as_list()) == 1 result = self.core.playlists.lookup("m3u:test.m3u") - assert "m3u:test.m3u" == result.uri + assert result.uri == "m3u:test.m3u" assert playlist.name == result.name assert filepath.as_uri() == result.tracks[0].uri @@ -308,9 +308,9 @@ def test_playlist_with_relative_path(self): assert len(self.core.playlists.as_list()) == 1 result = self.core.playlists.lookup("m3u:test.m3u") - assert "m3u:test.m3u" == result.uri + assert result.uri == "m3u:test.m3u" assert playlist.name == result.name - assert filepath.as_uri() == result.tracks[0].uri + assert filepath.resolve().as_uri() == result.tracks[0].uri def test_playlist_sort_order(self): def check_order(playlists, names): @@ -360,6 +360,6 @@ def test_get_items_from_file_outside_playlist_dir_returns_none(self): class M3UPlaylistsProviderBaseDirectoryTest(M3UPlaylistsProviderTest): - def setUp(self): # noqa: N802 + def setUp(self): self.config["m3u"]["base_dir"] = pathlib.Path(tempfile.mkdtemp()) super().setUp() diff --git a/tests/m3u/test_translator.py b/tests/m3u/test_translator.py index 34b9667564..c34b91059f 100644 --- a/tests/m3u/test_translator.py +++ b/tests/m3u/test_translator.py @@ -2,7 +2,6 @@ import pathlib import pytest - from mopidy.m3u import translator from mopidy.m3u.translator import path_to_uri from mopidy.models import Playlist, Ref, Track @@ -19,7 +18,7 @@ def dumps(items): @pytest.mark.parametrize( - "path,scheme,expected", + ("path", "scheme", "expected"), [ ("test", None, "m3u:test"), ("test.m3u", None, "m3u:test.m3u"), @@ -51,7 +50,7 @@ def test_utf8_path_to_uri(): @pytest.mark.parametrize( - "path,expected", + ("path", "expected"), [ ("test", "test"), ("test.m3u", "test"), @@ -72,7 +71,7 @@ def test_path_from_name(): @pytest.mark.parametrize( - "path,expected", + ("path", "expected"), [ ("test.m3u", ("m3u:test.m3u", "test")), ("Test Playlist.m3u", ("m3u:Test%20Playlist.m3u", "Test Playlist")), @@ -86,7 +85,7 @@ def test_path_to_ref(path, expected): @pytest.mark.parametrize( - "contents,basedir,expected", + ("contents", "basedir", "expected"), [ ("", ".", None), ("test.mp3", "/playlists", ("file:///playlists/test.mp3", "test")), @@ -118,16 +117,16 @@ def test_dump_items(): assert dumps([]) == "" assert dumps([Ref.track(uri="file:///test.mp3")]) == ("file:///test.mp3\n") assert dumps([Ref.track(uri="file:///test.mp3", name="test")]) == ( - "#EXTM3U\n" "#EXTINF:-1,test\n" "file:///test.mp3\n" + "#EXTM3U\n#EXTINF:-1,test\nfile:///test.mp3\n" ) assert dumps([Track(uri="file:///test.mp3", name="test", length=42)]) == ( - "#EXTM3U\n" "#EXTINF:-1,test\n" "file:///test.mp3\n" + "#EXTM3U\n#EXTINF:-1,test\nfile:///test.mp3\n" ) assert dumps([Track(uri="http://example.com/stream")]) == ( "http://example.com/stream\n" ) assert dumps([Track(uri="http://example.com/stream", name="Test")]) == ( - "#EXTM3U\n" "#EXTINF:-1,Test\n" "http://example.com/stream\n" + "#EXTM3U\n#EXTINF:-1,Test\nhttp://example.com/stream\n" ) diff --git a/tests/models/__init__.py b/tests/models/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/models/test_fields.py b/tests/models/test_fields.py index 55b63b8ff1..df3f300ac8 100644 --- a/tests/models/test_fields.py +++ b/tests/models/test_fields.py @@ -1,13 +1,7 @@ import unittest -from mopidy.models.fields import ( - Boolean, - Collection, - Field, - Identifier, - Integer, - String, -) +import pytest +from mopidy.models.fields import Boolean, Collection, 
Field, Identifier, Integer, String def create_instance(field): @@ -28,7 +22,7 @@ def test_raw_field_accesible_through_class(self): def test_field_knows_its_name(self): instance = create_instance(Field()) - assert "attr" == instance.__class__.attr._name + assert instance.__class__.attr._name == "attr" def test_field_has_none_as_default(self): instance = create_instance(Field()) @@ -41,13 +35,13 @@ def test_field_does_not_store_default(self): def test_field_assigment_and_retrival(self): instance = create_instance(Field()) instance.attr = 1234 - assert 1234 == instance.attr + assert instance.attr == 1234 def test_field_can_be_reassigned(self): instance = create_instance(Field()) instance.attr = 1234 instance.attr = 5678 - assert 5678 == instance.attr + assert instance.attr == 5678 def test_field_can_be_deleted(self): instance = create_instance(Field()) @@ -75,127 +69,127 @@ def test_field_can_be_set_default(self): class FieldTest(unittest.TestCase): def test_default_handling(self): instance = create_instance(Field(default=1234)) - assert 1234 == instance.attr + assert instance.attr == 1234 def test_type_checking(self): instance = create_instance(Field(type=set)) instance.attr = set() - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = 1234 def test_choices_checking(self): instance = create_instance(Field(choices=(1, 2, 3))) instance.attr = 1 - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = 4 def test_default_respects_type_check(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): create_instance(Field(type=int, default="123")) def test_default_respects_choices_check(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): create_instance(Field(choices=(1, 2, 3), default=5)) class StringTest(unittest.TestCase): def test_default_handling(self): instance = create_instance(String(default="abc")) - assert "abc" == instance.attr + assert instance.attr == "abc" def test_native_str_allowed(self): instance = create_instance(String()) instance.attr = "abc" - assert "abc" == instance.attr + assert instance.attr == "abc" def test_unicode_allowed(self): instance = create_instance(String()) instance.attr = "abc" - assert "abc" == instance.attr + assert instance.attr == "abc" def test_other_disallowed(self): instance = create_instance(String()) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = 1234 def test_empty_string(self): instance = create_instance(String()) instance.attr = "" - assert "" == instance.attr + assert instance.attr == "" class IdentifierTest(unittest.TestCase): def test_default_handling(self): instance = create_instance(Identifier(default="abc")) - assert "abc" == instance.attr + assert instance.attr == "abc" def test_native_str_allowed(self): instance = create_instance(Identifier()) instance.attr = "abc" - assert "abc" == instance.attr + assert instance.attr == "abc" def test_unicode_allowed(self): instance = create_instance(Identifier()) instance.attr = "abc" - assert "abc" == instance.attr + assert instance.attr == "abc" def test_unicode_with_nonascii_allowed(self): instance = create_instance(Identifier()) instance.attr = "æøå" - assert "æøå" == instance.attr + assert instance.attr == "æøå" def test_other_disallowed(self): instance = create_instance(Identifier()) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = 1234 def test_empty_string(self): instance = create_instance(Identifier()) instance.attr = 
"" - assert "" == instance.attr + assert instance.attr == "" class IntegerTest(unittest.TestCase): def test_default_handling(self): instance = create_instance(Integer(default=1234)) - assert 1234 == instance.attr + assert instance.attr == 1234 def test_int_allowed(self): instance = create_instance(Integer()) - instance.attr = int(123) - assert 123 == instance.attr + instance.attr = 123 + assert instance.attr == 123 def test_float_disallowed(self): instance = create_instance(Integer()) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = 123.0 def test_numeric_string_disallowed(self): instance = create_instance(Integer()) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = "123" def test_other_disallowed(self): instance = create_instance(String()) - with self.assertRaises(TypeError): - instance.attr = tuple() + with pytest.raises(TypeError): + instance.attr = () def test_min_validation(self): instance = create_instance(Integer(min=0)) instance.attr = 0 - assert 0 == instance.attr + assert instance.attr == 0 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): instance.attr = -1 def test_max_validation(self): instance = create_instance(Integer(max=10)) instance.attr = 10 - assert 10 == instance.attr + assert instance.attr == 10 - with self.assertRaises(ValueError): + with pytest.raises(ValueError): instance.attr = 11 @@ -216,7 +210,7 @@ def test_false_allowed(self): def test_int_forbidden(self): instance = create_instance(Boolean()) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = 1 @@ -237,15 +231,15 @@ def test_collection_gets_stored_in_container(self): def test_collection_with_wrong_type(self): instance = create_instance(Collection(type=int, container=frozenset)) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = [1, "2", 3] def test_collection_with_string(self): instance = create_instance(Collection(type=int, container=frozenset)) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = "123" def test_strings_should_not_be_considered_a_collection(self): instance = create_instance(Collection(type=str, container=tuple)) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): instance.attr = b"123" diff --git a/tests/models/test_legacy.py b/tests/models/test_legacy.py index 2659705399..029f7cce02 100644 --- a/tests/models/test_legacy.py +++ b/tests/models/test_legacy.py @@ -1,5 +1,6 @@ import unittest +import pytest from mopidy.models import ImmutableObject @@ -30,14 +31,14 @@ def test_copying_model(self): def test_copying_model_with_basic_values(self): model = Model(name="foo", uri="bar") other = model.replace(name="baz") - assert "baz" == other.name - assert "bar" == other.uri + assert other.name == "baz" + assert other.uri == "bar" def test_copying_model_with_missing_values(self): model = Model(uri="bar") other = model.replace(name="baz") - assert "baz" == other.name - assert "bar" == other.uri + assert other.name == "baz" + assert other.uri == "bar" def test_copying_model_with_private_internal_value(self): model = Model(models=[SubModel(name=123)]) @@ -45,7 +46,7 @@ def test_copying_model_with_private_internal_value(self): assert SubModel(name=345) in other.models def test_copying_model_with_invalid_key(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Model().replace(invalid_key=True) def test_copying_model_to_remove(self): @@ -58,39 +59,37 @@ def 
test_uri(self): uri = "an_uri" model = Model(uri=uri) assert model.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): model.uri = None def test_name(self): name = "a name" model = Model(name=name) assert model.name == name - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): model.name = None def test_submodels(self): models = [SubModel(name=123), SubModel(name=456)] model = Model(models=models) assert set(model.models) == set(models) - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): model.models = None def test_models_none(self): assert set() == Model(models=None).models def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Model(foo="baz") def test_repr_without_models(self): - assert "Model(name='name', uri='uri')" == repr( - Model(uri="uri", name="name") - ) + assert repr(Model(uri="uri", name="name")) == "Model(name='name', uri='uri')" def test_repr_with_models(self): assert ( - "Model(models=[SubModel(name=123)], name='name', uri='uri')" - == repr(Model(uri="uri", name="name", models=[SubModel(name=123)])) + repr(Model(uri="uri", name="name", models=[SubModel(name=123)])) + == "Model(models=[SubModel(name=123)], name='name', uri='uri')" ) def test_serialize_without_models(self): diff --git a/tests/models/test_models.py b/tests/models/test_models.py index ce5a8375ce..efefa27ce1 100644 --- a/tests/models/test_models.py +++ b/tests/models/test_models.py @@ -1,6 +1,7 @@ import json import unittest +import pytest from mopidy.models import ( Album, Artist, @@ -84,14 +85,14 @@ def test_replace_playlist(self): def test_replace_track_with_basic_values(self): track = Track(name="foo", uri="bar") other = track.replace(name="baz") - assert "baz" == other.name - assert "bar" == other.uri + assert other.name == "baz" + assert other.uri == "bar" def test_replace_track_with_missing_values(self): track = Track(uri="bar") other = track.replace(name="baz") - assert "baz" == other.name - assert "bar" == other.uri + assert other.name == "baz" + assert other.uri == "bar" def test_replace_track_with_private_internal_value(self): artist1 = Artist(name="foo") @@ -101,7 +102,7 @@ def test_replace_track_with_private_internal_value(self): assert artist2 in other.artists def test_replace_track_with_invalid_key(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Track().replace(invalid_key=True) def test_replace_track_to_remove(self): @@ -114,29 +115,30 @@ def test_uri(self): uri = "an_uri" ref = Ref(uri=uri) assert ref.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): ref.uri = None def test_name(self): name = "a name" ref = Ref(name=name) assert ref.name == name - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): ref.name = None # TODO: add these for the more of the models? 
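
The model tests above and below all lean on the same immutable-model contract: fields are read-only after construction, unknown keyword arguments raise TypeError, and updates go through replace(), which returns a new instance. As a quick orientation for readers, a usage sketch based only on what these tests assert (not new behaviour):

    from mopidy.models import Track

    track = Track(uri="dummy:a", name="foo")
    other = track.replace(name="bar")  # returns a new Track; the original is untouched
    assert other.uri == "dummy:a"
    assert other.name == "bar"
    # Direct assignment is rejected:
    #   track.name = "bar"   -> raises AttributeError
    # Unknown kwargs are rejected too:
    #   Track(foo="baz")     -> raises TypeError
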
def test_del_name(self): ref = Ref(name="foo") - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): del ref.name def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Ref(foo="baz") def test_repr_without_results(self): - assert "Ref(name='foo', type='artist', uri='uri')" == repr( - Ref(uri="uri", name="foo", type="artist") + assert ( + repr(Ref(uri="uri", name="foo", type="artist")) + == "Ref(name='foo', type='artist', uri='uri')" ) def test_serialize_without_results(self): @@ -193,23 +195,23 @@ def test_uri(self): uri = "an_uri" image = Image(uri=uri) assert image.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): image.uri = None def test_width(self): image = Image(width=100) assert image.width == 100 - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): image.width = None def test_height(self): image = Image(height=100) assert image.height == 100 - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): image.height = None def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Image(foo="baz") @@ -218,38 +220,36 @@ def test_uri(self): uri = "an_uri" artist = Artist(uri=uri) assert artist.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): artist.uri = None def test_name(self): name = "a name" artist = Artist(name=name) assert artist.name == name - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): artist.name = None def test_musicbrainz_id(self): mb_id = "mb-id" artist = Artist(musicbrainz_id=mb_id) assert artist.musicbrainz_id == mb_id - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): artist.musicbrainz_id = None def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Artist(foo="baz") def test_invalid_kwarg_with_name_matching_method(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Artist(replace="baz") - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Artist(serialize="baz") def test_repr(self): - assert "Artist(name='name', uri='uri')" == repr( - Artist(uri="uri", name="name") - ) + assert repr(Artist(uri="uri", name="name")) == "Artist(name='name', uri='uri')" def test_serialize(self): self.assertDictEqual( @@ -273,21 +273,21 @@ def test_to_json_and_back_with_unknown_field(self): artist = Artist(uri="uri", name="name").serialize() artist["foo"] = "foo" serialized = json.dumps(artist) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): json.loads(serialized, object_hook=model_json_decoder) def test_to_json_and_back_with_field_matching_method(self): artist = Artist(uri="uri", name="name").serialize() artist["copy"] = "foo" serialized = json.dumps(artist) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): json.loads(serialized, object_hook=model_json_decoder) def test_to_json_and_back_with_field_matching_internal_field(self): artist = Artist(uri="uri", name="name").serialize() artist["__mro__"] = "foo" serialized = json.dumps(artist) - with self.assertRaises(TypeError): + with pytest.raises(TypeError): json.loads(serialized, object_hook=model_json_decoder) def test_eq_name(self): @@ -350,21 +350,21 @@ def test_uri(self): uri = "an_uri" album = Album(uri=uri) assert album.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): 
album.uri = None def test_name(self): name = "a name" album = Album(name=name) assert album.name == name - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): album.name = None def test_artists(self): artist = Artist() album = Album(artists=[artist]) assert artist in album.artists - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): album.artists = None def test_artists_none(self): @@ -374,43 +374,41 @@ def test_num_tracks(self): num_tracks = 11 album = Album(num_tracks=num_tracks) assert album.num_tracks == num_tracks - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): album.num_tracks = None def test_num_discs(self): num_discs = 2 album = Album(num_discs=num_discs) assert album.num_discs == num_discs - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): album.num_discs = None def test_date(self): date = "1977-01-01" album = Album(date=date) assert album.date == date - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): album.date = None def test_musicbrainz_id(self): mb_id = "mb-id" album = Album(musicbrainz_id=mb_id) assert album.musicbrainz_id == mb_id - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): album.musicbrainz_id = None def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Album(foo="baz") def test_repr_without_artists(self): - assert "Album(name='name', uri='uri')" == repr( - Album(uri="uri", name="name") - ) + assert repr(Album(uri="uri", name="name")) == "Album(name='name', uri='uri')" def test_repr_with_artists(self): assert ( - "Album(artists=[Artist(name='foo')], name='name', uri='uri')" - == repr(Album(uri="uri", name="name", artists=[Artist(name="foo")])) + repr(Album(uri="uri", name="name", artists=[Artist(name="foo")])) + == "Album(artists=[Artist(name='foo')], name='name', uri='uri')" ) def test_serialize_without_artists(self): @@ -568,21 +566,21 @@ def test_uri(self): uri = "an_uri" track = Track(uri=uri) assert track.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.uri = None def test_name(self): name = "a name" track = Track(name=name) assert track.name == name - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.name = None def test_artists(self): artists = [Artist(name="name1"), Artist(name="name2")] track = Track(artists=artists) assert set(track.artists) == set(artists) - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.artists = None def test_artists_none(self): @@ -592,7 +590,7 @@ def test_composers(self): artists = [Artist(name="name1"), Artist(name="name2")] track = Track(composers=artists) assert set(track.composers) == set(artists) - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.composers = None def test_composers_none(self): @@ -602,7 +600,7 @@ def test_performers(self): artists = [Artist(name="name1"), Artist(name="name2")] track = Track(performers=artists) assert set(track.performers) == set(artists) - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.performers = None def test_performers_none(self): @@ -612,64 +610,62 @@ def test_album(self): album = Album() track = Track(album=album) assert track.album == album - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.album = None def test_track_no(self): track_no = 7 track = 
Track(track_no=track_no) assert track.track_no == track_no - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.track_no = None def test_disc_no(self): disc_no = 2 track = Track(disc_no=disc_no) assert track.disc_no == disc_no - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.disc_no = None def test_date(self): date = "1977-01-01" track = Track(date=date) assert track.date == date - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.date = None def test_length(self): length = 137000 track = Track(length=length) assert track.length == length - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.length = None def test_bitrate(self): bitrate = 160 track = Track(bitrate=bitrate) assert track.bitrate == bitrate - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.bitrate = None def test_musicbrainz_id(self): mb_id = "mb-id" track = Track(musicbrainz_id=mb_id) assert track.musicbrainz_id == mb_id - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): track.musicbrainz_id = None def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Track(foo="baz") def test_repr_without_artists(self): - assert "Track(name='name', uri='uri')" == repr( - Track(uri="uri", name="name") - ) + assert repr(Track(uri="uri", name="name")) == "Track(name='name', uri='uri')" def test_repr_with_artists(self): assert ( - "Track(artists=[Artist(name='foo')], name='name', uri='uri')" - == repr(Track(uri="uri", name="name", artists=[Artist(name="foo")])) + repr(Track(uri="uri", name="name", artists=[Artist(name="foo")])) + == "Track(artists=[Artist(name='foo')], name='name', uri='uri')" ) def test_serialize_without_artists(self): @@ -911,18 +907,18 @@ def test_tlid(self): tlid = 123 tl_track = TlTrack(tlid=tlid) assert tl_track.tlid == tlid - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): tl_track.tlid = None def test_track(self): track = Track() tl_track = TlTrack(track=track) assert tl_track.track == track - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): tl_track.track = None def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): TlTrack(foo="baz") def test_positional_args(self): @@ -941,8 +937,9 @@ def test_iteration(self): assert track2 == track def test_repr(self): - assert "TlTrack(tlid=123, track=Track(uri='uri'))" == repr( - TlTrack(tlid=123, track=Track(uri="uri")) + assert ( + repr(TlTrack(tlid=123, track=Track(uri="uri"))) + == "TlTrack(tlid=123, track=Track(uri='uri'))" ) def test_serialize(self): @@ -990,21 +987,21 @@ def test_uri(self): uri = "an_uri" playlist = Playlist(uri=uri) assert playlist.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): playlist.uri = None def test_name(self): name = "a name" playlist = Playlist(name=name) assert playlist.name == name - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): playlist.name = None def test_tracks(self): tracks = [Track(), Track(), Track()] playlist = Playlist(tracks=tracks) assert list(playlist.tracks) == tracks - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): playlist.tracks = None def test_length(self): @@ -1016,7 +1013,7 @@ def test_last_modified(self): last_modified = 1390942873000 playlist = 
Playlist(last_modified=last_modified) assert playlist.last_modified == last_modified - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): playlist.last_modified = None def test_with_new_uri(self): @@ -1082,20 +1079,18 @@ def test_with_new_last_modified(self): assert new_playlist.last_modified == new_last_modified def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): Playlist(foo="baz") def test_repr_without_tracks(self): - assert "Playlist(name='name', uri='uri')" == repr( - Playlist(uri="uri", name="name") + assert ( + repr(Playlist(uri="uri", name="name")) == "Playlist(name='name', uri='uri')" ) def test_repr_with_tracks(self): assert ( - "Playlist(name='name', tracks=[Track(name='foo')], uri='uri')" - == repr( - Playlist(uri="uri", name="name", tracks=[Track(name="foo")]) - ) + repr(Playlist(uri="uri", name="name", tracks=[Track(name="foo")])) + == "Playlist(name='name', tracks=[Track(name='foo')], uri='uri')" ) def test_serialize_without_tracks(self): @@ -1149,12 +1144,8 @@ def test_eq_last_modified(self): def test_eq(self): tracks = [Track()] - playlist1 = Playlist( - uri="uri", name="name", tracks=tracks, last_modified=1 - ) - playlist2 = Playlist( - uri="uri", name="name", tracks=tracks, last_modified=1 - ) + playlist1 = Playlist(uri="uri", name="name", tracks=tracks, last_modified=1) + playlist2 = Playlist(uri="uri", name="name", tracks=tracks, last_modified=1) assert playlist1 == playlist2 assert hash(playlist1) == hash(playlist2) @@ -1210,36 +1201,36 @@ def test_uri(self): uri = "an_uri" result = SearchResult(uri=uri) assert result.uri == uri - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.uri = None def test_tracks(self): tracks = [Track(), Track(), Track()] result = SearchResult(tracks=tracks) assert list(result.tracks) == tracks - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.tracks = None def test_artists(self): artists = [Artist(), Artist(), Artist()] result = SearchResult(artists=artists) assert list(result.artists) == artists - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.artists = None def test_albums(self): albums = [Album(), Album(), Album()] result = SearchResult(albums=albums) assert list(result.albums) == albums - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): result.albums = None def test_invalid_kwarg(self): - with self.assertRaises(TypeError): + with pytest.raises(TypeError): SearchResult(foo="baz") def test_repr_without_results(self): - assert "SearchResult(uri='uri')" == repr(SearchResult(uri="uri")) + assert repr(SearchResult(uri="uri")) == "SearchResult(uri='uri')" def test_serialize_without_results(self): self.assertDictEqual( diff --git a/tests/stream/test_library.py b/tests/stream/test_library.py index 24af43506b..9cb2ac0d66 100644 --- a/tests/stream/test_library.py +++ b/tests/stream/test_library.py @@ -1,7 +1,6 @@ from unittest import mock import pytest - from mopidy.internal import path from mopidy.models import Track from mopidy.stream import actor @@ -9,7 +8,7 @@ from tests import path_to_data_dir -@pytest.fixture +@pytest.fixture() def config(): return { "proxy": {}, @@ -22,12 +21,12 @@ def config(): } -@pytest.fixture +@pytest.fixture() def audio(): return mock.Mock() -@pytest.fixture +@pytest.fixture() def track_uri(): return path.path_to_uri(path_to_data_dir("song1.wav")) diff --git a/tests/stream/test_playback.py 
b/tests/stream/test_playback.py index 34e63f2c1e..1846cb98e7 100644 --- a/tests/stream/test_playback.py +++ b/tests/stream/test_playback.py @@ -1,11 +1,10 @@ import logging -import os +from pathlib import Path from unittest import mock import pytest import requests.exceptions import responses - from mopidy import exceptions from mopidy.audio import scan from mopidy.stream import actor @@ -20,7 +19,7 @@ """.strip() -@pytest.fixture +@pytest.fixture() def config(): return { "proxy": {}, @@ -33,24 +32,24 @@ def config(): } -@pytest.fixture +@pytest.fixture() def audio(): return mock.Mock() -@pytest.fixture +@pytest.fixture() def scanner(): patcher = mock.patch.object(scan, "Scanner") yield patcher.start()() patcher.stop() -@pytest.fixture +@pytest.fixture() def backend(audio, config, scanner): return actor.StreamBackend(audio=audio, config=config) -@pytest.fixture +@pytest.fixture() def provider(backend): return backend.playback @@ -69,10 +68,7 @@ def test_audio_stream_returns_same_uri(self, scanner, provider): assert result == STREAM_URI @responses.activate - def test_playable_ogg_stream_is_not_considered_a_playlist( - self, scanner, provider - ): - + def test_playable_ogg_stream_is_not_considered_a_playlist(self, scanner, provider): scanner.scan.side_effect = [ # Set playable to True to ignore detection as possible playlist mock.Mock(mime="application/ogg", playable=True), @@ -85,7 +81,6 @@ def test_playable_ogg_stream_is_not_considered_a_playlist( @responses.activate def test_text_playlist_with_mpeg_stream(self, scanner, provider, caplog): - caplog.set_level(logging.DEBUG) scanner.scan.side_effect = [ # Scanning playlist @@ -112,10 +107,7 @@ def test_text_playlist_with_mpeg_stream(self, scanner, provider, caplog): assert f"Unwrapping stream from URI: {PLAYLIST_URI}" in caplog.text assert f"Parsed playlist ({PLAYLIST_URI})" in caplog.text assert f"Unwrapping stream from URI: {STREAM_URI}" in caplog.text - assert ( - f"Unwrapped potential audio/mpeg stream: {STREAM_URI}" - in caplog.text - ) + assert f"Unwrapped potential audio/mpeg stream: {STREAM_URI}" in caplog.text # Check proper Requests session setup assert ( @@ -148,10 +140,7 @@ def test_xml_playlist_with_mpeg_stream(self, scanner, provider): assert result == STREAM_URI @responses.activate - def test_scan_fails_but_playlist_parsing_succeeds( - self, scanner, provider, caplog - ): - + def test_scan_fails_but_playlist_parsing_succeeds(self, scanner, provider, caplog): caplog.set_level(logging.DEBUG) scanner.scan.side_effect = [ # Scanning playlist @@ -171,17 +160,11 @@ def test_scan_fails_but_playlist_parsing_succeeds( assert f"Unwrapping stream from URI: {PLAYLIST_URI}" in caplog.text assert f"GStreamer failed scanning URI ({PLAYLIST_URI})" in caplog.text assert f"Parsed playlist ({PLAYLIST_URI})" in caplog.text - assert ( - f"Unwrapped potential audio/mpeg stream: {STREAM_URI}" - in caplog.text - ) + assert f"Unwrapped potential audio/mpeg stream: {STREAM_URI}" in caplog.text assert result == STREAM_URI @responses.activate - def test_scan_fails_and_playlist_parsing_fails( - self, scanner, provider, caplog - ): - + def test_scan_fails_and_playlist_parsing_fails(self, scanner, provider, caplog): caplog.set_level(logging.DEBUG) scanner.scan.side_effect = exceptions.ScannerError("some failure") responses.add( @@ -245,9 +228,7 @@ def test_playlist_references_itself(self, scanner, provider, caplog): assert result is None @responses.activate - def test_playlist_with_relative_mpeg_stream( - self, scanner, provider, caplog - ): + def 
test_playlist_with_relative_mpeg_stream(self, scanner, provider, caplog): caplog.set_level(logging.DEBUG) scanner.scan.side_effect = [ # Scanning playlist @@ -258,7 +239,7 @@ def test_playlist_with_relative_mpeg_stream( responses.add( responses.GET, PLAYLIST_URI, - body=BODY.replace(STREAM_URI, os.path.basename(STREAM_URI)), + body=BODY.replace(STREAM_URI, Path(STREAM_URI).name), content_type="audio/x-mpegurl", ) @@ -272,6 +253,6 @@ def test_playlist_with_relative_mpeg_stream( assert ( f"Parsed playlist ({PLAYLIST_URI}) and found new URI: " - f"{os.path.basename(STREAM_URI)}" + f"{Path(STREAM_URI).name}" ) in caplog.text assert f"Unwrapping stream from URI: {STREAM_URI}" in caplog.text diff --git a/tests/test_commands.py b/tests/test_commands.py index af9a501929..32fed90a61 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -2,6 +2,7 @@ import unittest from unittest import mock +import pytest from mopidy import commands @@ -19,21 +20,21 @@ def test_empty_override(self): assert expected == commands.config_override_type("section/key= ") def test_invalid_override(self): - with self.assertRaises(argparse.ArgumentTypeError): + with pytest.raises(argparse.ArgumentTypeError): commands.config_override_type("section/key") - with self.assertRaises(argparse.ArgumentTypeError): + with pytest.raises(argparse.ArgumentTypeError): commands.config_override_type("section=") - with self.assertRaises(argparse.ArgumentTypeError): + with pytest.raises(argparse.ArgumentTypeError): commands.config_override_type("section") class CommandParsingTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.exit_patcher = mock.patch.object(commands.Command, "exit") self.exit_mock = self.exit_patcher.start() self.exit_mock.side_effect = SystemExit - def tearDown(self): # noqa: N802 + def tearDown(self): self.exit_patcher.stop() def test_command_parsing_returns_namespace(self): @@ -47,12 +48,12 @@ def test_command_parsing_does_not_contain_args(self): def test_unknown_options_bails(self): cmd = commands.Command() - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["--foobar"]) def test_invalid_sub_command_bails(self): cmd = commands.Command() - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["foo"]) def test_command_arguments(self): @@ -122,7 +123,7 @@ def test_invalid_type(self): cmd = commands.Command() cmd.add_argument("--bar", type=int) - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["--bar", "zero"], prog="foo") self.exit_mock.assert_called_once_with( @@ -138,14 +139,14 @@ def test_command_error_usage_prog(self, argv_mock): cmd = commands.Command() cmd.add_argument("--bar", required=True) - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse([]) self.exit_mock.assert_called_once_with( mock.ANY, mock.ANY, "usage: foo --bar BAR" ) self.exit_mock.reset_mock() - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse([], prog="baz") self.exit_mock.assert_called_once_with( @@ -156,7 +157,7 @@ def test_missing_required(self): cmd = commands.Command() cmd.add_argument("--bar", required=True) - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse([], prog="foo") self.exit_mock.assert_called_once_with( @@ -169,7 +170,7 @@ def test_missing_positionals(self): cmd = commands.Command() cmd.add_argument("bar") - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse([], prog="foo") 
self.exit_mock.assert_called_once_with( @@ -185,7 +186,7 @@ def test_missing_positionals_subcommand(self): cmd = commands.Command() cmd.add_child("bar", child) - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["bar"], prog="foo") self.exit_mock.assert_called_once_with( @@ -197,7 +198,7 @@ def test_missing_positionals_subcommand(self): def test_unknown_command(self): cmd = commands.Command() - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["--help"], prog="foo") self.exit_mock.assert_called_once_with( @@ -208,7 +209,7 @@ def test_invalid_subcommand(self): cmd = commands.Command() cmd.add_child("baz", commands.Command()) - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["bar"], prog="foo") self.exit_mock.assert_called_once_with( @@ -249,7 +250,7 @@ def test_help_action_works(self): cmd.add_argument("-h", action="help") cmd.format_help = mock.Mock() - with self.assertRaises(SystemExit): + with pytest.raises(SystemExit): cmd.parse(["-h"]) cmd.format_help.assert_called_once_with(mock.ANY) @@ -261,32 +262,32 @@ class UsageTest(unittest.TestCase): def test_prog_name_default_and_override(self, argv_mock): argv_mock.__getitem__.return_value = "/usr/bin/foo" cmd = commands.Command() - assert "usage: foo" == cmd.format_usage().strip() - assert "usage: baz" == cmd.format_usage("baz").strip() + assert cmd.format_usage().strip() == "usage: foo" + assert cmd.format_usage("baz").strip() == "usage: baz" def test_basic_usage(self): cmd = commands.Command() - assert "usage: foo" == cmd.format_usage("foo").strip() + assert cmd.format_usage("foo").strip() == "usage: foo" cmd.add_argument("-h", "--help", action="store_true") - assert "usage: foo [-h]" == cmd.format_usage("foo").strip() + assert cmd.format_usage("foo").strip() == "usage: foo [-h]" cmd.add_argument("bar") - assert "usage: foo [-h] bar" == cmd.format_usage("foo").strip() + assert cmd.format_usage("foo").strip() == "usage: foo [-h] bar" def test_nested_usage(self): child = commands.Command() cmd = commands.Command() cmd.add_child("bar", child) - assert "usage: foo" == cmd.format_usage("foo").strip() - assert "usage: foo bar" == cmd.format_usage("foo bar").strip() + assert cmd.format_usage("foo").strip() == "usage: foo" + assert cmd.format_usage("foo bar").strip() == "usage: foo bar" cmd.add_argument("-h", "--help", action="store_true") - assert "usage: foo bar" == child.format_usage("foo bar").strip() + assert child.format_usage("foo bar").strip() == "usage: foo bar" child.add_argument("-h", "--help", action="store_true") - assert "usage: foo bar [-h]" == child.format_usage("foo bar").strip() + assert child.format_usage("foo bar").strip() == "usage: foo bar [-h]" class HelpTest(unittest.TestCase): @@ -294,31 +295,23 @@ class HelpTest(unittest.TestCase): def test_prog_name_default_and_override(self, argv_mock): argv_mock.__getitem__.return_value = "/usr/bin/foo" cmd = commands.Command() - assert "usage: foo" == cmd.format_help().strip() - assert "usage: bar" == cmd.format_help("bar").strip() + assert cmd.format_help().strip() == "usage: foo" + assert cmd.format_help("bar").strip() == "usage: bar" def test_command_without_documenation_or_options(self): cmd = commands.Command() - assert "usage: bar" == cmd.format_help("bar").strip() + assert cmd.format_help("bar").strip() == "usage: bar" def test_command_with_option(self): cmd = commands.Command() - cmd.add_argument( - "-h", "--help", action="store_true", help="show this message" - ) + 
cmd.add_argument("-h", "--help", action="store_true", help="show this message") - expected = ( - "usage: foo [-h]\n\n" - "OPTIONS:\n\n" - " -h, --help show this message" - ) + expected = "usage: foo [-h]\n\nOPTIONS:\n\n -h, --help show this message" assert expected == cmd.format_help("foo").strip() def test_command_with_option_and_positional(self): cmd = commands.Command() - cmd.add_argument( - "-h", "--help", action="store_true", help="show this message" - ) + cmd.add_argument("-h", "--help", action="store_true", help="show this message") cmd.add_argument("bar", help="some help text") expected = ( @@ -333,17 +326,13 @@ def test_command_with_documentation(self): cmd = commands.Command() cmd.help = "some text about everything this command does." - expected = ( - "usage: foo\n\n" "some text about everything this command does." - ) + expected = "usage: foo\n\nsome text about everything this command does." assert expected == cmd.format_help("foo").strip() def test_command_with_documentation_and_option(self): cmd = commands.Command() cmd.help = "some text about everything this command does." - cmd.add_argument( - "-h", "--help", action="store_true", help="show this message" - ) + cmd.add_argument("-h", "--help", action="store_true", help="show this message") expected = ( "usage: foo [-h]\n\n" @@ -358,7 +347,7 @@ def test_subcommand_without_documentation_or_options(self): cmd = commands.Command() cmd.add_child("bar", child) - assert "usage: foo" == cmd.format_help("foo").strip() + assert cmd.format_help("foo").strip() == "usage: foo" def test_subcommand_with_documentation_shown(self): child = commands.Command() @@ -471,9 +460,7 @@ def test_command_with_option_and_subcommand_with_option(self): child.add_argument("--test", help="the great and wonderful") cmd = commands.Command() - cmd.add_argument( - "-h", "--help", action="store_true", help="show this message" - ) + cmd.add_argument("-h", "--help", action="store_true", help="show this message") cmd.add_child("bar", child) expected = ( @@ -493,9 +480,7 @@ def test_command_with_options_doc_and_subcommand_with_option_and_doc(self): cmd = commands.Command() cmd.help = "some text about everything this command does." 
- cmd.add_argument( - "-h", "--help", action="store_true", help="show this message" - ) + cmd.add_argument("-h", "--help", action="store_true", help="show this message") cmd.add_child("bar", child) expected = ( @@ -513,8 +498,8 @@ def test_command_with_options_doc_and_subcommand_with_option_and_doc(self): class RunTest(unittest.TestCase): def test_default_implmentation_raises_error(self): - with self.assertRaises(NotImplementedError): - commands.Command().run() + with pytest.raises(NotImplementedError): + commands.Command().run(args=None, config=None) class RootCommandTest(unittest.TestCase): diff --git a/tests/test_ext.py b/tests/test_ext.py index 04468d7c97..604769aab7 100644 --- a/tests/test_ext.py +++ b/tests/test_ext.py @@ -1,9 +1,8 @@ import pathlib +from importlib import metadata from unittest import mock -import pkg_resources import pytest - from mopidy import config, exceptions, ext from tests import IsA, any_unicode @@ -13,6 +12,7 @@ class DummyExtension(ext.Extension): dist_name = "Mopidy-Foobar" ext_name = "foobar" version = "1.2.3" + location = __file__ def get_default_config(self): return "[foobar]\nenabled = true" @@ -22,7 +22,7 @@ def get_default_config(self): class TestExtension: - @pytest.fixture + @pytest.fixture() def extension(self): class MyExtension(ext.Extension): dist_name = "Mopidy-Foo" @@ -72,24 +72,25 @@ def test_get_data_dir_raises_error(self, extension): class TestLoadExtensions: - @pytest.fixture + @pytest.fixture() def iter_entry_points_mock(self, request): - patcher = mock.patch("pkg_resources.iter_entry_points") + patcher = mock.patch.object(metadata, "entry_points") iter_entry_points = patcher.start() iter_entry_points.return_value = [] yield iter_entry_points patcher.stop() + @pytest.fixture() + def mock_entry_point(self, iter_entry_points_mock): + entry_point = mock.Mock() + entry_point.load = mock.Mock(return_value=DummyExtension) + iter_entry_points_mock.return_value = [entry_point] + return entry_point + def test_no_extensions(self, iter_entry_points_mock): - iter_entry_points_mock.return_value = [] assert ext.load_extensions() == [] - def test_load_extensions(self, iter_entry_points_mock): - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = DummyExtension - - iter_entry_points_mock.return_value = [mock_entry_point] - + def test_load_extensions(self, mock_entry_point): expected = ext.ExtensionData( any_testextension, mock_entry_point, @@ -97,69 +98,47 @@ def test_load_extensions(self, iter_entry_points_mock): any_unicode, None, ) - assert ext.load_extensions() == [expected] - def test_gets_wrong_class(self, iter_entry_points_mock): - class WrongClass: - pass + def test_load_extensions_exception(self, mock_entry_point, caplog): + mock_entry_point.load.side_effect = Exception("test") + ext.load_extensions() + assert "Failed to load extension" in caplog.records[0].message - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = WrongClass + def test_load_extensions_real(self): + installed_extensions = ext.load_extensions() + assert len(installed_extensions) - iter_entry_points_mock.return_value = [mock_entry_point] + def test_gets_wrong_class(self, mock_entry_point): + class WrongClass: + pass + mock_entry_point.load.return_value = WrongClass assert ext.load_extensions() == [] - def test_gets_instance(self, iter_entry_points_mock): - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = DummyExtension() - - iter_entry_points_mock.return_value = [mock_entry_point] - + def 
test_gets_instance(self, mock_entry_point): + mock_entry_point.load.return_value = DummyExtension() assert ext.load_extensions() == [] - def test_creating_instance_fails(self, iter_entry_points_mock): - mock_extension = mock.Mock(spec=ext.Extension) - mock_extension.side_effect = Exception - - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = mock_extension - - iter_entry_points_mock.return_value = [mock_entry_point] - + def test_creating_instance_fails(self, mock_entry_point): + mock_entry_point.load.return_value = mock.Mock(side_effect=Exception) assert ext.load_extensions() == [] - def test_get_config_schema_fails(self, iter_entry_points_mock): - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = DummyExtension - - iter_entry_points_mock.return_value = [mock_entry_point] - + def test_get_config_schema_fails(self, mock_entry_point): with mock.patch.object(DummyExtension, "get_config_schema") as get: get.side_effect = Exception assert ext.load_extensions() == [] get.assert_called_once_with() - def test_get_default_config_fails(self, iter_entry_points_mock): - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = DummyExtension - - iter_entry_points_mock.return_value = [mock_entry_point] - + def test_get_default_config_fails(self, mock_entry_point): with mock.patch.object(DummyExtension, "get_default_config") as get: get.side_effect = Exception assert ext.load_extensions() == [] get.assert_called_once_with() - def test_get_command_fails(self, iter_entry_points_mock): - mock_entry_point = mock.Mock() - mock_entry_point.resolve.return_value = DummyExtension - - iter_entry_points_mock.return_value = [mock_entry_point] - + def test_get_command_fails(self, mock_entry_point): with mock.patch.object(DummyExtension, "get_command") as get: get.side_effect = Exception @@ -168,40 +147,48 @@ def test_get_command_fails(self, iter_entry_points_mock): class TestValidateExtensionData: - @pytest.fixture + @pytest.fixture() def ext_data(self): extension = DummyExtension() - entry_point = mock.Mock() entry_point.name = extension.ext_name - - schema = extension.get_config_schema() - defaults = extension.get_default_config() - command = extension.get_command() - return ext.ExtensionData( - extension, entry_point, schema, defaults, command + extension, + entry_point, + extension.get_config_schema(), + extension.get_default_config(), + extension.get_command(), ) + def test_real(self): + for dist in ext.load_extensions(): + assert ext.validate_extension_data(dist) + + def test_ok(self, ext_data): + assert ext.validate_extension_data(ext_data) + def test_name_mismatch(self, ext_data): ext_data.entry_point.name = "barfoo" assert not ext.validate_extension_data(ext_data) def test_distribution_not_found(self, ext_data): - error = pkg_resources.DistributionNotFound - ext_data.entry_point.require.side_effect = error + error = metadata.PackageNotFoundError + ext_data.entry_point.load.side_effect = error assert not ext.validate_extension_data(ext_data) + @pytest.mark.skip("Version control missing in metadata") def test_version_conflict(self, ext_data): - error = pkg_resources.VersionConflict + error = metadata.PackageNotFoundError + ext_data.entry_point.require.side_effect = error + assert not ext.validate_extension_data(ext_data) ext_data.entry_point.require.side_effect = error assert not ext.validate_extension_data(ext_data) def test_entry_point_require_exception(self, ext_data): - ext_data.entry_point.require.side_effect = Exception + 
ext_data.entry_point.load.side_effect = Exception("Some extension error") # Hope that entry points are well behaved, so exception will bubble. - with pytest.raises(Exception): + with pytest.raises(Exception, match="Some extension error"): assert not ext.validate_extension_data(ext_data) def test_extenions_validate_environment_error(self, ext_data): @@ -249,7 +236,7 @@ def test_get_cache_dir(self, ext_data): with mock.patch.object(ext.path, "get_or_create_dir"): cache_dir = extension.get_cache_dir(config) - expected = pathlib.Path(core_cache_dir) / extension.ext_name + expected = pathlib.Path(core_cache_dir).resolve() / extension.ext_name assert cache_dir == expected def test_get_config_dir(self, ext_data): @@ -260,7 +247,7 @@ def test_get_config_dir(self, ext_data): with mock.patch.object(ext.path, "get_or_create_dir"): config_dir = extension.get_config_dir(config) - expected = pathlib.Path(core_config_dir) / extension.ext_name + expected = pathlib.Path(core_config_dir).resolve() / extension.ext_name assert config_dir == expected def test_get_data_dir(self, ext_data): @@ -271,5 +258,15 @@ def test_get_data_dir(self, ext_data): with mock.patch.object(ext.path, "get_or_create_dir"): data_dir = extension.get_data_dir(config) - expected = pathlib.Path(core_data_dir) / extension.ext_name + expected = pathlib.Path(core_data_dir).resolve() / extension.ext_name assert data_dir == expected + + +class TestRegistry: + def test_registry(self): + reg = ext.Registry() + assert not len(reg) + + # __iter__ is implemented + for _entry in reg: + pass diff --git a/tests/test_help.py b/tests/test_help.py index 4a52b175ef..058f7b0784 100644 --- a/tests/test_help.py +++ b/tests/test_help.py @@ -1,26 +1,19 @@ -import os import subprocess import sys import unittest +from pathlib import Path import mopidy class HelpTest(unittest.TestCase): def test_help_has_mopidy_options(self): - mopidy_dir = os.path.dirname(mopidy.__file__) - args = [sys.executable, mopidy_dir, "--help"] + mopidy_dir = Path(mopidy.__file__).parent + args = [sys.executable, "-m", "mopidy", "--help"] process = subprocess.Popen( args, - env={ - "PYTHONPATH": ":".join( - [ - os.path.join(mopidy_dir, ".."), - os.environ.get("PYTHONPATH", ""), - ] - ) - }, stdout=subprocess.PIPE, + cwd=mopidy_dir.parent, ) output = process.communicate()[0] assert b"--version" in output diff --git a/tests/test_httpclient.py b/tests/test_httpclient.py index f32ebcdbf9..49ed93c47b 100644 --- a/tests/test_httpclient.py +++ b/tests/test_httpclient.py @@ -1,12 +1,11 @@ import re import pytest - from mopidy import httpclient @pytest.mark.parametrize( - "config,expected", + ("config", "expected"), [ ({}, None), ({"hostname": ""}, None), @@ -36,7 +35,7 @@ def test_format_proxy_without_auth(): @pytest.mark.parametrize( - "name,expected", + ("name", "expected"), [ (None, r"^Mopidy/[^ ]+ CPython|/[^ ]+$"), ("Foo", r"^Foo Mopidy/[^ ]+ CPython|/[^ ]+$"), diff --git a/tests/test_mixer.py b/tests/test_mixer.py index 8a97475bfa..d873964f56 100644 --- a/tests/test_mixer.py +++ b/tests/test_mixer.py @@ -5,7 +5,7 @@ class MixerListenerTest(unittest.TestCase): - def setUp(self): # noqa: N802 + def setUp(self): self.listener = mixer.MixerListener() def test_on_event_forwards_to_specific_handler(self): diff --git a/tests/test_version.py b/tests/test_version.py deleted file mode 100644 index 02109eec69..0000000000 --- a/tests/test_version.py +++ /dev/null @@ -1,9 +0,0 @@ -import unittest -from distutils.version import StrictVersion - -from mopidy import __version__ - - -class 
VersionTest(unittest.TestCase): - def test_current_version_is_parsable_as_a_strict_version_number(self): - StrictVersion(__version__) diff --git a/tox.ini b/tox.ini index c8a4308999..4dfbc294b2 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py37, py38, py39, check-manifest, docs, flake8, mypy +envlist = py311, py312, docs, pyright, ruff-lint, ruff-format [testenv] sitepackages = true @@ -10,26 +10,36 @@ commands = --cov=mopidy --cov-report=term-missing \ {posargs} -[testenv:check-manifest] -deps = .[lint] -commands = python -m check_manifest - [testenv:docs] deps = .[docs] changedir = docs commands = python -m sphinx -b html -d {envtmpdir}/doctrees . {envtmpdir}/html -[testenv:flake8] -deps = .[lint] -commands = python -m flake8 --show-source --statistics - [testenv:linkcheck] deps = .[docs] changedir = docs commands = python -m sphinx -b linkcheck -d {envtmpdir}/doctrees . {envtmpdir}/html -[testenv:mypy] +[testenv:pyright] +deps = .[typing] +commands = python -m pyright src + +[testenv:ruff-lint] +deps = .[lint] +commands = python -m ruff check . + +[testenv:ruff-format] +deps = .[lint] +commands = python -m ruff format . + +[testenv:ci] deps = - .[lint] - tornado >= 6 # First version to ship type information -commands = python -m mypy mopidy + {[testenv]deps} + {[testenv:pyright]deps} + {[testenv:ruff-lint]deps} + {[testenv:ruff-format]deps} +commands = + {[testenv]commands} + {[testenv:pyright]commands} + {[testenv:ruff-lint]commands} + {[testenv:ruff-format]commands}
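
One consequence of moving tox and CI to Python 3.11/3.12 is visible in the deleted tests/test_version.py earlier in this diff: distutils, and with it distutils.version.StrictVersion, is removed in Python 3.12, so the old check could not survive the upgrade. If an equivalent sanity check is ever wanted again, a minimal sketch using the third-party packaging library could look like the following (packaging is not added as a dependency anywhere in this diff, so this is purely hypothetical):

    from packaging.version import Version

    from mopidy import __version__

    def test_current_version_is_parsable():
        # Version() raises packaging.version.InvalidVersion for malformed strings.
        Version(__version__)
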