Finish out effort of adding and enabling blockbuster in tests #5735

Merged · 4 commits · Mar 7, 2025
6 changes: 4 additions & 2 deletions supervisor/backups/backup.py
@@ -601,7 +601,9 @@ async def _addon_save(self, addon: Addon) -> asyncio.Task | None:
ATTR_SLUG: addon.slug,
ATTR_NAME: addon.name,
ATTR_VERSION: addon.version,
ATTR_SIZE: addon_file.size,
# Bug - addon_file.size used to give us this information
# It always returns 0 in current securetar. Skipping until fixed
ATTR_SIZE: 0,
}
)

@@ -640,7 +642,7 @@ async def _addon_restore(self, addon_slug: str) -> asyncio.Task | None:
)

# If exists inside backup
if not addon_file.path.exists():
if not await self.sys_run_in_executor(addon_file.path.exists):
raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)

# Perform a restore
37 changes: 32 additions & 5 deletions supervisor/docker/manager.py
@@ -1,6 +1,8 @@
"""Manager for Supervisor Docker."""

import asyncio
from contextlib import suppress
from functools import partial
from ipaddress import IPv4Address
import logging
import os
@@ -105,19 +107,42 @@ class DockerAPI:

def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.docker: DockerClient = DockerClient(
base_url=f"unix:/{str(SOCKET_DOCKER)}", version="auto", timeout=900
)
self.network: DockerNetwork = DockerNetwork(self.docker)
self._info: DockerInfo = DockerInfo.new(self.docker.info())
self._docker: DockerClient | None = None
self._network: DockerNetwork | None = None
self._info: DockerInfo | None = None
self.config: DockerConfig = DockerConfig()
self._monitor: DockerMonitor = DockerMonitor(coresys)

async def load_config(self) -> Self:
"""Load config in executor."""
self._docker = await asyncio.get_running_loop().run_in_executor(
None,
partial(
DockerClient,
base_url=f"unix:/{str(SOCKET_DOCKER)}",
version="auto",
timeout=900,
),
)
self._network = DockerNetwork(self._docker)
Reviewer comment (Contributor · 💡 Verification agent · ❓ Verification inconclusive):
Wrap Docker client creation in a try-except block.

Creating the DockerClient inside run_in_executor is a good step to avoid blocking the event loop. However, consider wrapping the instantiation in supervisor/docker/manager.py (lines 116–127) in a try-except block that catches DockerException or requests.RequestException and re-raises a more descriptive error (e.g., a custom DockerAPIError) if Docker is unavailable or the socket is misconfigured. This would bring the exception handling in line with how Docker-related exceptions are handled elsewhere in the file.

  • Action Required:
    • Surround the asyncio.get_running_loop().run_in_executor(...) call with a try-except block.
    • Catch exceptions like DockerException and requests.RequestException and re-raise them with a more descriptive error (see the sketch below).
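A minimal sketch of what this could look like, not part of the PR: DockerAPIError and its (message, logger) call style are assumptions based on the comment and the error-raising pattern used elsewhere in this diff; DockerException and requests.RequestException are the exception types raised by docker-py and requests. The remaining names (asyncio, partial, DockerClient, SOCKET_DOCKER, DockerNetwork, DockerInfo, _LOGGER, Self) come from the module's existing imports.

# Sketch only: error type and message are placeholders, not the final implementation.
from docker.errors import DockerException
import requests

from supervisor.exceptions import DockerAPIError


async def load_config(self) -> Self:
    """Load config in executor."""
    try:
        # Create the client off the event loop; creation can block on the socket
        self._docker = await asyncio.get_running_loop().run_in_executor(
            None,
            partial(
                DockerClient,
                base_url=f"unix:/{str(SOCKET_DOCKER)}",
                version="auto",
                timeout=900,
            ),
        )
    except (DockerException, requests.RequestException) as err:
        # Re-raise with a descriptive, Supervisor-specific error
        raise DockerAPIError(
            f"Can't initialize Docker API client: {err}", _LOGGER.error
        ) from err
    self._network = DockerNetwork(self._docker)
    self._info = DockerInfo.new(self.docker.info())
    await self.config.read_data()
    return self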

self._info = DockerInfo.new(self.docker.info())
await self.config.read_data()
return self

@property
def docker(self) -> DockerClient:
"""Get docker API client."""
if not self._docker:
raise RuntimeError("Docker API Client not initialized!")
return self._docker

@property
def network(self) -> DockerNetwork:
"""Get Docker network."""
if not self._network:
raise RuntimeError("Docker Network not initialized!")
return self._network

@property
def images(self) -> ImageCollection:
"""Return API images."""
@@ -136,6 +161,8 @@ def api(self) -> APIClient:
@property
def info(self) -> DockerInfo:
"""Return local docker info."""
if not self._info:
raise RuntimeError("Docker Info not initialized!")
return self._info

@property
14 changes: 10 additions & 4 deletions supervisor/host/apparmor.py
@@ -54,10 +54,16 @@ def _get_profile(self, profile_name: str) -> Path:

async def load(self) -> None:
"""Load available profiles."""
for content in self.sys_config.path_apparmor.iterdir():
if not content.is_file():
continue
self._profiles.add(content.name)

def find_profiles() -> set[str]:
profiles: set[str] = set()
for content in self.sys_config.path_apparmor.iterdir():
if not content.is_file():
continue
profiles.add(content.name)
return profiles

self._profiles = await self.sys_run_in_executor(find_profiles)

_LOGGER.info("Loading AppArmor Profiles: %s", self._profiles)

7 changes: 5 additions & 2 deletions supervisor/mounts/manager.py
@@ -292,9 +292,12 @@ async def _bind_mount(self, mount: Mount, where: PurePath) -> None:
where.as_posix(),
)
path = self.sys_config.path_emergency / mount.name
if not path.exists():
path.mkdir(mode=0o444)

def emergency_mkdir():
if not path.exists():
path.mkdir(mode=0o444)

await self.sys_run_in_executor(emergency_mkdir)
path = self.sys_config.local_to_extern_path(path)

self._bound_mounts[mount.name] = bound_mount = BoundMount(
15 changes: 9 additions & 6 deletions supervisor/resolution/checks/core_security.py
@@ -30,9 +30,7 @@ async def run_check(self) -> None:
# Security issue < 2021.1.5 & Custom components
try:
if self.sys_homeassistant.version < AwesomeVersion("2021.1.5"):
if Path(
self.sys_config.path_homeassistant, "custom_components"
).exists():
if await self.sys_run_in_executor(self._custom_components_exists):
self.sys_resolution.create_issue(
IssueType.SECURITY,
ContextType.CORE,
@@ -49,9 +47,14 @@ async def approve_check(self, reference: str | None = None) -> bool:
return False
except AwesomeVersionException:
return True
if not Path(self.sys_config.path_homeassistant, "custom_components").exists():
return False
return True
return await self.sys_run_in_executor(self._custom_components_exists)

def _custom_components_exists(self) -> bool:
"""Return true if custom components folder exists.

Must be run in executor.
"""
return Path(self.sys_config.path_homeassistant, "custom_components").exists()

@property
def issue(self) -> IssueType:
38 changes: 23 additions & 15 deletions tests/backups/test_manager.py
@@ -1679,15 +1679,17 @@ async def test_skip_homeassistant_database(
coresys.homeassistant.backups_exclude_database = exclude_db_setting

test_file = coresys.config.path_homeassistant / "configuration.yaml"
(test_db := coresys.config.path_homeassistant / "home-assistant_v2.db").touch()
(
test_db_wal := coresys.config.path_homeassistant / "home-assistant_v2.db-wal"
).touch()
(
test_db_shm := coresys.config.path_homeassistant / "home-assistant_v2.db-shm"
).touch()
test_db = coresys.config.path_homeassistant / "home-assistant_v2.db"
test_db_wal = coresys.config.path_homeassistant / "home-assistant_v2.db-wal"
test_db_shm = coresys.config.path_homeassistant / "home-assistant_v2.db-shm"

write_json_file(test_file, {"default_config": {}})
def setup_1():
test_db.touch()
test_db_wal.touch()
test_db_shm.touch()
write_json_file(test_file, {"default_config": {}})

await coresys.run_in_executor(setup_1)

kwargs = {} if exclude_db_setting else {"homeassistant_exclude_database": True}
if partial_backup:
@@ -1697,20 +1699,26 @@ async def test_skip_homeassistant_database(
else:
backup: Backup = await coresys.backups.do_backup_full(**kwargs)

test_file.unlink()
write_json_file(test_db, {"hello": "world"})
write_json_file(test_db_wal, {"hello": "world"})
def setup_2():
test_file.unlink()
write_json_file(test_db, {"hello": "world"})
write_json_file(test_db_wal, {"hello": "world"})

await coresys.run_in_executor(setup_2)

with (
patch.object(HomeAssistantCore, "update"),
patch.object(HomeAssistantCore, "start"),
):
await coresys.backups.do_restore_partial(backup, homeassistant=True)

assert read_json_file(test_file) == {"default_config": {}}
assert read_json_file(test_db) == {"hello": "world"}
assert read_json_file(test_db_wal) == {"hello": "world"}
assert not test_db_shm.exists()
def test_assertions():
assert read_json_file(test_file) == {"default_config": {}}
assert read_json_file(test_db) == {"hello": "world"}
assert read_json_file(test_db_wal) == {"hello": "world"}
assert not test_db_shm.exists()

await coresys.run_in_executor(test_assertions)


@pytest.mark.usefixtures("tmp_supervisor_data", "path_extern")
19 changes: 7 additions & 12 deletions tests/conftest.py
@@ -64,21 +64,16 @@
# pylint: disable=redefined-outer-name, protected-access


# This commented out code is left in intentionally
# Intent is to enable this for all tests at all times as an autouse fixture
# Findings from PR were growing too big so disabling temporarily to create a checkpoint
# @pytest.fixture(autouse=True)
def blockbuster(request: pytest.FixtureRequest) -> BlockBuster:
@pytest.fixture(autouse=True)
def blockbuster() -> BlockBuster:
"""Raise for blocking I/O in event loop."""
# Excluded modules doesn't seem to stop test code from raising for blocking I/O
# Defaulting to only scanning supervisor core code seems like the best we can do easily
# Added a parameter so we could potentially go module by module in test and eliminate blocking I/O
# Then we could tell it to scan everything by default. That will require more follow-up work
# Only scanning supervisor code for now as that's our primary interest
# This will still raise for tests that call utilities in supervisor code that block
# But it will ignore calls to libraries and such that do blocking I/O directly from tests
# Removing that would be nice but a todo for the future

# pylint: disable-next=contextmanager-generator-missing-cleanup
with blockbuster_ctx(
scanned_modules=getattr(request, "param", ["supervisor"])
) as bb:
with blockbuster_ctx(scanned_modules=["supervisor"]) as bb:
yield bb
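As background on what enabling this fixture buys (not part of the diff), a tiny standalone sketch follows; it assumes pytest-asyncio style async tests and uses blockbuster's default scanning scope rather than the supervisor-only scope configured above, so the test names and scope here are illustrative only.

# Hypothetical illustration: a blocking call made while the event loop is
# running is raised as an error instead of silently stalling the loop.
import time

import pytest
from blockbuster import BlockingError, blockbuster_ctx


@pytest.mark.asyncio
async def test_blocking_call_is_flagged():
    """time.sleep() inside a coroutine is detected by BlockBuster."""
    with blockbuster_ctx():  # default scope scans all calling code
        with pytest.raises(BlockingError):
            time.sleep(0.1)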


7 changes: 5 additions & 2 deletions tests/dbus/test_interface.py
@@ -1,5 +1,6 @@
"""Test dbus interface."""

import asyncio
from unittest.mock import patch

from dbus_fast.aio.message_bus import MessageBus
@@ -145,9 +146,11 @@ async def test_proxy_missing_properties_interface(dbus_session_bus: MessageBus):
proxy.object_path = DBUS_OBJECT_BASE
proxy.properties_interface = "test.no.properties.interface"

async def mock_introspect(*args, **kwargs):
def mock_introspect(*args, **kwargs):
"""Return introspection without properties."""
return load_fixture("test_no_properties_interface.xml")
return asyncio.get_running_loop().run_in_executor(
None, load_fixture, "test_no_properties_interface.xml"
)

with (
patch.object(MessageBus, "introspect", new=mock_introspect),
8 changes: 6 additions & 2 deletions tests/resolution/check/test_check.py
@@ -113,6 +113,10 @@ async def test_get_checks(coresys: CoreSys):

async def test_dynamic_check_loader(coresys: CoreSys):
"""Test dynamic check loader, this ensures that all checks have defined a setup function."""
coresys.resolution.check.load_modules()
for check in await coresys.run_in_executor(get_valid_modules, "checks"):

def load_modules():
coresys.resolution.check.load_modules()
return get_valid_modules("checks")

for check in await coresys.run_in_executor(load_modules):
assert check in coresys.resolution.check._checks
18 changes: 12 additions & 6 deletions tests/store/test_custom_repository.py
@@ -58,7 +58,9 @@ async def test_add_invalid_repository(coresys: CoreSys, store_manager: StoreMana
current + ["http://example.com"], add_with_errors=True
)

assert not store_manager.get_from_url("http://example.com").validate()
assert not await coresys.run_in_executor(
store_manager.get_from_url("http://example.com").validate
)

assert "http://example.com" in coresys.store.repository_urls
assert coresys.resolution.suggestions[-1].type == SuggestionType.EXECUTE_REMOVE
@@ -176,11 +178,15 @@ async def test_preinstall_valid_repository(
"""Test add core repository valid."""
with patch("supervisor.store.repository.Repository.load", return_value=None):
await store_manager.update_repositories(BUILTIN_REPOSITORIES)
assert store_manager.get("core").validate()
assert store_manager.get("local").validate()
assert store_manager.get("a0d7b954").validate()
assert store_manager.get("5c53de3b").validate()
assert store_manager.get("d5369777").validate()

def validate():
assert store_manager.get("core").validate()
assert store_manager.get("local").validate()
assert store_manager.get("a0d7b954").validate()
assert store_manager.get("5c53de3b").validate()
assert store_manager.get("d5369777").validate()

await coresys.run_in_executor(validate)


@pytest.mark.parametrize("use_update", [True, False])
8 changes: 6 additions & 2 deletions tests/store/test_store_manager.py
@@ -146,7 +146,9 @@ async def test_update_unavailable_addon(
):
"""Test updating addon when new version not available for system."""
addon_config = dict(
load_yaml_fixture("addons/local/ssh/config.yaml"),
await coresys.run_in_executor(
load_yaml_fixture, "addons/local/ssh/config.yaml"
),
version=AwesomeVersion("10.0.0"),
**config,
)
@@ -201,7 +203,9 @@ async def test_install_unavailable_addon(
):
"""Test updating addon when new version not available for system."""
addon_config = dict(
load_yaml_fixture("addons/local/ssh/config.yaml"),
await coresys.run_in_executor(
load_yaml_fixture, "addons/local/ssh/config.yaml"
),
version=AwesomeVersion("10.0.0"),
**config,
)
8 changes: 5 additions & 3 deletions tests/test_ingress.py
@@ -80,9 +80,11 @@ async def test_ingress_save_data(coresys: CoreSys, tmp_supervisor_data: Path):
)
await ingress.save_data()

assert config_file.exists()
data = read_json_file(config_file)
assert data == {
def get_config():
assert config_file.exists()
return read_json_file(config_file)

assert await coresys.run_in_executor(get_config) == {
"session": {session: ANY},
"session_data": {
session: {"user": {"id": "123", "displayname": "Test", "username": "test"}}
4 changes: 2 additions & 2 deletions tests/utils/test_apparmor.py
@@ -55,15 +55,15 @@ async def test_apparmor_multiple_profiles(caplog: pytest.LogCaptureFixture):
)


async def test_apparmor_profile_adjust(tmp_path: Path):
def test_apparmor_profile_adjust(tmp_path: Path):
"""Test apparmor profile adjust."""
profile_out = tmp_path / "apparmor_out.txt"
adjust_profile("test", get_fixture_path("apparmor_valid.txt"), profile_out)

assert profile_out.read_text(encoding="utf-8") == TEST_PROFILE


async def test_apparmor_profile_adjust_mediate(tmp_path: Path):
def test_apparmor_profile_adjust_mediate(tmp_path: Path):
"""Test apparmor profile adjust when name matches a flag."""
profile_out = tmp_path / "apparmor_out.txt"
adjust_profile("test", get_fixture_path("apparmor_valid_mediate.txt"), profile_out)
7 changes: 5 additions & 2 deletions tests/utils/test_dbus.py
@@ -1,5 +1,6 @@
"""Test dbus utility."""

import asyncio
from unittest.mock import AsyncMock, Mock, patch

from dbus_fast import ErrorType
@@ -48,9 +49,11 @@ async def fixture_test_service(dbus_session_bus: MessageBus) -> TestInterface:
async def test_missing_properties_interface(dbus_session_bus: MessageBus):
"""Test introspection missing properties interface."""

async def mock_introspect(*args, **kwargs):
def mock_introspect(*args, **kwargs):
"""Return introspection without properties."""
return load_fixture("test_no_properties_interface.xml")
return asyncio.get_running_loop().run_in_executor(
None, load_fixture, "test_no_properties_interface.xml"
)

with patch.object(MessageBus, "introspect", new=mock_introspect):
service = await DBus.connect(
5 changes: 4 additions & 1 deletion tests/utils/test_sentry.py
@@ -2,10 +2,13 @@

from unittest.mock import patch

import pytest

from supervisor.bootstrap import initialize_coresys


async def test_sentry_disabled_by_default(supervisor_name):
@pytest.mark.usefixtures("supervisor_name", "docker")
Author comment (@mdegat01, Contributor, Mar 7, 2025):
This is unrelated to the PR, this test is just broken in devcontainer right now. Mocking all docker connections with the docker fixture keeps it working there.

However on the bright side, it did find more blocking I/O. Turns out initializing the docker APIClient object does blocking I/O. Normally this part of setup is all mocked because we can't do that in devcontainer but running this test un-mocked found it 😆
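For readers who have not seen it, a very rough sketch of the kind of fixture the comment refers to; the real `docker` fixture in tests/conftest.py is more involved, and the patch target and names here are illustrative assumptions only.

# Hypothetical sketch, not the actual fixture: replace DockerAPI so tests never
# open the real Docker socket (and never hit the blocking APIClient init).
from unittest.mock import patch

import pytest


@pytest.fixture
def docker():
    """Mock all Docker connections."""
    with patch("supervisor.docker.manager.DockerAPI") as docker_api:
        yield docker_api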

async def test_sentry_disabled_by_default():
"""Test diagnostics off by default."""
with (
patch("supervisor.bootstrap.initialize_system"),