diff --git a/CODEOWNERS b/CODEOWNERS index cd7d26f38dc869..547845f94214dc 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -530,6 +530,8 @@ build.json @home-assistant/supervisor /tests/components/flo/ @dmulcahey /homeassistant/components/flume/ @ChrisMandich @bdraco @jeeftor /tests/components/flume/ @ChrisMandich @bdraco @jeeftor +/homeassistant/components/fluss/ @fluss +/tests/components/fluss/ @fluss /homeassistant/components/flux_led/ @icemanch /tests/components/flux_led/ @icemanch /homeassistant/components/forecast_solar/ @klaasnicolaas @frenck @@ -1695,8 +1697,8 @@ build.json @home-assistant/supervisor /tests/components/trafikverket_train/ @gjohansson-ST /homeassistant/components/trafikverket_weatherstation/ @gjohansson-ST /tests/components/trafikverket_weatherstation/ @gjohansson-ST -/homeassistant/components/transmission/ @engrbm87 @JPHutchins -/tests/components/transmission/ @engrbm87 @JPHutchins +/homeassistant/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp +/tests/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp /homeassistant/components/trend/ @jpbede /tests/components/trend/ @jpbede /homeassistant/components/triggercmd/ @rvmey diff --git a/homeassistant/components/anglian_water/config_flow.py b/homeassistant/components/anglian_water/config_flow.py index 5b870d41d3a7bf..ee8fbe4c59560c 100644 --- a/homeassistant/components/anglian_water/config_flow.py +++ b/homeassistant/components/anglian_water/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp import CookieJar from pyanglianwater import AnglianWater @@ -30,14 +30,11 @@ vol.Required(CONF_PASSWORD): selector.TextSelector( selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD) ), - vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(), } ) -async def validate_credentials( - auth: MSOB2CAuth, account_number: str -) -> str | MSOB2CAuth: +async def 
validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth: """Validate the provided credentials.""" try: await auth.send_login_request() @@ -46,6 +43,33 @@ async def validate_credentials( except Exception: _LOGGER.exception("Unexpected exception") return "unknown" + return auth + + +def humanize_account_data(account: dict) -> str: + """Convert an account data into a human-readable format.""" + if account["address"]["company_name"] != "": + return f"{account['account_number']} - {account['address']['company_name']}" + if account["address"]["building_name"] != "": + return f"{account['account_number']} - {account['address']['building_name']}" + return f"{account['account_number']} - {account['address']['postcode']}" + + +async def get_accounts(auth: MSOB2CAuth) -> list[selector.SelectOptionDict]: + """Retrieve the list of accounts associated with the authenticated user.""" + _aw = AnglianWater(authenticator=auth) + accounts = await _aw.api.get_associated_accounts() + return [ + selector.SelectOptionDict( + value=str(account["account_number"]), + label=humanize_account_data(account), + ) + for account in accounts["result"]["active"] + ] + + +async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2CAuth: + """Validate the provided account number.""" _aw = AnglianWater(authenticator=auth) try: await _aw.validate_smart_meter(account_number) @@ -57,36 +81,91 @@ async def validate_credentials( class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Anglian Water.""" + def __init__(self) -> None: + """Initialize the config flow.""" + self.authenticator: MSOB2CAuth | None = None + self.accounts: list[selector.SelectOptionDict] = [] + self.user_input: dict[str, Any] | None = None + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: - validation_response = await validate_credentials( - 
MSOB2CAuth( - username=user_input[CONF_USERNAME], - password=user_input[CONF_PASSWORD], - session=async_create_clientsession( - self.hass, - cookie_jar=CookieJar(quote_cookie=False), - ), + self.authenticator = MSOB2CAuth( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + session=async_create_clientsession( + self.hass, + cookie_jar=CookieJar(quote_cookie=False), ), - user_input[CONF_ACCOUNT_NUMBER], ) + validation_response = await validate_credentials(self.authenticator) if isinstance(validation_response, str): errors["base"] = validation_response else: - await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER]) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=user_input[CONF_ACCOUNT_NUMBER], - data={ - **user_input, - CONF_ACCESS_TOKEN: validation_response.refresh_token, - }, + self.accounts = await get_accounts(self.authenticator) + if len(self.accounts) > 1: + self.user_input = user_input + return await self.async_step_select_account() + account_number = self.accounts[0]["value"] + self.user_input = user_input + return await self.async_step_complete( + { + CONF_ACCOUNT_NUMBER: account_number, + } ) return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) + + async def async_step_select_account( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the account selection step.""" + errors = {} + if user_input is not None: + if TYPE_CHECKING: + assert self.authenticator + validation_result = await validate_account( + self.authenticator, + user_input[CONF_ACCOUNT_NUMBER], + ) + if isinstance(validation_result, str): + errors["base"] = validation_result + else: + return await self.async_step_complete(user_input) + return self.async_show_form( + step_id="select_account", + data_schema=vol.Schema( + { + vol.Required(CONF_ACCOUNT_NUMBER): selector.SelectSelector( + selector.SelectSelectorConfig( + options=self.accounts, + 
multiple=False, + mode=selector.SelectSelectorMode.DROPDOWN, + ) + ) + } + ), + errors=errors, + ) + + async def async_step_complete(self, user_input: dict[str, Any]) -> ConfigFlowResult: + """Handle the final configuration step.""" + await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER]) + self._abort_if_unique_id_configured() + if TYPE_CHECKING: + assert self.authenticator + assert self.user_input + config_entry_data = { + **self.user_input, + CONF_ACCOUNT_NUMBER: user_input[CONF_ACCOUNT_NUMBER], + CONF_ACCESS_TOKEN: self.authenticator.refresh_token, + } + return self.async_create_entry( + title=user_input[CONF_ACCOUNT_NUMBER], + data=config_entry_data, + ) diff --git a/homeassistant/components/anglian_water/strings.json b/homeassistant/components/anglian_water/strings.json index b2c11c1d537dbf..6db91b3b9b02eb 100644 --- a/homeassistant/components/anglian_water/strings.json +++ b/homeassistant/components/anglian_water/strings.json @@ -10,14 +10,21 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { + "select_account": { + "data": { + "account_number": "Billing account number" + }, + "data_description": { + "account_number": "Select the billing account you wish to use." + }, + "description": "Multiple active billing accounts were found with your credentials. Please select the account you wish to use. If this is unexpected, contact Anglian Water to confirm your active accounts." + }, "user": { "data": { - "account_number": "Billing Account Number", "password": "[%key:common::config_flow::data::password%]", "username": "[%key:common::config_flow::data::username%]" }, "data_description": { - "account_number": "Your account number found on your latest bill.", "password": "Your password", "username": "Username or email used to log in to the Anglian Water website." 
}, diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 115c6d054af211..d9303a2381b208 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -25,6 +25,7 @@ from homeassistant.helpers import ( config_validation as cv, entity_platform, + entity_registry as er, issue_registry as ir, ) from homeassistant.helpers.device_registry import ( @@ -42,7 +43,12 @@ from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN from .coordinator import BluesoundCoordinator -from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id +from .utils import ( + dispatcher_join_signal, + dispatcher_unjoin_signal, + format_unique_id, + id_to_paired_player, +) if TYPE_CHECKING: from . import BluesoundConfigEntry @@ -83,9 +89,11 @@ async def async_setup_entry( SERVICE_CLEAR_TIMER, None, "async_clear_timer" ) platform.async_register_entity_service( - SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join" + SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_bluesound_join" + ) + platform.async_register_entity_service( + SERVICE_UNJOIN, None, "async_bluesound_unjoin" ) - platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin") async_add_entities([bluesound_player], update_before_add=True) @@ -120,6 +128,7 @@ def __init__( self._presets: list[Preset] = coordinator.data.presets self._group_name: str | None = None self._group_list: list[str] = [] + self._group_members: list[str] | None = None self._bluesound_device_name = sync_status.name self._player = player self._last_status_update = dt_util.utcnow() @@ -180,6 +189,7 @@ def _handle_coordinator_update(self) -> None: self._last_status_update = dt_util.utcnow() self._group_list = self.rebuild_bluesound_group() + self._group_members = self.rebuild_group_members() self.async_write_ha_state() @@ -365,11 +375,13 @@ def 
supported_features(self) -> MediaPlayerEntityFeature: MediaPlayerEntityFeature.VOLUME_STEP | MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_MUTE + | MediaPlayerEntityFeature.GROUPING ) supported = ( MediaPlayerEntityFeature.CLEAR_PLAYLIST | MediaPlayerEntityFeature.BROWSE_MEDIA + | MediaPlayerEntityFeature.GROUPING ) if not self._status.indexing: @@ -421,8 +433,57 @@ def shuffle(self) -> bool: return shuffle - async def async_join(self, master: str) -> None: + @property + def group_members(self) -> list[str] | None: + """Get list of group members. Leader is always first.""" + return self._group_members + + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + if self.entity_id in group_members: + raise ServiceValidationError("Cannot join player to itself") + + entity_ids_with_sync_status = self._entity_ids_with_sync_status() + + paired_players = [] + for group_member in group_members: + sync_status = entity_ids_with_sync_status.get(group_member) + if sync_status is None: + continue + paired_player = id_to_paired_player(sync_status.id) + if paired_player: + paired_players.append(paired_player) + + if paired_players: + await self._player.add_followers(paired_players) + + async def async_unjoin_player(self) -> None: + """Remove this player from any group.""" + if self._sync_status.leader is not None: + leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}" + async_dispatcher_send( + self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port + ) + + if self._sync_status.followers is not None: + await self._player.remove_follower(self.host, self.port) + + async def async_bluesound_join(self, master: str) -> None: """Join the player to a group.""" + ir.async_create_issue( + self.hass, + DOMAIN, + f"deprecated_service_{SERVICE_JOIN}", + is_fixable=False, + breaks_in_ha_version="2026.7.0", + issue_domain=DOMAIN, + 
severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_service_join", + translation_placeholders={ + "name": slugify(self.sync_status.name), + }, + ) + if master == self.entity_id: raise ServiceValidationError("Cannot join player to itself") @@ -431,18 +492,24 @@ async def async_join(self, master: str) -> None: self.hass, dispatcher_join_signal(master), self.host, self.port ) - async def async_unjoin(self) -> None: + async def async_bluesound_unjoin(self) -> None: """Unjoin the player from a group.""" - if self._sync_status.leader is None: - return - - leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}" - - _LOGGER.debug("Trying to unjoin player: %s", self.id) - async_dispatcher_send( - self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port + ir.async_create_issue( + self.hass, + DOMAIN, + f"deprecated_service_{SERVICE_UNJOIN}", + is_fixable=False, + breaks_in_ha_version="2026.7.0", + issue_domain=DOMAIN, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_service_unjoin", + translation_placeholders={ + "name": slugify(self.sync_status.name), + }, ) + await self.async_unjoin_player() + @property def extra_state_attributes(self) -> dict[str, Any] | None: """List members in group.""" @@ -488,6 +555,63 @@ def rebuild_bluesound_group(self) -> list[str]: follower_names.insert(0, leader_sync_status.name) return follower_names + def rebuild_group_members(self) -> list[str] | None: + """Get list of group members. 
Leader is always first.""" + if self.sync_status.leader is None and self.sync_status.followers is None: + return None + + entity_ids_with_sync_status = self._entity_ids_with_sync_status() + + leader_entity_id = None + followers = None + if self.sync_status.followers is not None: + leader_entity_id = self.entity_id + followers = self.sync_status.followers + elif self.sync_status.leader is not None: + leader_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}" + for entity_id, sync_status in entity_ids_with_sync_status.items(): + if sync_status.id == leader_id: + leader_entity_id = entity_id + followers = sync_status.followers + break + + if leader_entity_id is None or followers is None: + return None + + grouped_entity_ids = [leader_entity_id] + for follower in followers: + follower_id = f"{follower.ip}:{follower.port}" + entity_ids = [ + entity_id + for entity_id, sync_status in entity_ids_with_sync_status.items() + if sync_status.id == follower_id + ] + match entity_ids: + case [entity_id]: + grouped_entity_ids.append(entity_id) + + return grouped_entity_ids + + def _entity_ids_with_sync_status(self) -> dict[str, SyncStatus]: + result = {} + + entity_registry = er.async_get(self.hass) + + config_entries: list[BluesoundConfigEntry] = ( + self.hass.config_entries.async_entries(DOMAIN) + ) + for config_entry in config_entries: + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + for entity_entry in entity_entries: + if entity_entry.domain == "media_player": + result[entity_entry.entity_id] = ( + config_entry.runtime_data.coordinator.data.sync_status + ) + + return result + async def async_add_follower(self, host: str, port: int) -> None: """Add follower to leader.""" await self._player.add_follower(host, port) diff --git a/homeassistant/components/bluesound/strings.json b/homeassistant/components/bluesound/strings.json index 66689003af116b..97cc6ac752de02 100644 --- 
a/homeassistant/components/bluesound/strings.json +++ b/homeassistant/components/bluesound/strings.json @@ -41,9 +41,17 @@ "description": "Use `button.{name}_clear_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.", "title": "Detected use of deprecated action bluesound.clear_sleep_timer" }, + "deprecated_service_join": { + "description": "Use the `media_player.join` action instead.\n\nPlease replace this action and adjust your automations and scripts.", + "title": "Detected use of deprecated action bluesound.join" + }, "deprecated_service_set_sleep_timer": { "description": "Use `button.{name}_set_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.", "title": "Detected use of deprecated action bluesound.set_sleep_timer" + }, + "deprecated_service_unjoin": { + "description": "Use the `media_player.unjoin` action instead.\n\nPlease replace this action and adjust your automations and scripts.", + "title": "Detected use of deprecated action bluesound.unjoin" } }, "services": { diff --git a/homeassistant/components/bluesound/utils.py b/homeassistant/components/bluesound/utils.py index 5df5b32de95e8a..f5ab6a6639f1c2 100644 --- a/homeassistant/components/bluesound/utils.py +++ b/homeassistant/components/bluesound/utils.py @@ -1,5 +1,7 @@ """Utility functions for the Bluesound component.""" +from pyblu import PairedPlayer + from homeassistant.helpers.device_registry import format_mac @@ -19,3 +21,12 @@ def dispatcher_unjoin_signal(leader_id: str) -> str: Id is ip_address:port. This can be obtained from sync_status.id. """ return f"bluesound_unjoin_{leader_id}" + + +def id_to_paired_player(id: str) -> PairedPlayer | None: + """Try to convert id in format 'ip:port' to PairedPlayer. 
Returns None if unable to do so.""" + match id.rsplit(":", 1): + case [str() as ip, str() as port] if port.isdigit(): + return PairedPlayer(ip, int(port)) + case _: + return None diff --git a/homeassistant/components/esphome/repairs.py b/homeassistant/components/esphome/repairs.py index 3cba8730cd64a2..d40a68dde1af46 100644 --- a/homeassistant/components/esphome/repairs.py +++ b/homeassistant/components/esphome/repairs.py @@ -8,8 +8,7 @@ from homeassistant import data_entry_flow from homeassistant.components.repairs import RepairsFlow -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import issue_registry as ir +from homeassistant.core import HomeAssistant from .manager import async_replace_device @@ -22,13 +21,6 @@ def __init__(self, data: dict[str, str | int | float | None] | None) -> None: self._data = data super().__init__() - @callback - def _async_get_placeholders(self) -> dict[str, str]: - issue_registry = ir.async_get(self.hass) - issue = issue_registry.async_get_issue(self.handler, self.issue_id) - assert issue is not None - return issue.translation_placeholders or {} - class DeviceConflictRepair(ESPHomeRepair): """Handler for an issue fixing device conflict.""" @@ -58,7 +50,6 @@ async def async_step_init( return self.async_show_menu( step_id="init", menu_options=["migrate", "manual"], - description_placeholders=self._async_get_placeholders(), ) async def async_step_migrate( @@ -69,7 +60,6 @@ async def async_step_migrate( return self.async_show_form( step_id="migrate", data_schema=vol.Schema({}), - description_placeholders=self._async_get_placeholders(), ) entry_id = self.entry_id await async_replace_device(self.hass, entry_id, self.stored_mac, self.mac) @@ -84,7 +74,6 @@ async def async_step_manual( return self.async_show_form( step_id="manual", data_schema=vol.Schema({}), - description_placeholders=self._async_get_placeholders(), ) self.hass.config_entries.async_schedule_reload(self.entry_id) return 
self.async_create_entry(data={}) diff --git a/homeassistant/components/fluss/__init__.py b/homeassistant/components/fluss/__init__.py new file mode 100644 index 00000000000000..c3d4b347ff52a6 --- /dev/null +++ b/homeassistant/components/fluss/__init__.py @@ -0,0 +1,31 @@ +"""The Fluss+ integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import FlussDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.BUTTON] + + +type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: FlussConfigEntry, +) -> bool: + """Set up Fluss+ from a config entry.""" + coordinator = FlussDataUpdateCoordinator(hass, entry, entry.data[CONF_API_KEY]) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: FlussConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/fluss/button.py b/homeassistant/components/fluss/button.py new file mode 100644 index 00000000000000..bc8a90e66c0eb8 --- /dev/null +++ b/homeassistant/components/fluss/button.py @@ -0,0 +1,40 @@ +"""Support for Fluss Devices.""" + +from homeassistant.components.button import ButtonEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import FlussApiClientError, FlussDataUpdateCoordinator +from .entity import FlussEntity + +type FlussConfigEntry = 
ConfigEntry[FlussDataUpdateCoordinator] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: FlussConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Fluss Devices, filtering out any invalid payloads.""" + coordinator = entry.runtime_data + devices = coordinator.data + + async_add_entities( + FlussButton(coordinator, device_id, device) + for device_id, device in devices.items() + ) + + +class FlussButton(FlussEntity, ButtonEntity): + """Representation of a Fluss button device.""" + + _attr_name = None + + async def async_press(self) -> None: + """Handle the button press.""" + try: + await self.coordinator.api.async_trigger_device(self.device_id) + except FlussApiClientError as err: + raise HomeAssistantError(f"Failed to trigger device: {err}") from err diff --git a/homeassistant/components/fluss/config_flow.py b/homeassistant/components/fluss/config_flow.py new file mode 100644 index 00000000000000..09c7da62973c57 --- /dev/null +++ b/homeassistant/components/fluss/config_flow.py @@ -0,0 +1,55 @@ +"""Config flow for Fluss+ integration.""" + +from __future__ import annotations + +from typing import Any + +from fluss_api import ( + FlussApiClient, + FlussApiClientAuthenticationError, + FlussApiClientCommunicationError, +) +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_KEY +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN, LOGGER + +STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): cv.string}) + + +class FlussConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Fluss+.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + + errors: dict[str, str] = {} + if user_input is not None: + api_key = 
user_input[CONF_API_KEY] + self._async_abort_entries_match({CONF_API_KEY: api_key}) + client = FlussApiClient( + user_input[CONF_API_KEY], session=async_get_clientsession(self.hass) + ) + try: + await client.async_get_devices() + except FlussApiClientCommunicationError: + errors["base"] = "cannot_connect" + except FlussApiClientAuthenticationError: + errors["base"] = "invalid_auth" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception occurred") + errors["base"] = "unknown" + if not errors: + return self.async_create_entry( + title="My Fluss+ Devices", data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/fluss/const.py b/homeassistant/components/fluss/const.py new file mode 100644 index 00000000000000..b66ae7361065db --- /dev/null +++ b/homeassistant/components/fluss/const.py @@ -0,0 +1,9 @@ +"""Constants for the Fluss+ integration.""" + +from datetime import timedelta +import logging + +DOMAIN = "fluss" +LOGGER = logging.getLogger(__name__) +UPDATE_INTERVAL = 60 # seconds +UPDATE_INTERVAL_TIMEDELTA = timedelta(seconds=UPDATE_INTERVAL) diff --git a/homeassistant/components/fluss/coordinator.py b/homeassistant/components/fluss/coordinator.py new file mode 100644 index 00000000000000..6f0bc20e30f6d0 --- /dev/null +++ b/homeassistant/components/fluss/coordinator.py @@ -0,0 +1,50 @@ +"""DataUpdateCoordinator for Fluss+ integration.""" + +from __future__ import annotations + +from typing import Any + +from fluss_api import ( + FlussApiClient, + FlussApiClientAuthenticationError, + FlussApiClientError, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from 
homeassistant.util import slugify + +from .const import LOGGER, UPDATE_INTERVAL_TIMEDELTA + +type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator] + + +class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Manages fetching Fluss device data on a schedule.""" + + def __init__( + self, hass: HomeAssistant, config_entry: FlussConfigEntry, api_key: str + ) -> None: + """Initialize the coordinator.""" + self.api = FlussApiClient(api_key, session=async_get_clientsession(hass)) + super().__init__( + hass, + LOGGER, + name=f"Fluss+ ({slugify(api_key[:8])})", + config_entry=config_entry, + update_interval=UPDATE_INTERVAL_TIMEDELTA, + ) + + async def _async_update_data(self) -> dict[str, dict[str, Any]]: + """Fetch data from the Fluss API and return as a dictionary keyed by deviceId.""" + try: + devices = await self.api.async_get_devices() + except FlussApiClientAuthenticationError as err: + raise ConfigEntryError(f"Authentication failed: {err}") from err + except FlussApiClientError as err: + raise UpdateFailed(f"Error fetching Fluss devices: {err}") from err + + return {device["deviceId"]: device for device in devices.get("devices", [])} diff --git a/homeassistant/components/fluss/entity.py b/homeassistant/components/fluss/entity.py new file mode 100644 index 00000000000000..12de23a587bcdb --- /dev/null +++ b/homeassistant/components/fluss/entity.py @@ -0,0 +1,39 @@ +"""Base entities for the Fluss+ integration.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import FlussDataUpdateCoordinator + + +class FlussEntity(CoordinatorEntity[FlussDataUpdateCoordinator]): + """Base class for Fluss entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: FlussDataUpdateCoordinator, + device_id: str, + device: dict, + ) -> None: + """Initialize the entity with a device ID and device data.""" + 
super().__init__(coordinator) + self.device_id = device_id + self._attr_unique_id = device_id + self._attr_device_info = DeviceInfo( + identifiers={("fluss", device_id)}, + name=device.get("deviceName"), + manufacturer="Fluss", + model="Fluss+ Device", + ) + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.device_id in self.coordinator.data + + @property + def device(self) -> dict: + """Return the stored device data.""" + return self.coordinator.data[self.device_id] diff --git a/homeassistant/components/fluss/manifest.json b/homeassistant/components/fluss/manifest.json new file mode 100644 index 00000000000000..fcd7867ed1a95b --- /dev/null +++ b/homeassistant/components/fluss/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "fluss", + "name": "Fluss+", + "codeowners": ["@fluss"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/fluss", + "iot_class": "cloud_polling", + "loggers": ["fluss_api"], + "quality_scale": "bronze", + "requirements": ["fluss-api==0.1.9.20"] +} diff --git a/homeassistant/components/fluss/quality_scale.yaml b/homeassistant/components/fluss/quality_scale.yaml new file mode 100644 index 00000000000000..c2b4a85a6887b1 --- /dev/null +++ b/homeassistant/components/fluss/quality_scale.yaml @@ -0,0 +1,69 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No actions present + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters:
done + docs-installation-parameters: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: | + Not needed + discovery: todo + stale-devices: todo + diagnostics: todo + exception-translations: todo + icon-translations: + status: exempt + comment: | + No icons used + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: + status: exempt + comment: | + No issues to repair + docs-use-cases: done + docs-supported-devices: todo + docs-supported-functions: done + docs-data-update: todo + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/fluss/strings.json b/homeassistant/components/fluss/strings.json new file mode 100644 index 00000000000000..cf63c7ff91ac64 --- /dev/null +++ b/homeassistant/components/fluss/strings.json @@ -0,0 +1,23 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "user": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "api_key": "The API key found in the profile page of the Fluss+ app." 
+ }, + "description": "Your Fluss API key, available in the profile page of the Fluss+ app" + } + } + } +} diff --git a/homeassistant/components/homewizard/select.py b/homeassistant/components/homewizard/select.py index 132eac87375d18..53a6e7c3a6f1dd 100644 --- a/homeassistant/components/homewizard/select.py +++ b/homeassistant/components/homewizard/select.py @@ -44,11 +44,16 @@ def __init__( """Initialize the switch.""" super().__init__(coordinator) + batteries = coordinator.data.batteries + battery_count = batteries.battery_count if batteries is not None else None + entity_registry_enabled_default = ( + battery_count is not None and battery_count > 0 + ) description = SelectEntityDescription( key="battery_group_mode", translation_key="battery_group_mode", entity_category=EntityCategory.CONFIG, - entity_registry_enabled_default=False, + entity_registry_enabled_default=entity_registry_enabled_default, options=[ str(mode) for mode in (coordinator.data.device.supported_battery_modes() or []) diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index bbf93e57b6cbad..0b7c9abdbc5f3d 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -168,6 +168,7 @@ class FanZeroMode(StrEnum): Platform.FAN, Platform.DATETIME, Platform.LIGHT, + Platform.SCENE, Platform.SENSOR, Platform.SWITCH, Platform.TIME, @@ -227,3 +228,9 @@ class FanConf: """Common config keys for fan.""" MAX_STEP: Final = "max_step" + + +class SceneConf: + """Common config keys for scene.""" + + SCENE_NUMBER: Final = "scene_number" diff --git a/homeassistant/components/knx/repairs.py b/homeassistant/components/knx/repairs.py index 598bdc7d0a9ae6..37bdaaa0f42738 100644 --- a/homeassistant/components/knx/repairs.py +++ b/homeassistant/components/knx/repairs.py @@ -110,13 +110,6 @@ def _data_secure_group_key_issue_handler( class DataSecureGroupIssueRepairFlow(RepairsFlow): """Handler for an issue fixing flow for outdated DataSecure 
keys.""" - @callback - def _async_get_placeholders(self) -> dict[str, str]: - issue_registry = ir.async_get(self.hass) - issue = issue_registry.async_get_issue(self.handler, self.issue_id) - assert issue is not None - return issue.translation_placeholders or {} - async def async_step_init( self, user_input: dict[str, str] | None = None ) -> data_entry_flow.FlowResult: @@ -157,7 +150,6 @@ async def async_step_secure_knxkeys( return self.async_show_form( step_id="secure_knxkeys", data_schema=vol.Schema(fields), - description_placeholders=self._async_get_placeholders(), errors=errors, ) diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index bc997f617b30d6..fd16aba1ef8232 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -10,13 +10,23 @@ from homeassistant.components.scene import BaseScene from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddConfigEntryEntitiesCallback, + async_get_current_platform, +) from homeassistant.helpers.typing import ConfigType -from .const import KNX_ADDRESS, KNX_MODULE_KEY -from .entity import KnxYamlEntity +from .const import DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY, SceneConf +from .entity import ( + KnxUiEntity, + KnxUiEntityPlatformController, + KnxYamlEntity, + _KnxEntityBase, +) from .knx_module import KNXModule from .schema import SceneSchema +from .storage.const import CONF_ENTITY, CONF_GA_SCENE +from .storage.util import ConfigExtractor async def async_setup_entry( @@ -26,18 +36,53 @@ async def async_setup_entry( ) -> None: """Set up scene(s) for KNX platform.""" knx_module = hass.data[KNX_MODULE_KEY] - config: list[ConfigType] = knx_module.config_yaml[Platform.SCENE] + platform = async_get_current_platform() + 
knx_module.config_store.add_platform( + platform=Platform.SCENE, + controller=KnxUiEntityPlatformController( + knx_module=knx_module, + entity_platform=platform, + entity_class=KnxUiScene, + ), + ) - async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) + entities: list[KnxYamlEntity | KnxUiEntity] = [] + if yaml_platform_config := knx_module.config_yaml.get(Platform.SCENE): + entities.extend( + KnxYamlScene(knx_module, entity_config) + for entity_config in yaml_platform_config + ) + if ui_config := knx_module.config_store.data["entities"].get(Platform.SCENE): + entities.extend( + KnxUiScene(knx_module, unique_id, config) + for unique_id, config in ui_config.items() + ) + if entities: + async_add_entities(entities) -class KNXScene(KnxYamlEntity, BaseScene): +class _KnxScene(BaseScene, _KnxEntityBase): """Representation of a KNX scene.""" _device: XknxScene + async def _async_activate(self, **kwargs: Any) -> None: + """Activate the scene.""" + await self._device.run() + + def after_update_callback(self, device: XknxDevice) -> None: + """Call after device was updated.""" + self._async_record_activation() + super().after_update_callback(device) + + +class KnxYamlScene(_KnxScene, KnxYamlEntity): + """Representation of a KNX scene configured from YAML.""" + + _device: XknxScene + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: - """Init KNX scene.""" + """Initialize KNX scene.""" super().__init__( knx_module=knx_module, device=XknxScene( @@ -52,11 +97,28 @@ def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: f"{self._device.scene_value.group_address}_{self._device.scene_number}" ) - async def _async_activate(self, **kwargs: Any) -> None: - """Activate the scene.""" - await self._device.run() - def after_update_callback(self, device: XknxDevice) -> None: - """Call after device was updated.""" - self._async_record_activation() - super().after_update_callback(device) +class KnxUiScene(_KnxScene, 
KnxUiEntity): + """Representation of a KNX scene configured from the UI.""" + + _device: XknxScene + + def __init__( + self, + knx_module: KNXModule, + unique_id: str, + config: ConfigType, + ) -> None: + """Initialize KNX scene.""" + super().__init__( + knx_module=knx_module, + unique_id=unique_id, + entity_config=config[CONF_ENTITY], + ) + knx_conf = ConfigExtractor(config[DOMAIN]) + self._device = XknxScene( + xknx=knx_module.xknx, + name=config[CONF_ENTITY][CONF_NAME], + group_address=knx_conf.get_write(CONF_GA_SCENE), + scene_number=knx_conf.get(SceneConf.SCENE_NUMBER), + ) diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index 3ded33494cc798..50f56e33099c15 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -61,6 +61,7 @@ CoverConf, FanConf, FanZeroMode, + SceneConf, ) from .validation import ( backwards_compatible_xknx_climate_enum_member, @@ -822,7 +823,7 @@ class SceneSchema(KNXPlatformSchema): { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Required(KNX_ADDRESS): ga_list_validator, - vol.Required(CONF_SCENE_NUMBER): vol.All( + vol.Required(SceneConf.SCENE_NUMBER): vol.All( vol.Coerce(int), vol.Range(min=1, max=64) ), vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA, diff --git a/homeassistant/components/knx/storage/const.py b/homeassistant/components/knx/storage/const.py index 813adb5cc53fb9..412caf575cbfad 100644 --- a/homeassistant/components/knx/storage/const.py +++ b/homeassistant/components/knx/storage/const.py @@ -72,5 +72,8 @@ CONF_GA_HUE: Final = "ga_hue" CONF_GA_SATURATION: Final = "ga_saturation" +# Scene +CONF_GA_SCENE: Final = "ga_scene" + # Sensor CONF_ALWAYS_CALLBACK: Final = "always_callback" diff --git a/homeassistant/components/knx/storage/entity_store_schema.py b/homeassistant/components/knx/storage/entity_store_schema.py index 64a576f20c493d..717b44c78d5355 100644 --- 
a/homeassistant/components/knx/storage/entity_store_schema.py +++ b/homeassistant/components/knx/storage/entity_store_schema.py @@ -40,6 +40,7 @@ CoverConf, FanConf, FanZeroMode, + SceneConf, ) from ..dpt import get_supported_dpts from .const import ( @@ -82,6 +83,7 @@ CONF_GA_RED_BRIGHTNESS, CONF_GA_RED_SWITCH, CONF_GA_SATURATION, + CONF_GA_SCENE, CONF_GA_SENSOR, CONF_GA_SETPOINT_SHIFT, CONF_GA_SPEED, @@ -419,6 +421,25 @@ class LightColorMode(StrEnum): ), ) +SCENE_KNX_SCHEMA = vol.Schema( + { + vol.Required(CONF_GA_SCENE): GASelector( + state=False, + passive=False, + write_required=True, + valid_dpt=["17.001", "18.001"], + ), + vol.Required(SceneConf.SCENE_NUMBER): AllSerializeFirst( + selector.NumberSelector( + selector.NumberSelectorConfig( + min=1, max=64, step=1, mode=selector.NumberSelectorMode.BOX + ) + ), + vol.Coerce(int), + ), + }, +) + SWITCH_KNX_SCHEMA = vol.Schema( { vol.Required(CONF_GA_SWITCH): GASelector(write_required=True, valid_dpt="1"), @@ -694,6 +715,7 @@ def _validate_sensor_attributes(config: dict) -> dict: Platform.DATETIME: DATETIME_KNX_SCHEMA, Platform.FAN: FAN_KNX_SCHEMA, Platform.LIGHT: LIGHT_KNX_SCHEMA, + Platform.SCENE: SCENE_KNX_SCHEMA, Platform.SENSOR: SENSOR_KNX_SCHEMA, Platform.SWITCH: SWITCH_KNX_SCHEMA, Platform.TIME: TIME_KNX_SCHEMA, diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 9ea12aadb521e2..bccec77a78d046 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -774,6 +774,19 @@ } } }, + "scene": { + "description": "A KNX entity can activate a KNX scene and updates when the scene number is received.", + "knx": { + "ga_scene": { + "description": "Group address to activate a scene.", + "label": "Scene" + }, + "scene_number": { + "description": "The scene number this entity is associated with.", + "label": "Scene number" + } + } + }, "sensor": { "description": "Read-only entity for numeric or string datapoints. 
Temperature, percent etc.", "knx": { diff --git a/homeassistant/components/libre_hardware_monitor/__init__.py b/homeassistant/components/libre_hardware_monitor/__init__.py index 1461b4b449cce5..2a94cda9bac2f2 100644 --- a/homeassistant/components/libre_hardware_monitor/__init__.py +++ b/homeassistant/components/libre_hardware_monitor/__init__.py @@ -32,15 +32,15 @@ async def async_migrate_entry( entity_registry, config_entry.entry_id ) for reg_entry in registry_entries: - new_entity_id = f"{config_entry.entry_id}_{reg_entry.unique_id[4:]}" + new_unique_id = f"{config_entry.entry_id}_{reg_entry.unique_id[4:]}" _LOGGER.debug( "Migrating entity %s unique id from %s to %s", reg_entry.entity_id, reg_entry.unique_id, - new_entity_id, + new_unique_id, ) entity_registry.async_update_entity( - reg_entry.entity_id, new_unique_id=new_entity_id + reg_entry.entity_id, new_unique_id=new_unique_id ) # Migrate device identifiers diff --git a/homeassistant/components/libre_hardware_monitor/config_flow.py b/homeassistant/components/libre_hardware_monitor/config_flow.py index 7ca51abe359f2c..36dd1a21b2496b 100644 --- a/homeassistant/components/libre_hardware_monitor/config_flow.py +++ b/homeassistant/components/libre_hardware_monitor/config_flow.py @@ -46,7 +46,7 @@ async def async_step_user( ) try: - _ = (await api.get_data()).main_device_ids_and_names.values() + computer_name = (await api.get_data()).computer_name except LibreHardwareMonitorConnectionError as exception: _LOGGER.error(exception) errors["base"] = "cannot_connect" @@ -54,7 +54,7 @@ async def async_step_user( errors["base"] = "no_devices" else: return self.async_create_entry( - title=f"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}", + title=f"{computer_name} ({user_input[CONF_HOST]}:{user_input[CONF_PORT]})", data=user_input, ) diff --git a/homeassistant/components/libre_hardware_monitor/coordinator.py b/homeassistant/components/libre_hardware_monitor/coordinator.py index e67602074175ff..9963e1ba55f700 100644 --- 
a/homeassistant/components/libre_hardware_monitor/coordinator.py +++ b/homeassistant/components/libre_hardware_monitor/coordinator.py @@ -65,7 +65,7 @@ async def _async_update_data(self) -> LibreHardwareMonitorData: lhm_data = await self._api.get_data() except LibreHardwareMonitorConnectionError as err: raise UpdateFailed( - "LibreHardwareMonitor connection failed, will retry" + "LibreHardwareMonitor connection failed, will retry", retry_after=30 ) from err except LibreHardwareMonitorNoDevicesError as err: raise UpdateFailed("No sensor data available, will retry") from err diff --git a/homeassistant/components/libre_hardware_monitor/manifest.json b/homeassistant/components/libre_hardware_monitor/manifest.json index 517fb0684acbdd..b62565c775760e 100644 --- a/homeassistant/components/libre_hardware_monitor/manifest.json +++ b/homeassistant/components/libre_hardware_monitor/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "silver", - "requirements": ["librehardwaremonitor-api==1.5.0"] + "requirements": ["librehardwaremonitor-api==1.6.0"] } diff --git a/homeassistant/components/libre_hardware_monitor/sensor.py b/homeassistant/components/libre_hardware_monitor/sensor.py index bfebce1f6c3a0a..c56bb75fc10759 100644 --- a/homeassistant/components/libre_hardware_monitor/sensor.py +++ b/homeassistant/components/libre_hardware_monitor/sensor.py @@ -66,7 +66,7 @@ def __init__( # Hardware device self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, f"{entry_id}_{sensor_data.device_id}")}, - name=sensor_data.device_name, + name=f"[{coordinator.data.computer_name}] {sensor_data.device_name}", model=sensor_data.device_type, ) diff --git a/homeassistant/components/neurio_energy/sensor.py b/homeassistant/components/neurio_energy/sensor.py index 7a7ceff338efe7..f4943b856cb91b 100644 --- a/homeassistant/components/neurio_energy/sensor.py +++ b/homeassistant/components/neurio_energy/sensor.py @@ -29,9 +29,13 @@ ACTIVE_NAME 
= "Energy Usage" DAILY_NAME = "Daily Energy Usage" +ACTIVE_GENERATION_NAME = "Energy Production" +DAILY_GENERATION_NAME = "Daily Energy Production" ACTIVE_TYPE = "active" DAILY_TYPE = "daily" +ACTIVE_GENERATION_TYPE = "active_generation" +DAILY_GENERATION_TYPE = "daily_generation" MIN_TIME_BETWEEN_DAILY_UPDATES = timedelta(seconds=150) @@ -76,6 +80,18 @@ def update_active(): add_entities([NeurioEnergy(data, ACTIVE_NAME, ACTIVE_TYPE, update_active)]) # Daily power sensor add_entities([NeurioEnergy(data, DAILY_NAME, DAILY_TYPE, update_daily)]) + # Active generation sensor + add_entities( + [ + NeurioEnergy( + data, ACTIVE_GENERATION_NAME, ACTIVE_GENERATION_TYPE, update_active + ) + ] + ) + # Daily generation sensor + add_entities( + [NeurioEnergy(data, DAILY_GENERATION_NAME, DAILY_GENERATION_TYPE, update_daily)] + ) class NeurioData: @@ -89,6 +105,8 @@ def __init__(self, api_key, api_secret, sensor_id): self._daily_usage = None self._active_power = None + self._daily_generation = None + self._active_generation = None self._state = None @@ -105,17 +123,29 @@ def active_power(self): """Return latest active power value.""" return self._active_power + @property + def daily_generation(self): + """Return latest daily generation value.""" + return self._daily_generation + + @property + def active_generation(self): + """Return latest active generation value.""" + return self._active_generation + def get_active_power(self) -> None: - """Return current power value.""" + """Update current power values.""" try: sample = self.neurio_client.get_samples_live_last(self.sensor_id) self._active_power = sample["consumptionPower"] + self._active_generation = sample.get("generationPower") except (requests.exceptions.RequestException, ValueError, KeyError): _LOGGER.warning("Could not update current power usage") def get_daily_usage(self) -> None: - """Return current daily power usage.""" + """Update current daily power usage and generation.""" kwh = 0 + gen_kwh = 0 start_time = 
dt_util.start_of_local_day().astimezone(dt_util.UTC).isoformat() end_time = dt_util.utcnow().isoformat() @@ -131,8 +161,10 @@ def get_daily_usage(self) -> None: for result in history: kwh += result["consumptionEnergy"] / 3600000 + gen_kwh += result.get("generationEnergy", 0) / 3600000 self._daily_usage = round(kwh, 2) + self._daily_generation = round(gen_kwh, 2) class NeurioEnergy(SensorEntity): @@ -156,6 +188,16 @@ def __init__(self, data, name, sensor_type, update_call): self._unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR self._attr_device_class = SensorDeviceClass.ENERGY self._attr_state_class = SensorStateClass.TOTAL_INCREASING + elif sensor_type == ACTIVE_GENERATION_TYPE: + self._attr_icon = "mdi:solar-power" + self._unit_of_measurement = UnitOfPower.WATT + self._attr_device_class = SensorDeviceClass.POWER + self._attr_state_class = SensorStateClass.MEASUREMENT + elif sensor_type == DAILY_GENERATION_TYPE: + self._attr_icon = "mdi:solar-power" + self._unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + self._attr_device_class = SensorDeviceClass.ENERGY + self._attr_state_class = SensorStateClass.TOTAL_INCREASING @property def name(self): @@ -180,3 +222,7 @@ def update(self) -> None: self._state = self._data.active_power elif self._sensor_type == DAILY_TYPE: self._state = self._data.daily_usage + elif self._sensor_type == ACTIVE_GENERATION_TYPE: + self._state = self._data.active_generation + elif self._sensor_type == DAILY_GENERATION_TYPE: + self._state = self._data.daily_generation diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 1bcbe6b5424be1..a5fc6ee0f885a7 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -20,5 +20,5 @@ "iot_class": "local_push", "loggers": ["reolink_aio"], "quality_scale": "platinum", - "requirements": ["reolink-aio==0.17.1"] + "requirements": ["reolink-aio==0.18.0"] } diff --git 
a/homeassistant/components/samsungtv/entity.py b/homeassistant/components/samsungtv/entity.py index 2927dcf2683240..59c4f71d9c80d8 100644 --- a/homeassistant/components/samsungtv/entity.py +++ b/homeassistant/components/samsungtv/entity.py @@ -92,10 +92,8 @@ async def async_turn_on(self, **kwargs: Any) -> None: LOGGER.debug("Attempting to turn on %s via automation", self.entity_id) await self._turn_on_action.async_run(self.hass, self._context) elif self._mac: - LOGGER.warning( - "Attempting to turn on %s via Wake-On-Lan; if this does not work, " - "please ensure that Wake-On-Lan is available for your device or use " - "a turn_on automation", + LOGGER.debug( + "Attempting to turn on %s via Wake-On-Lan", self.entity_id, ) await self.hass.async_add_executor_job(self._wake_on_lan) diff --git a/homeassistant/components/sonos/__init__.py b/homeassistant/components/sonos/__init__.py index 0231fca42dd5eb..33d82e072882c7 100644 --- a/homeassistant/components/sonos/__init__.py +++ b/homeassistant/components/sonos/__init__.py @@ -5,6 +5,7 @@ import asyncio import datetime from functools import partial +from http import HTTPStatus from ipaddress import AddressValueError, IPv4Address import logging import socket @@ -12,7 +13,7 @@ from urllib.parse import urlparse from aiohttp import ClientError -from requests.exceptions import Timeout +from requests.exceptions import HTTPError, Timeout from soco import events_asyncio, zonegroupstate import soco.config as soco_config from soco.core import SoCo @@ -54,6 +55,8 @@ SUB_FAIL_ISSUE_ID, SUB_FAIL_URL, SUBSCRIPTION_TIMEOUT, + UPNP_DOCUMENTATION_URL, + UPNP_ISSUE_ID, UPNP_ST, ) from .exception import SonosUpdateError @@ -184,6 +187,32 @@ def is_device_invisible(self, ip_address: str) -> bool: """Check if device at provided IP is known to be invisible.""" return any(x for x in self._known_invisible if x.ip_address == ip_address) + async def _process_http_connection_error( + self, err: HTTPError, ip_address: str + ) -> None: + """Process 
HTTP Errors when connecting to a Sonos speaker.""" + response = err.response + # When UPnP is disabled, Sonos returns HTTP 403 Forbidden error. + # Create issue advising user to enable UPnP on Sonos system. + if response is not None and response.status_code == HTTPStatus.FORBIDDEN: + ir.async_create_issue( + self.hass, + DOMAIN, + f"{UPNP_ISSUE_ID}_{ip_address}", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="upnp_disabled", + translation_placeholders={ + "device_ip": ip_address, + "documentation_url": UPNP_DOCUMENTATION_URL, + }, + ) + _LOGGER.error( + "HTTP error connecting to Sonos speaker at %s: %s", + ip_address, + err, + ) + async def async_subscribe_to_zone_updates(self, ip_address: str) -> None: """Test subscriptions and create SonosSpeakers based on results.""" try: @@ -195,13 +224,29 @@ async def async_subscribe_to_zone_updates(self, ip_address: str) -> None: ) return soco = SoCo(ip_address) - # Cache now to avoid household ID lookup during first ZoneGroupState processing - await self.hass.async_add_executor_job( - getattr, - soco, - "household_id", - ) - sub = await soco.zoneGroupTopology.subscribe() + try: + # Cache now to avoid household ID lookup during first ZoneGroupState processing + await self.hass.async_add_executor_job( + getattr, + soco, + "household_id", + ) + sub = await soco.zoneGroupTopology.subscribe() + except HTTPError as err: + await self._process_http_connection_error(err, ip_address) + return + except ( + OSError, + SoCoException, + Timeout, + TimeoutError, + ) as err: + _LOGGER.error( + "Error connecting to discovered Sonos speaker at %s: %s", + ip_address, + err, + ) + return @callback def _async_add_visible_zones(subscription_succeeded: bool = False) -> None: @@ -390,6 +435,9 @@ async def async_poll_manual_hosts( sync_get_visible_zones, soco, ) + except HTTPError as err: + await self._process_http_connection_error(err, ip_addr) + continue except ( OSError, SoCoException, diff --git 
a/homeassistant/components/sonos/const.py b/homeassistant/components/sonos/const.py index 20e079c901d482..31db15f70cc666 100644 --- a/homeassistant/components/sonos/const.py +++ b/homeassistant/components/sonos/const.py @@ -20,6 +20,9 @@ Platform.SWITCH, ] +UPNP_ISSUE_ID = "upnp_disabled" +UPNP_DOCUMENTATION_URL = "https://www.home-assistant.io/integrations/sonos/#403-error-when-setting-up-the-integration" + SUB_FAIL_ISSUE_ID = "subscriptions_failed" SUB_FAIL_URL = "https://www.home-assistant.io/integrations/sonos/#network-requirements" diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 28f6a7c4d61ae2..71b6ffe6c63fb7 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -132,6 +132,10 @@ "subscriptions_failed": { "description": "Falling back to polling, functionality may be limited.\n\nSonos device at {device_ip} cannot reach Home Assistant at {listener_address}.\n\nSee our [documentation]({sub_fail_url}) for more information on how to solve this issue.", "title": "Networking error: subscriptions failed" + }, + "upnp_disabled": { + "description": "Unable to connect to Sonos speaker at {device_ip}.\n\nPlease ensure UPnP is enabled on your Sonos system.\n\nOpen the Sonos app on your phone or tablet. Go to Account > Privacy and Security > UPnP. Enable the UPnP setting. Once UPnP is enabled, return to Home Assistant and reload the Sonos integration. The connection should now succeed. 
See our [documentation]({documentation_url}) for steps to resolve this issue.", + "title": "Networking error: UPnP disabled" } }, "services": { diff --git a/homeassistant/components/stiebel_eltron/__init__.py b/homeassistant/components/stiebel_eltron/__init__.py index a196364313af46..0a0cf4c2dcb7cc 100644 --- a/homeassistant/components/stiebel_eltron/__init__.py +++ b/homeassistant/components/stiebel_eltron/__init__.py @@ -1,122 +1,19 @@ """The component for STIEBEL ELTRON heat pumps with ISGWeb Modbus module.""" import logging -from typing import Any from pymodbus.client import ModbusTcpClient from pystiebeleltron.pystiebeleltron import StiebelEltronAPI -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_NAME, - CONF_PORT, - DEVICE_DEFAULT_NAME, - Platform, -) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.typing import ConfigType - -from .const import CONF_HUB, DEFAULT_HUB, DOMAIN - -MODBUS_DOMAIN = "modbus" - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string, - vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) _LOGGER = logging.getLogger(__name__) _PLATFORMS: list[Platform] = [Platform.CLIMATE] -async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: - """Set up the STIEBEL ELTRON component.""" - hub_config: dict[str, Any] | None = None - if MODBUS_DOMAIN in config: - for hub in config[MODBUS_DOMAIN]: - if hub[CONF_NAME] == config[DOMAIN][CONF_HUB]: - hub_config = hub - break - if hub_config is 
None: - ir.async_create_issue( - hass, - DOMAIN, - "deprecated_yaml_import_issue_missing_hub", - breaks_in_ha_version="2025.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml_import_issue_missing_hub", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Stiebel Eltron", - }, - ) - return - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_HOST: hub_config[CONF_HOST], - CONF_PORT: hub_config[CONF_PORT], - CONF_NAME: config[DOMAIN][CONF_NAME], - }, - ) - if ( - result.get("type") is FlowResultType.ABORT - and result.get("reason") != "already_configured" - ): - ir.async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2025.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Stiebel Eltron", - }, - ) - return - - ir.async_create_issue( - hass, - DOMAIN, - "deprecated_yaml", - breaks_in_ha_version="2025.11.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Stiebel Eltron", - }, - ) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the STIEBEL ELTRON component.""" - if DOMAIN in config: - hass.async_create_task(_async_import(hass, config)) - return True - - type StiebelEltronConfigEntry = ConfigEntry[StiebelEltronAPI] diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index d1aed7f82847e9..197671cc8f1284 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -180,7 +180,7 
@@ class SwitchbotCloudSensorEntityDescription(SensorEntityDescription): HUMIDITY_DESCRIPTION, BATTERY_DESCRIPTION, ), - "Presence Sensor": (BATTERY_DESCRIPTION,), + "Presence Sensor": (BATTERY_DESCRIPTION, LIGHTLEVEL_DESCRIPTION), "Relay Switch 1PM": ( POWER_DESCRIPTION, VOLTAGE_DESCRIPTION, diff --git a/homeassistant/components/transmission/const.py b/homeassistant/components/transmission/const.py index c232f26cefd127..da4cc6ed5c8149 100644 --- a/homeassistant/components/transmission/const.py +++ b/homeassistant/components/transmission/const.py @@ -40,9 +40,12 @@ ATTR_DELETE_DATA = "delete_data" ATTR_TORRENT = "torrent" +ATTR_TORRENTS = "torrents" ATTR_DOWNLOAD_PATH = "download_path" +ATTR_TORRENT_FILTER = "torrent_filter" SERVICE_ADD_TORRENT = "add_torrent" +SERVICE_GET_TORRENTS = "get_torrents" SERVICE_REMOVE_TORRENT = "remove_torrent" SERVICE_START_TORRENT = "start_torrent" SERVICE_STOP_TORRENT = "stop_torrent" @@ -54,3 +57,14 @@ STATE_UP_DOWN = "up_down" STATE_SEEDING = "seeding" STATE_DOWNLOADING = "downloading" + +FILTER_MODES: dict[str, list[str] | None] = { + "started": ["downloading"], + "completed": ["seeding"], + "paused": ["stopped"], + "active": [ + "seeding", + "downloading", + ], + "all": None, +} diff --git a/homeassistant/components/transmission/helpers.py b/homeassistant/components/transmission/helpers.py new file mode 100644 index 00000000000000..4a3ddc28b27a95 --- /dev/null +++ b/homeassistant/components/transmission/helpers.py @@ -0,0 +1,45 @@ +"""Helper functions for Transmission.""" + +from typing import Any + +from transmission_rpc.torrent import Torrent + + +def format_torrent(torrent: Torrent) -> dict[str, Any]: + """Format a single torrent.""" + value: dict[str, Any] = {} + + value["id"] = torrent.id + value["name"] = torrent.name + value["status"] = torrent.status.value + value["percent_done"] = f"{torrent.percent_done * 100:.2f}%" + value["ratio"] = f"{torrent.ratio:.2f}" + value["eta"] = str(torrent.eta) if torrent.eta else None + 
value["added_date"] = torrent.added_date.isoformat() + value["done_date"] = torrent.done_date.isoformat() if torrent.done_date else None + value["download_dir"] = torrent.download_dir + value["labels"] = torrent.labels + + return value + + +def filter_torrents( + torrents: list[Torrent], statuses: list[str] | None = None +) -> list[Torrent]: + """Filter torrents based on the statuses provided.""" + return [ + torrent + for torrent in torrents + if statuses is None or torrent.status in statuses + ] + + +def format_torrents( + torrents: list[Torrent], +) -> dict[str, dict[str, Any]]: + """Format a list of torrents.""" + value = {} + for torrent in torrents: + value[torrent.name] = format_torrent(torrent) + + return value diff --git a/homeassistant/components/transmission/icons.json b/homeassistant/components/transmission/icons.json index 287f9f501b0db9..20b296e9fc08a2 100644 --- a/homeassistant/components/transmission/icons.json +++ b/homeassistant/components/transmission/icons.json @@ -42,6 +42,9 @@ "add_torrent": { "service": "mdi:download" }, + "get_torrents": { + "service": "mdi:file-arrow-up-down-outline" + }, "remove_torrent": { "service": "mdi:download-off" }, diff --git a/homeassistant/components/transmission/manifest.json b/homeassistant/components/transmission/manifest.json index 69ed258f511d7a..6c6d18517dbd04 100644 --- a/homeassistant/components/transmission/manifest.json +++ b/homeassistant/components/transmission/manifest.json @@ -1,7 +1,7 @@ { "domain": "transmission", "name": "Transmission", - "codeowners": ["@engrbm87", "@JPHutchins"], + "codeowners": ["@engrbm87", "@JPHutchins", "@andrew-codechimp"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/transmission", "integration_type": "service", diff --git a/homeassistant/components/transmission/sensor.py b/homeassistant/components/transmission/sensor.py index f6a0c0f9066239..adf778c0158609 100644 --- a/homeassistant/components/transmission/sensor.py +++ 
b/homeassistant/components/transmission/sensor.py @@ -7,8 +7,6 @@ from dataclasses import dataclass from typing import Any -from transmission_rpc.torrent import Torrent - from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -20,6 +18,7 @@ from homeassistant.helpers.typing import StateType from .const import ( + FILTER_MODES, STATE_ATTR_TORRENT_INFO, STATE_DOWNLOADING, STATE_SEEDING, @@ -28,20 +27,10 @@ ) from .coordinator import TransmissionConfigEntry, TransmissionDataUpdateCoordinator from .entity import TransmissionEntity +from .helpers import filter_torrents PARALLEL_UPDATES = 0 -MODES: dict[str, list[str] | None] = { - "started_torrents": ["downloading"], - "completed_torrents": ["seeding"], - "paused_torrents": ["stopped"], - "active_torrents": [ - "seeding", - "downloading", - ], - "total_torrents": None, -} - @dataclass(frozen=True, kw_only=True) class TransmissionSensorEntityDescription(SensorEntityDescription): @@ -84,7 +73,7 @@ class TransmissionSensorEntityDescription(SensorEntityDescription): translation_key="active_torrents", val_func=lambda coordinator: coordinator.data.active_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( - coordinator=coordinator, key="active_torrents" + coordinator=coordinator, key="active" ), ), TransmissionSensorEntityDescription( @@ -92,7 +81,7 @@ class TransmissionSensorEntityDescription(SensorEntityDescription): translation_key="paused_torrents", val_func=lambda coordinator: coordinator.data.paused_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( - coordinator=coordinator, key="paused_torrents" + coordinator=coordinator, key="paused" ), ), TransmissionSensorEntityDescription( @@ -100,27 +89,27 @@ class TransmissionSensorEntityDescription(SensorEntityDescription): translation_key="total_torrents", val_func=lambda coordinator: coordinator.data.torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( - 
coordinator=coordinator, key="total_torrents" + coordinator=coordinator, key="total" ), ), TransmissionSensorEntityDescription( key="completed_torrents", translation_key="completed_torrents", val_func=lambda coordinator: len( - _filter_torrents(coordinator.torrents, MODES["completed_torrents"]) + filter_torrents(coordinator.torrents, FILTER_MODES["completed"]) ), extra_state_attr_func=lambda coordinator: _torrents_info_attr( - coordinator=coordinator, key="completed_torrents" + coordinator=coordinator, key="completed" ), ), TransmissionSensorEntityDescription( key="started_torrents", translation_key="started_torrents", val_func=lambda coordinator: len( - _filter_torrents(coordinator.torrents, MODES["started_torrents"]) + filter_torrents(coordinator.torrents, FILTER_MODES["started"]) ), extra_state_attr_func=lambda coordinator: _torrents_info_attr( - coordinator=coordinator, key="started_torrents" + coordinator=coordinator, key="started" ), ), ) @@ -169,21 +158,11 @@ def get_state(upload: int, download: int) -> str: return STATE_IDLE -def _filter_torrents( - torrents: list[Torrent], statuses: list[str] | None = None -) -> list[Torrent]: - return [ - torrent - for torrent in torrents - if statuses is None or torrent.status in statuses - ] - - def _torrents_info_attr( coordinator: TransmissionDataUpdateCoordinator, key: str ) -> dict[str, Any]: infos = {} - torrents = _filter_torrents(coordinator.torrents, MODES[key]) + torrents = filter_torrents(coordinator.torrents, FILTER_MODES.get(key)) torrents = SUPPORTED_ORDER_MODES[coordinator.order](torrents) for torrent in torrents[: coordinator.limit]: info = infos[torrent.name] = { diff --git a/homeassistant/components/transmission/services.py b/homeassistant/components/transmission/services.py index ff03583e470f43..bcce2e10c15643 100644 --- a/homeassistant/components/transmission/services.py +++ b/homeassistant/components/transmission/services.py @@ -1,14 +1,16 @@ """Define services for the Transmission integration.""" 
+from enum import StrEnum from functools import partial import logging -from typing import cast +from typing import Any, cast +from transmission_rpc import Torrent import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_ID -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, selector @@ -16,18 +18,34 @@ ATTR_DELETE_DATA, ATTR_DOWNLOAD_PATH, ATTR_TORRENT, + ATTR_TORRENT_FILTER, + ATTR_TORRENTS, CONF_ENTRY_ID, DEFAULT_DELETE_DATA, DOMAIN, + FILTER_MODES, SERVICE_ADD_TORRENT, + SERVICE_GET_TORRENTS, SERVICE_REMOVE_TORRENT, SERVICE_START_TORRENT, SERVICE_STOP_TORRENT, ) from .coordinator import TransmissionDataUpdateCoordinator +from .helpers import filter_torrents, format_torrents _LOGGER = logging.getLogger(__name__) + +class TorrentFilter(StrEnum): + """TorrentFilter model.""" + + ALL = "all" + STARTED = "started" + COMPLETED = "completed" + PAUSED = "paused" + ACTIVE = "active" + + SERVICE_BASE_SCHEMA = vol.Schema( { vol.Required(CONF_ENTRY_ID): selector.ConfigEntrySelector( @@ -45,6 +63,16 @@ ), ) +SERVICE_GET_TORRENTS_SCHEMA = vol.All( + SERVICE_BASE_SCHEMA.extend( + { + vol.Required(ATTR_TORRENT_FILTER): vol.In( + [x.lower() for x in TorrentFilter] + ), + } + ), +) + SERVICE_REMOVE_TORRENT_SCHEMA = vol.All( SERVICE_BASE_SCHEMA.extend( { @@ -111,6 +139,24 @@ async def _async_add_torrent(service: ServiceCall) -> None: await coordinator.async_request_refresh() +async def _async_get_torrents(service: ServiceCall) -> dict[str, Any] | None: + """Get torrents.""" + coordinator = _get_coordinator_from_service_data(service) + torrent_filter: str = service.data[ATTR_TORRENT_FILTER] + + def get_filtered_torrents() -> list[Torrent]: + """Filter torrents based on the filter provided.""" + 
all_torrents = coordinator.api.get_torrents() + return filter_torrents(all_torrents, FILTER_MODES[torrent_filter]) + + torrents = await service.hass.async_add_executor_job(get_filtered_torrents) + + info = format_torrents(torrents) + return { + ATTR_TORRENTS: info, + } + + async def _async_start_torrent(service: ServiceCall) -> None: """Start torrent.""" coordinator = _get_coordinator_from_service_data(service) @@ -149,6 +195,14 @@ def async_setup_services(hass: HomeAssistant) -> None: schema=SERVICE_ADD_TORRENT_SCHEMA, ) + hass.services.async_register( + DOMAIN, + SERVICE_GET_TORRENTS, + _async_get_torrents, + schema=SERVICE_GET_TORRENTS_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( DOMAIN, SERVICE_REMOVE_TORRENT, diff --git a/homeassistant/components/transmission/services.yaml b/homeassistant/components/transmission/services.yaml index cadfbee2f63830..3afc870337ee44 100644 --- a/homeassistant/components/transmission/services.yaml +++ b/homeassistant/components/transmission/services.yaml @@ -16,6 +16,27 @@ add_torrent: selector: text: +get_torrents: + fields: + entry_id: + required: true + selector: + config_entry: + integration: transmission + torrent_filter: + required: true + example: "all" + default: "all" + selector: + select: + options: + - "all" + - "active" + - "started" + - "paused" + - "completed" + translation_key: torrent_filter + remove_torrent: fields: entry_id: diff --git a/homeassistant/components/transmission/strings.json b/homeassistant/components/transmission/strings.json index 903f48885ea51c..6eeadb3dca285a 100644 --- a/homeassistant/components/transmission/strings.json +++ b/homeassistant/components/transmission/strings.json @@ -120,6 +120,15 @@ "oldest_first": "Oldest first", "worst_ratio_first": "Worst ratio first" } + }, + "torrent_filter": { + "options": { + "active": "Active", + "all": "All", + "completed": "Completed", + "paused": "Paused", + "started": "Started" + } } }, "services": { @@ -141,6 
+150,20 @@ }, "name": "Add torrent" }, + "get_torrents": { + "description": "Get a list of current torrents", + "fields": { + "entry_id": { + "description": "[%key:component::transmission::services::add_torrent::fields::entry_id::description%]", + "name": "[%key:component::transmission::services::add_torrent::fields::entry_id::name%]" + }, + "torrent_filter": { + "description": "What kind of torrents you want to return, such as All or Active.", + "name": "Torrent filter" + } + }, + "name": "Get torrents" + }, "remove_torrent": { "description": "Removes a torrent.", "fields": { diff --git a/homeassistant/components/unifiprotect/media_source.py b/homeassistant/components/unifiprotect/media_source.py index 1e36b59d6419b9..704228a7bf7a11 100644 --- a/homeassistant/components/unifiprotect/media_source.py +++ b/homeassistant/components/unifiprotect/media_source.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from calendar import monthrange from datetime import date, datetime, timedelta from enum import Enum from typing import Any, NoReturn, cast @@ -94,11 +95,12 @@ async def async_get_media_source(hass: HomeAssistant) -> MediaSource: @callback def _get_month_start_end(start: datetime) -> tuple[datetime, datetime]: + """Get the first day of the month for start and current time.""" start = dt_util.as_local(start) end = dt_util.now() - start = start.replace(day=1, hour=0, minute=0, second=1, microsecond=0) - end = end.replace(day=1, hour=0, minute=0, second=2, microsecond=0) + start = start.replace(day=1, hour=0, minute=0, second=0, microsecond=0) + end = end.replace(day=1, hour=0, minute=0, second=0, microsecond=0) return start, end @@ -113,20 +115,19 @@ def _bad_identifier(identifier: str, err: Exception | None = None) -> NoReturn: @callback def _format_duration(duration: timedelta) -> str: - formatted = "" seconds = int(duration.total_seconds()) - if seconds > 3600: - hours = seconds // 3600 - formatted += f"{hours}h " - seconds -= hours * 3600 - if 
seconds > 60: - minutes = seconds // 60 - formatted += f"{minutes}m " - seconds -= minutes * 60 + hours, seconds = divmod(seconds, 3600) + minutes, seconds = divmod(seconds, 60) + + parts = [] + if hours > 0: + parts.append(f"{hours}h") + if minutes > 0: + parts.append(f"{minutes}m") if seconds > 0: - formatted += f"{seconds}s " + parts.append(f"{seconds}s") - return formatted.strip() + return " ".join(parts) if parts else "0s" @callback @@ -593,7 +594,8 @@ async def _build_month( start = max(recording_start, start) recording_end = dt_util.now().date() - end = start.replace(month=start.month + 1) - timedelta(days=1) + + end = start.replace(day=monthrange(start.year, start.month)[1]) end = min(recording_end, end) children = [self._build_days(data, camera_id, event_type, start, is_all=True)] @@ -660,10 +662,9 @@ async def _build_days( tzinfo=dt_util.get_default_time_zone(), ) if is_all: - if start_dt.month < 12: - end_dt = start_dt.replace(month=start_dt.month + 1) - else: - end_dt = start_dt.replace(year=start_dt.year + 1, month=1) + # Move to first day of next month + days_in_month = monthrange(start_dt.year, start_dt.month)[1] + end_dt = start_dt + timedelta(days=days_in_month) else: end_dt = start_dt + timedelta(hours=24) @@ -726,7 +727,7 @@ async def _build_events_type( ] start, end = _get_month_start_end(data.api.bootstrap.recording_start) - while end > start: + while end >= start: children.append(self._build_month(data, camera_id, event_type, end.date())) end = (end - timedelta(days=1)).replace(day=1) diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index ece7a2d7ad0e0f..133713b33524d1 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -7,15 +7,18 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import HomeAssistant, ServiceCall +from 
homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, SERVICE_UPDATE_DEVS, VS_COORDINATOR, VS_MANAGER +from .const import DOMAIN, VS_COORDINATOR, VS_MANAGER from .coordinator import VeSyncDataCoordinator +from .services import async_setup_services + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -32,6 +35,14 @@ _LOGGER = logging.getLogger(__name__) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up my integration.""" + + async_setup_services(hass) + + return True + + async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Vesync as config entry.""" username = config_entry.data[CONF_USERNAME] @@ -62,22 +73,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - async def async_new_device_discovery(service: ServiceCall) -> None: - """Discover and add new devices.""" - manager = hass.data[DOMAIN][VS_MANAGER] - known_devices = list(manager.devices) - await manager.get_devices() - new_devices = [ - device for device in manager.devices if device not in known_devices - ] - - if new_devices: - async_dispatcher_send(hass, "vesync_new_devices", new_devices) - - hass.services.async_register( - DOMAIN, SERVICE_UPDATE_DEVS, async_new_device_discovery - ) - return True diff --git a/homeassistant/components/vesync/services.py b/homeassistant/components/vesync/services.py new file mode 
100644 index 00000000000000..c1a9bc5e638782 --- /dev/null +++ b/homeassistant/components/vesync/services.py @@ -0,0 +1,36 @@ +"""Support for VeSync Services.""" + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.dispatcher import async_dispatcher_send + +from .const import DOMAIN, SERVICE_UPDATE_DEVS, VS_DEVICES, VS_DISCOVERY, VS_MANAGER + + +@callback +def async_setup_services(hass: HomeAssistant) -> None: + """Handle for services.""" + + hass.services.async_register( + DOMAIN, SERVICE_UPDATE_DEVS, async_new_device_discovery + ) + + +async def async_new_device_discovery(call: ServiceCall) -> None: + """Discover and add new devices.""" + + entries = call.hass.config_entries.async_entries(DOMAIN) + entry = entries[0] if entries else None + + if not entry: + raise ServiceValidationError("Entry not found") + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError("Entry not loaded") + manager = call.hass.data[DOMAIN][VS_MANAGER] + known_devices = list(manager.devices) + await manager.get_devices() + new_devices = [device for device in manager.devices if device not in known_devices] + + if new_devices: + async_dispatcher_send(call.hass, VS_DISCOVERY.format(VS_DEVICES), new_devices) diff --git a/homeassistant/components/webdav/__init__.py b/homeassistant/components/webdav/__init__.py index 36a03dce4d7841..62a9ac76240aec 100644 --- a/homeassistant/components/webdav/__init__.py +++ b/homeassistant/components/webdav/__init__.py @@ -13,11 +13,7 @@ from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from .const import CONF_BACKUP_PATH, DATA_BACKUP_AGENT_LISTENERS, DOMAIN -from .helpers import ( - async_create_client, - async_ensure_path_exists, - async_migrate_wrong_folder_path, -) +from .helpers import async_create_client, async_ensure_path_exists type 
WebDavConfigEntry = ConfigEntry[Client] @@ -51,7 +47,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: WebDavConfigEntry) -> bo ) path = entry.data.get(CONF_BACKUP_PATH, "/") - await async_migrate_wrong_folder_path(client, path) # Ensure the backup directory exists if not await async_ensure_path_exists(client, path): diff --git a/homeassistant/components/webdav/helpers.py b/homeassistant/components/webdav/helpers.py index 442f69b4d3c74e..7771439e46ecc6 100644 --- a/homeassistant/components/webdav/helpers.py +++ b/homeassistant/components/webdav/helpers.py @@ -3,14 +3,10 @@ import logging from aiowebdav2.client import Client, ClientOptions -from aiowebdav2.exceptions import WebDavError from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN - _LOGGER = logging.getLogger(__name__) @@ -44,25 +40,3 @@ async def async_ensure_path_exists(client: Client, path: str) -> bool: return False return True - - -async def async_migrate_wrong_folder_path(client: Client, path: str) -> None: - """Migrate the wrong encoded folder path to the correct one.""" - wrong_path = path.replace(" ", "%20") - # migrate folder when the old folder exists - if wrong_path != path and await client.check(wrong_path): - try: - await client.move(wrong_path, path) - except WebDavError as err: - raise ConfigEntryNotReady( - translation_domain=DOMAIN, - translation_key="failed_to_migrate_folder", - translation_placeholders={ - "wrong_path": wrong_path, - "correct_path": path, - }, - ) from err - - _LOGGER.debug( - "Migrated wrong encoded folder path from %s to %s", wrong_path, path - ) diff --git a/homeassistant/components/webdav/strings.json b/homeassistant/components/webdav/strings.json index cb5bf060e607e7..0f530f3ce7789a 100644 --- a/homeassistant/components/webdav/strings.json +++ b/homeassistant/components/webdav/strings.json @@ -33,9 
+33,6 @@ "cannot_connect": { "message": "Cannot connect to WebDAV server" }, - "failed_to_migrate_folder": { - "message": "Failed to migrate wrong encoded folder \"{wrong_path}\" to \"{correct_path}\"." - }, "invalid_username_password": { "message": "Invalid username or password" } diff --git a/homeassistant/components/wled/__init__.py b/homeassistant/components/wled/__init__.py index a854254a262e70..945b68a74cf66e 100644 --- a/homeassistant/components/wled/__init__.py +++ b/homeassistant/components/wled/__init__.py @@ -2,6 +2,11 @@ from __future__ import annotations +import asyncio +import logging +from typing import TYPE_CHECKING + +from homeassistant.config_entries import SOURCE_IGNORE from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv @@ -13,8 +18,11 @@ WLEDConfigEntry, WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator, + normalize_mac_address, ) +_LOGGER = logging.getLogger(__name__) + PLATFORMS = ( Platform.BUTTON, Platform.LIGHT, @@ -63,3 +71,69 @@ async def async_unload_entry(hass: HomeAssistant, entry: WLEDConfigEntry) -> boo coordinator.unsub() return unload_ok + + +async def async_migrate_entry( + hass: HomeAssistant, config_entry: WLEDConfigEntry +) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version > 1: + # The user has downgraded from a future version + return False + + if config_entry.version == 1: + if config_entry.minor_version < 2: + # 1.2: Normalize unique ID to be lowercase MAC address without separators. + # This matches the format used by WLED firmware. 
+ if TYPE_CHECKING: + assert config_entry.unique_id + normalized_mac_address = normalize_mac_address(config_entry.unique_id) + duplicate_entries = [ + entry + for entry in hass.config_entries.async_entries(DOMAIN) + if entry.unique_id + and normalize_mac_address(entry.unique_id) == normalized_mac_address + ] + ignored_entries = [ + entry + for entry in duplicate_entries + if entry.entry_id != config_entry.entry_id + and entry.source == SOURCE_IGNORE + ] + if ignored_entries: + _LOGGER.info( + "Found %d ignored WLED config entries with the same MAC address, removing them", + len(ignored_entries), + ) + await asyncio.gather( + *[ + hass.config_entries.async_remove(entry.entry_id) + for entry in ignored_entries + ] + ) + if len(duplicate_entries) - len(ignored_entries) > 1: + _LOGGER.warning( + "Found multiple WLED config entries with the same MAC address, cannot migrate to version 1.2" + ) + return False + + hass.config_entries.async_update_entry( + config_entry, + unique_id=normalized_mac_address, + version=1, + minor_version=2, + ) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index d7aab20583e75b..2ea9b3d4891743 100644 --- a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -22,7 +22,7 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import CONF_KEEP_MAIN_LIGHT, DEFAULT_KEEP_MAIN_LIGHT, DOMAIN -from .coordinator import WLEDConfigEntry +from .coordinator import WLEDConfigEntry, normalize_mac_address def _normalize_host(host: str) -> str: @@ -38,6 +38,7 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a WLED config flow.""" VERSION = 1 + MINOR_VERSION = 2 discovered_host: str discovered_device: Device @@ -64,16 +65,15 @@ async def async_step_user( except 
WLEDConnectionError: errors["base"] = "cannot_connect" else: - await self.async_set_unique_id( - device.info.mac_address, raise_on_progress=False - ) + mac_address = normalize_mac_address(device.info.mac_address) + await self.async_set_unique_id(mac_address, raise_on_progress=False) if self.source == SOURCE_RECONFIGURE: entry = self._get_reconfigure_entry() self._abort_if_unique_id_mismatch( reason="unique_id_mismatch", description_placeholders={ "expected_mac": format_mac(entry.unique_id).upper(), - "actual_mac": format_mac(self.unique_id).upper(), + "actual_mac": mac_address.upper(), }, ) return self.async_update_reload_and_abort( @@ -111,7 +111,7 @@ async def async_step_zeroconf( """Handle zeroconf discovery.""" # Abort quick if the mac address is provided by discovery info if mac := discovery_info.properties.get(CONF_MAC): - await self.async_set_unique_id(mac) + await self.async_set_unique_id(normalize_mac_address(mac)) self._abort_if_unique_id_configured( updates={CONF_HOST: discovery_info.host} ) @@ -124,7 +124,10 @@ async def async_step_zeroconf( except WLEDConnectionError: return self.async_abort(reason="cannot_connect") - await self.async_set_unique_id(self.discovered_device.info.mac_address) + device_mac_address = normalize_mac_address( + self.discovered_device.info.mac_address + ) + await self.async_set_unique_id(device_mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) self.context.update( diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index af3d1f583cb4dd..eb876985c57412 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import TYPE_CHECKING + from wled import ( WLED, Device as WLEDDevice, @@ -32,6 +34,17 @@ type WLEDConfigEntry = ConfigEntry[WLEDDataUpdateCoordinator] +def normalize_mac_address(mac: str) -> str: + """Normalize a MAC 
address to lowercase without separators. + + This format is used by WLED firmware as well as unique IDs in Home Assistant. + + The homeassistant.helpers.device_registry.format_mac function is preferred but + returns MAC addresses with colons as separators. + """ + return mac.lower().replace(":", "").replace(".", "").replace("-", "").strip() + + class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): """Class to manage fetching WLED data from single endpoint.""" @@ -51,6 +64,10 @@ def __init__( self.wled = WLED(entry.data[CONF_HOST], session=async_get_clientsession(hass)) self.unsub: CALLBACK_TYPE | None = None + if TYPE_CHECKING: + assert entry.unique_id + self.config_mac_address = normalize_mac_address(entry.unique_id) + super().__init__( hass, LOGGER, @@ -131,13 +148,14 @@ async def _async_update_data(self) -> WLEDDevice: translation_placeholders={"error": str(error)}, ) from error - if device.info.mac_address != self.config_entry.unique_id: + device_mac_address = normalize_mac_address(device.info.mac_address) + if device_mac_address != self.config_mac_address: raise ConfigEntryError( translation_domain=DOMAIN, translation_key="mac_address_mismatch", translation_placeholders={ - "expected_mac": format_mac(self.config_entry.unique_id).upper(), - "actual_mac": format_mac(device.info.mac_address).upper(), + "expected_mac": format_mac(self.config_mac_address).upper(), + "actual_mac": format_mac(device_mac_address).upper(), }, ) diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 0aa3b8869e32f8..8252e66b627362 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -220,6 +220,7 @@ "flipr", "flo", "flume", + "fluss", "flux_led", "folder_watcher", "forecast_solar", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 24b0d133e5651d..8660db5f8c4f79 100644 --- a/homeassistant/generated/integrations.json +++ 
b/homeassistant/generated/integrations.json @@ -2092,6 +2092,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "fluss": { + "name": "Fluss+", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "flux": { "name": "Flux", "integration_type": "hub", diff --git a/homeassistant/requirements.py b/homeassistant/requirements.py index 4de5fed5a73977..6023ed7a4e630a 100644 --- a/homeassistant/requirements.py +++ b/homeassistant/requirements.py @@ -14,7 +14,12 @@ from .core import HomeAssistant, callback from .exceptions import HomeAssistantError from .helpers import singleton -from .loader import Integration, IntegrationNotFound, async_get_integration +from .loader import ( + Integration, + IntegrationNotFound, + async_get_integration, + async_suggest_report_issue, +) from .util import package as pkg_util # The default is too low when the internet connection is satellite or high latency @@ -28,6 +33,10 @@ "ssdp": ("ssdp",), "zeroconf": ("zeroconf", "homekit"), } +DEPRECATED_PACKAGES: dict[str, tuple[str, str]] = { + # old_package_name: (reason, breaks_in_ha_version) + "pyserial-asyncio": ("should be replaced by pyserial-asyncio-fast", "2026.7"), +} _LOGGER = logging.getLogger(__name__) @@ -55,14 +64,16 @@ async def async_get_integration_with_requirements( async def async_process_requirements( - hass: HomeAssistant, name: str, requirements: list[str] + hass: HomeAssistant, name: str, requirements: list[str], is_built_in: bool = True ) -> None: """Install the requirements for a component or platform. This method is a coroutine. It will raise RequirementsNotFound if an requirement can't be satisfied. 
""" - await _async_get_manager(hass).async_process_requirements(name, requirements) + await _async_get_manager(hass).async_process_requirements( + name, requirements, is_built_in + ) async def async_load_installed_versions( @@ -180,7 +191,7 @@ async def _async_process_integration( """Process an integration and requirements.""" if integration.requirements: await self.async_process_requirements( - integration.domain, integration.requirements + integration.domain, integration.requirements, integration.is_built_in ) cache = self.integrations_with_reqs @@ -240,24 +251,46 @@ async def _async_process_integration( raise exceptions[0] async def async_process_requirements( - self, name: str, requirements: list[str] + self, name: str, requirements: list[str], is_built_in: bool ) -> None: """Install the requirements for a component or platform. This method is a coroutine. It will raise RequirementsNotFound if an requirement can't be satisfied. """ - if self.hass.config.skip_pip_packages: - skipped_requirements = { - req - for req in requirements - if Requirement(req).name in self.hass.config.skip_pip_packages + if DEPRECATED_PACKAGES or self.hass.config.skip_pip_packages: + all_requirements = { + requirement_string: Requirement(requirement_string) + for requirement_string in requirements } - - for req in skipped_requirements: - _LOGGER.warning("Skipping requirement %s. This may cause issues", req) - - requirements = [r for r in requirements if r not in skipped_requirements] + if DEPRECATED_PACKAGES: + for requirement_string, requirement_details in all_requirements.items(): + if deprecation := DEPRECATED_PACKAGES.get(requirement_details.name): + reason, breaks_in_ha_version = deprecation + _LOGGER.warning( + "Detected that %sintegration '%s' %s. 
%s %s", + "" if is_built_in else "custom ", + name, + f"has requirement '{requirement_string}' which {reason}", + f"This will stop working in Home Assistant {breaks_in_ha_version}, please" + if breaks_in_ha_version + else "Please", + async_suggest_report_issue( + self.hass, integration_domain=name + ), + ) + if skip_pip_packages := self.hass.config.skip_pip_packages: + skipped_requirements: set[str] = set() + for requirement_string, requirement_details in all_requirements.items(): + if requirement_details.name in skip_pip_packages: + _LOGGER.warning( + "Skipping requirement %s. This may cause issues", + requirement_string, + ) + skipped_requirements.add(requirement_string) + requirements = [ + r for r in requirements if r not in skipped_requirements + ] if not (missing := self._find_missing_requirements(requirements)): return diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index d1c77a89170608..68b27b98cd8d30 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -155,7 +155,11 @@ def converter_factory_allow_none( return lambda value: value from_ratio, to_ratio = cls._get_from_to_ratio(from_unit, to_unit) if cls._are_unit_inverses(from_unit, to_unit): - return lambda val: None if val is None else to_ratio / (val / from_ratio) + return ( + lambda val: None + if val is None or val == 0 + else to_ratio / (val / from_ratio) + ) return lambda val: None if val is None else (val / from_ratio) * to_ratio @classmethod diff --git a/requirements_all.txt b/requirements_all.txt index eddaf11541f989..ad16bcad149521 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -985,6 +985,9 @@ flexit_bacnet==2.2.3 # homeassistant.components.flipr flipr-api==1.6.1 +# homeassistant.components.fluss +fluss-api==0.1.9.20 + # homeassistant.components.flux_led flux-led==1.2.0 @@ -1388,7 +1391,7 @@ libpyfoscamcgi==0.0.9 libpyvivotek==0.6.1 # homeassistant.components.libre_hardware_monitor 
-librehardwaremonitor-api==1.5.0 +librehardwaremonitor-api==1.6.0 # homeassistant.components.mikrotik librouteros==3.2.0 @@ -2732,7 +2735,7 @@ renault-api==0.5.2 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.17.1 +reolink-aio==0.18.0 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bd3b464b8505f2..575e45611b7140 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -870,6 +870,9 @@ flexit_bacnet==2.2.3 # homeassistant.components.flipr flipr-api==1.6.1 +# homeassistant.components.fluss +fluss-api==0.1.9.20 + # homeassistant.components.flux_led flux-led==1.2.0 @@ -1219,7 +1222,7 @@ libpyfoscamcgi==0.0.9 libpyvivotek==0.6.1 # homeassistant.components.libre_hardware_monitor -librehardwaremonitor-api==1.5.0 +librehardwaremonitor-api==1.6.0 # homeassistant.components.mikrotik librouteros==3.2.0 @@ -2292,7 +2295,7 @@ renault-api==0.5.2 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.17.1 +reolink-aio==0.18.0 # homeassistant.components.rflink rflink==0.0.67 diff --git a/tests/components/anglian_water/conftest.py b/tests/components/anglian_water/conftest.py index f206727ad4abfb..a5106f47791d11 100644 --- a/tests/components/anglian_water/conftest.py +++ b/tests/components/anglian_water/conftest.py @@ -1,17 +1,19 @@ """Common fixtures for the Anglian Water tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch +from pyanglianwater.api import API from pyanglianwater.meter import SmartMeter import pytest from homeassistant.components.anglian_water.const import CONF_ACCOUNT_NUMBER, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from .const import ACCESS_TOKEN, ACCOUNT_NUMBER, PASSWORD, USERNAME -from tests.common import MockConfigEntry 
+from tests.common import MockConfigEntry, async_load_json_object_fixture @pytest.fixture @@ -67,9 +69,11 @@ def mock_anglian_water_authenticator() -> Generator[MagicMock]: @pytest.fixture -def mock_anglian_water_client( - mock_smart_meter: SmartMeter, mock_anglian_water_authenticator: MagicMock -) -> Generator[AsyncMock]: +async def mock_anglian_water_client( + hass: HomeAssistant, + mock_smart_meter: SmartMeter, + mock_anglian_water_authenticator: MagicMock, +) -> AsyncGenerator[AsyncMock]: """Mock a Anglian Water client.""" # Create a mock instance with our meters and config first. with ( @@ -86,6 +90,12 @@ def mock_anglian_water_client( mock_client.account_config = {"meter_type": "SmartMeter"} mock_client.updated_data_callbacks = [] mock_client.validate_smart_meter.return_value = None + mock_client.api = AsyncMock(spec=API) + mock_client.api.get_associated_accounts.return_value = ( + await async_load_json_object_fixture( + hass, "multi_associated_accounts.json", DOMAIN + ) + ) yield mock_client diff --git a/tests/components/anglian_water/const.py b/tests/components/anglian_water/const.py index 399e7354753de0..b6a5bbfdb7c4b2 100644 --- a/tests/components/anglian_water/const.py +++ b/tests/components/anglian_water/const.py @@ -1,6 +1,6 @@ """Constants for the Anglian Water test suite.""" -ACCOUNT_NUMBER = "12345678" +ACCOUNT_NUMBER = "171266493" ACCESS_TOKEN = "valid_token" USERNAME = "hello@example.com" PASSWORD = "SecurePassword123" diff --git a/tests/components/anglian_water/fixtures/multi_associated_accounts.json b/tests/components/anglian_water/fixtures/multi_associated_accounts.json new file mode 100644 index 00000000000000..2d079c9c1e932f --- /dev/null +++ b/tests/components/anglian_water/fixtures/multi_associated_accounts.json @@ -0,0 +1,65 @@ +{ + "result": { + "property_count": 4, + "active": [ + { + "business_partner_number": 906922831, + "account_number": 171266493, + "address": { + "company_name": "", + "building_name": "", + "sub_building_name": "", 
+ "house_number": "10", + "street": "DOWNING STREET", + "locality": "", + "city": "LONDON", + "postcode": "SW1A 1AA" + } + }, + { + "business_partner_number": 906922832, + "account_number": 171266494, + "address": { + "company_name": "", + "building_name": "Historic Building A", + "sub_building_name": "", + "house_number": "10", + "street": "DOWNING STREET", + "locality": "", + "city": "LONDON", + "postcode": "SW1A 1AA" + } + }, + { + "business_partner_number": 906922832, + "account_number": 171266494, + "address": { + "company_name": "UK Government", + "building_name": "", + "sub_building_name": "", + "house_number": "10", + "street": "DOWNING STREET", + "locality": "", + "city": "LONDON", + "postcode": "SW1A 1AA" + } + } + ], + "inactive": [ + { + "business_partner_number": 100000000, + "account_number": 171200000, + "address": { + "company_name": "", + "building_name": "Finance Office", + "sub_building_name": "Heritage Wing", + "house_number": "50", + "street": "DOWNING STREET", + "locality": "", + "city": "LONDON", + "postcode": "SW1A 1AA" + } + } + ] + } +} diff --git a/tests/components/anglian_water/fixtures/single_associated_accounts.json b/tests/components/anglian_water/fixtures/single_associated_accounts.json new file mode 100644 index 00000000000000..04c2033767f7b9 --- /dev/null +++ b/tests/components/anglian_water/fixtures/single_associated_accounts.json @@ -0,0 +1,37 @@ +{ + "result": { + "property_count": 1, + "active": [ + { + "business_partner_number": 906922831, + "account_number": 171266493, + "address": { + "company_name": "", + "building_name": "", + "sub_building_name": "", + "house_number": "10", + "street": "DOWNING STREET", + "locality": "", + "city": "LONDON", + "postcode": "SW1A 1AA" + } + } + ], + "inactive": [ + { + "business_partner_number": 100000000, + "account_number": 171200000, + "address": { + "company_name": "", + "building_name": "Finance Office", + "sub_building_name": "Heritage Wing", + "house_number": "50", + "street": 
"DOWNING STREET", + "locality": "", + "city": "LONDON", + "postcode": "SW1A 1AA" + } + } + ] + } +} diff --git a/tests/components/anglian_water/test_config_flow.py b/tests/components/anglian_water/test_config_flow.py index d577a35880ad3a..8ce8710bf8b114 100644 --- a/tests/components/anglian_water/test_config_flow.py +++ b/tests/components/anglian_water/test_config_flow.py @@ -18,16 +18,16 @@ from .const import ACCESS_TOKEN, ACCOUNT_NUMBER, PASSWORD, USERNAME -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_load_json_object_fixture -async def test_full_flow( +async def test_multiple_account_flow( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_anglian_water_authenticator: AsyncMock, mock_anglian_water_client: AsyncMock, ) -> None: - """Test a full and successful config flow.""" + """Test the config flow when there are multiple accounts.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) @@ -40,6 +40,15 @@ async def test_full_flow( user_input={ CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "select_account" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER, }, ) @@ -53,6 +62,43 @@ async def test_full_flow( assert result["result"].unique_id == ACCOUNT_NUMBER +async def test_single_account_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_anglian_water_authenticator: AsyncMock, + mock_anglian_water_client: AsyncMock, +) -> None: + """Test the config flow when there is just a single account.""" + mock_anglian_water_client.api.get_associated_accounts.return_value = ( + await async_load_json_object_fixture( + hass, "single_associated_accounts.json", DOMAIN + ) + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result is not None 
+ assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == ACCOUNT_NUMBER + assert result["data"][CONF_USERNAME] == USERNAME + assert result["data"][CONF_PASSWORD] == PASSWORD + assert result["data"][CONF_ACCESS_TOKEN] == ACCESS_TOKEN + assert result["data"][CONF_ACCOUNT_NUMBER] == ACCOUNT_NUMBER + assert result["result"].unique_id == ACCOUNT_NUMBER + + async def test_already_configured( hass: HomeAssistant, mock_setup_entry: AsyncMock, @@ -75,6 +121,15 @@ async def test_already_configured( user_input={ CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "select_account" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER, }, ) @@ -109,7 +164,6 @@ async def test_auth_recover_exception( user_input={ CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, - CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER, }, ) @@ -126,6 +180,15 @@ async def test_auth_recover_exception( user_input={ CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "select_account" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER, }, ) @@ -161,19 +224,28 @@ async def test_account_recover_exception( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_anglian_water_client.validate_smart_meter.side_effect = exception_type - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, + }, + ) + + 
mock_anglian_water_client.validate_smart_meter.side_effect = exception_type + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "select_account" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER, }, ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" + assert result["step_id"] == "select_account" assert result["errors"] == {"base": expected_error} # Now test we can recover @@ -183,8 +255,6 @@ async def test_account_recover_exception( result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, CONF_ACCOUNT_NUMBER: ACCOUNT_NUMBER, }, ) diff --git a/tests/components/bluesound/snapshots/test_media_player.ambr b/tests/components/bluesound/snapshots/test_media_player.ambr index 24e04160e90c9e..73ae06945a8ed6 100644 --- a/tests/components/bluesound/snapshots/test_media_player.ambr +++ b/tests/components/bluesound/snapshots/test_media_player.ambr @@ -3,6 +3,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'player-name1111', + 'group_members': None, 'is_volume_muted': False, 'master': False, 'media_album_name': 'album', @@ -19,7 +20,7 @@ 'input3', '4', ]), - 'supported_features': , + 'supported_features': , 'volume_level': 0.1, }), 'context': , diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py index b534c7aafb0834..a68957e01a6269 100644 --- a/tests/components/bluesound/test_media_player.py +++ b/tests/components/bluesound/test_media_player.py @@ -13,18 +13,20 @@ from homeassistant.components.bluesound.const import ATTR_MASTER from homeassistant.components.bluesound.media_player import ( SERVICE_CLEAR_TIMER, - SERVICE_JOIN, SERVICE_SET_TIMER, ) from homeassistant.components.media_player import ( + ATTR_GROUP_MEMBERS, ATTR_INPUT_SOURCE, ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as 
MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_SELECT_SOURCE, + SERVICE_UNJOIN, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, @@ -291,7 +293,7 @@ async def test_join( setup_config_entry_secondary: None, player_mocks: PlayerMocks, ) -> None: - """Test the join action.""" + """Test the bluesound.join action.""" await hass.services.async_call( DOMAIN, SERVICE_JOIN, @@ -313,7 +315,7 @@ async def test_unjoin( setup_config_entry_secondary: None, player_mocks: PlayerMocks, ) -> None: - """Test the unjoin action.""" + """Test the bluesound.unjoin action.""" updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), leader=PairedPlayer("2.2.2.2", 11000), @@ -455,3 +457,100 @@ async def test_volume_up_from_6_to_7( ) player_mocks.player_data.player.volume.assert_called_once_with(level=7) + + +async def test_attr_group_members( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media player grouping for leader.""" + attr_group_members = hass.states.get("media_player.player_name1111").attributes.get( + ATTR_GROUP_MEMBERS + ) + assert attr_group_members is None + + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + followers=[PairedPlayer("2.2.2.2", 11000)], + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + attr_group_members = hass.states.get("media_player.player_name1111").attributes.get( + ATTR_GROUP_MEMBERS + ) + + assert attr_group_members == [ + "media_player.player_name1111", + "media_player.player_name2222", + ] + + +async def test_join_players( + hass: HomeAssistant, + setup_config_entry: None, + 
setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media_player.join action.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.player_name1111", + ATTR_GROUP_MEMBERS: "media_player.player_name2222", + }, + blocking=True, + ) + + player_mocks.player_data.player.add_followers.assert_called_once_with( + [PairedPlayer("2.2.2.2", 11000)] + ) + + +async def test_join_player_cannot_join_to_self( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test that joining to self is not allowed.""" + with pytest.raises(ServiceValidationError, match="Cannot join player to itself"): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.player_name1111", + ATTR_GROUP_MEMBERS: "media_player.player_name1111", + }, + blocking=True, + ) + + +async def test_unjoin_player( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + player_mocks: PlayerMocks, +) -> None: + """Test the media_player.unjoin action.""" + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + leader=PairedPlayer("2.2.2.2", 11000), + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, + {ATTR_ENTITY_ID: "media_player.player_name1111"}, + blocking=True, + ) + + player_mocks.player_data_secondary.player.remove_follower.assert_called_once_with( + "1.1.1.1", 11000 + ) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index bcefe2d47c5401..7dff435dc9be1a 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -811,6 +811,7 @@ async def 
_check_config_flow_result_translations( return key_prefix = "" + description_placeholders = result.get("description_placeholders") if isinstance(manager, ConfigEntriesFlowManager): category = "config" integration = flow.handler @@ -823,6 +824,12 @@ async def _check_config_flow_result_translations( issue_id = flow.issue_id issue = ir.async_get(flow.hass).async_get_issue(integration, issue_id) key_prefix = f"{issue.translation_key}.fix_flow." + description_placeholders = { + # Both are used in issue translations, and description_placeholders + # takes precedence over translation_placeholders + **(issue.translation_placeholders or {}), + **(description_placeholders or {}), + } else: return @@ -838,7 +845,7 @@ async def _check_config_flow_result_translations( category, integration, f"{key_prefix}step.{step_id}", - result["description_placeholders"], + description_placeholders, result["data_schema"], ignore_translations_for_mock_domains, ) @@ -852,7 +859,7 @@ async def _check_config_flow_result_translations( category, integration, f"{key_prefix}error.{error}", - result["description_placeholders"], + description_placeholders, ) return @@ -868,7 +875,7 @@ async def _check_config_flow_result_translations( category, integration, f"{key_prefix}abort.{result['reason']}", - result["description_placeholders"], + description_placeholders, ) diff --git a/tests/components/esphome/test_repairs.py b/tests/components/esphome/test_repairs.py index f64cb80695065a..7b83ed894bbb34 100644 --- a/tests/components/esphome/test_repairs.py +++ b/tests/components/esphome/test_repairs.py @@ -80,13 +80,6 @@ async def async_disconnect(*args, **kwargs) -> None: data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] - assert data["description_placeholders"] == { - "ip": "192.168.1.2", - "mac": "11:22:33:44:55:ab", - "model": "esp32-iso-poe", - "name": "test", - "stored_mac": "11:22:33:44:55:aa", - } assert data["type"] == FlowResultType.MENU assert data["step_id"] == 
"init" @@ -95,13 +88,6 @@ async def async_disconnect(*args, **kwargs) -> None: ) flow_id = data["flow_id"] - assert data["description_placeholders"] == { - "ip": "192.168.1.2", - "mac": "11:22:33:44:55:ab", - "model": "esp32-iso-poe", - "name": "test", - "stored_mac": "11:22:33:44:55:aa", - } assert data["type"] == FlowResultType.FORM assert data["step_id"] == "manual" @@ -198,13 +184,6 @@ async def async_disconnect(*args, **kwargs) -> None: data = await start_repair_fix_flow(client, DOMAIN, issue_id) flow_id = data["flow_id"] - assert data["description_placeholders"] == { - "ip": "test.local", - "mac": "11:22:33:44:55:ab", - "model": "esp32-iso-poe", - "name": "test", - "stored_mac": "11:22:33:44:55:aa", - } assert data["type"] == FlowResultType.MENU assert data["step_id"] == "init" @@ -213,13 +192,6 @@ async def async_disconnect(*args, **kwargs) -> None: ) flow_id = data["flow_id"] - assert data["description_placeholders"] == { - "ip": "test.local", - "mac": "11:22:33:44:55:ab", - "model": "esp32-iso-poe", - "name": "test", - "stored_mac": "11:22:33:44:55:aa", - } assert data["type"] == FlowResultType.FORM assert data["step_id"] == "migrate" diff --git a/tests/components/fluss/__init__.py b/tests/components/fluss/__init__.py new file mode 100644 index 00000000000000..1849ed37655eeb --- /dev/null +++ b/tests/components/fluss/__init__.py @@ -0,0 +1,102 @@ +"""Test Script for Fluss+ Initialisation.""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +from fluss_api import ( + FlussApiClient, + FlussApiClientAuthenticationError, + FlussApiClientCommunicationError, + FlussApiClientError, +) +import pytest + +from homeassistant.components.fluss import PLATFORMS +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady + +from tests.common import MockConfigEntry + + 
+async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("side_effect", "expected_exception"), + [ + (FlussApiClientAuthenticationError, ConfigEntryAuthFailed), + (FlussApiClientCommunicationError, ConfigEntryNotReady), + (FlussApiClientError, ConfigEntryNotReady), + ], +) +async def test_async_setup_entry_errors( + hass: HomeAssistant, + mock_config_entry: MagicMock, + side_effect: Exception, + expected_exception: type[Exception], +) -> None: + """Test setup errors.""" + with ( + patch("fluss_api.FlussApiClient", side_effect=side_effect), + pytest.raises(expected_exception), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +@pytest.mark.asyncio +async def test_async_setup_entry_success( + hass: HomeAssistant, + mock_config_entry: MagicMock, + mock_api_client: FlussApiClient, +) -> None: + """Test successful setup.""" + with patch("fluss_api.FlussApiClient", return_value=mock_api_client): + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.LOADED + hass.config_entries.async_forward_entry_setups.assert_called_once_with( + mock_config_entry, PLATFORMS + ) + + +@pytest.mark.asyncio +async def test_async_unload_entry( + hass: HomeAssistant, + mock_config_entry: MagicMock, + mock_api_client: FlussApiClient, +) -> None: + """Test unloading entry.""" + # Set up the config entry first to ensure it's in LOADED state + with patch("fluss_api.FlussApiClient", return_value=mock_api_client): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.LOADED + + # Test unloading + with patch( + 
"homeassistant.components.fluss.async_unload_platforms", return_value=True + ): + assert await hass.config_entries.async_unload(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.asyncio +async def test_platforms_forwarded( + hass: HomeAssistant, + mock_config_entry: MagicMock, + mock_api_client: FlussApiClient, +) -> None: + """Test platforms are forwarded correctly.""" + with patch("fluss_api.FlussApiClient", return_value=mock_api_client): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_config_entry.state is ConfigEntryState.LOADED + hass.config_entries.async_forward_entry_setups.assert_called_with( + mock_config_entry, [Platform.BUTTON] + ) diff --git a/tests/components/fluss/conftest.py b/tests/components/fluss/conftest.py new file mode 100644 index 00000000000000..72244f9da871c0 --- /dev/null +++ b/tests/components/fluss/conftest.py @@ -0,0 +1,55 @@ +"""Shared test fixtures for Fluss+ integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.fluss.const import DOMAIN +from homeassistant.const import CONF_API_KEY + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="My Fluss+ Devices", + data={CONF_API_KEY: "test_api_key"}, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.fluss.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_api_client() -> Generator[AsyncMock]: + """Mock Fluss API client with single device.""" + with ( + patch( + "homeassistant.components.fluss.coordinator.FlussApiClient", + autospec=True, + ) as 
mock_client, + patch( + "homeassistant.components.fluss.config_flow.FlussApiClient", + new=mock_client, + ), + ): + client = mock_client.return_value + client.async_get_devices.return_value = { + "devices": [ + {"deviceId": "2a303030sdj1", "deviceName": "Device 1"}, + {"deviceId": "ape93k9302j2", "deviceName": "Device 2"}, + ] + } + yield client diff --git a/tests/components/fluss/snapshots/test_button.ambr b/tests/components/fluss/snapshots/test_button.ambr new file mode 100644 index 00000000000000..18d9da96b248d1 --- /dev/null +++ b/tests/components/fluss/snapshots/test_button.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_buttons[button.device_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.device_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'fluss', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '2a303030sdj1', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.device_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device 1', + }), + 'context': , + 'entity_id': 'button.device_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[button.device_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 
'entity_id': 'button.device_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'fluss', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ape93k9302j2', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.device_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device 2', + }), + 'context': , + 'entity_id': 'button.device_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/fluss/test_button.py b/tests/components/fluss/test_button.py new file mode 100644 index 00000000000000..f76346046c9ef7 --- /dev/null +++ b/tests/components/fluss/test_button.py @@ -0,0 +1,69 @@ +"""Tests for the Fluss Buttons.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from fluss_api import FlussApiClient, FlussApiClientError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_buttons( + hass: HomeAssistant, + mock_api_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup with multiple devices.""" + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_button_press( + hass: HomeAssistant, + mock_api_client: FlussApiClient, + mock_config_entry: MockConfigEntry, +) -> None: + """Test successful button press.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.device_1"}, + blocking=True, + ) + + mock_api_client.async_trigger_device.assert_called_once_with("2a303030sdj1") + + +async def test_button_press_error( + hass: HomeAssistant, + mock_api_client: FlussApiClient, + mock_config_entry: MockConfigEntry, +) -> None: + """Test button press with API error.""" + await setup_integration(hass, mock_config_entry) + + mock_api_client.async_trigger_device.side_effect = FlussApiClientError("API Boom") + + with pytest.raises(HomeAssistantError, match="Failed to trigger device: API Boom"): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.device_1"}, + blocking=True, + ) diff --git a/tests/components/fluss/test_config_flow.py b/tests/components/fluss/test_config_flow.py new file mode 100644 index 00000000000000..e9717975f9e520 --- /dev/null +++ b/tests/components/fluss/test_config_flow.py @@ -0,0 +1,108 @@ +"""Tests for the Fluss+ config flow.""" + +from unittest.mock import AsyncMock + +from fluss_api import ( + FlussApiClientAuthenticationError, + FlussApiClientCommunicationError, +) +import pytest + +from homeassistant.components.fluss.const import DOMAIN +from homeassistant.config_entries import 
SOURCE_USER +from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, mock_api_client: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test full config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_API_KEY: "valid_api_key"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "My Fluss+ Devices" + assert result["data"] == {CONF_API_KEY: "valid_api_key"} + + +@pytest.mark.parametrize( + ("exception", "expected_error"), + [ + (FlussApiClientAuthenticationError, "invalid_auth"), + (FlussApiClientCommunicationError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_step_user_errors( + hass: HomeAssistant, + mock_api_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + expected_error: str, +) -> None: + """Test error cases for user step with recovery.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + user_input = {CONF_API_KEY: "some_api_key"} + + mock_api_client.async_get_devices.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_API_KEY: "valid_api_key"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": expected_error} + + mock_api_client.async_get_devices.side_effect = None + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_api_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test error cases for user step with recovery.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "test_api_key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/fluss/test_init.py b/tests/components/fluss/test_init.py new file mode 100644 index 00000000000000..e7f6b3691dee0b --- /dev/null +++ b/tests/components/fluss/test_init.py @@ -0,0 +1,56 @@ +"""Test script for Fluss+ integration initialization.""" + +from unittest.mock import AsyncMock + +from fluss_api import ( + FlussApiClientAuthenticationError, + FlussApiClientCommunicationError, + FlussApiClientError, +) +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_api_client: AsyncMock, +) -> None: + """Test the Fluss configuration entry loading/unloading.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert len(mock_api_client.async_get_devices.mock_calls) == 1 + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("exception", "state"), + [ + (FlussApiClientAuthenticationError, ConfigEntryState.SETUP_ERROR), + (FlussApiClientCommunicationError, ConfigEntryState.SETUP_RETRY), + (FlussApiClientError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_async_setup_entry_authentication_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_api_client: AsyncMock, + exception: Exception, + state: ConfigEntryState, +) -> None: + """Test that an authentication error during setup leads to SETUP_ERROR state.""" + mock_api_client.async_get_devices.side_effect = exception + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is state diff --git a/tests/components/homewizard/fixtures/HWE-P1-no-batteries/batteries.json b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/batteries.json new file mode 100644 index 00000000000000..3e68be43094d7a --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/batteries.json @@ -0,0 +1,9 @@ +{ + "mode": "zero", + "permissions": ["charge_allowed", "discharge_allowed"], + "battery_count": 0, + "power_w": 0, + "target_power_w": 0, + "max_consumption_w": 0, + "max_production_w": 0 +} diff --git a/tests/components/homewizard/fixtures/HWE-P1-no-batteries/data.json 
b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/data.json new file mode 100644 index 00000000000000..b221ad6f80470b --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/data.json @@ -0,0 +1,82 @@ +{ + "wifi_ssid": "My Wi-Fi", + "wifi_strength": 100, + "smr_version": 50, + "meter_model": "ISKRA 2M550T-101", + "unique_id": "00112233445566778899AABBCCDDEEFF", + "active_tariff": 2, + "total_power_import_kwh": 13779.338, + "total_power_import_t1_kwh": 10830.511, + "total_power_import_t2_kwh": 2948.827, + "total_power_import_t3_kwh": 2948.827, + "total_power_import_t4_kwh": 2948.827, + "total_power_export_kwh": 13086.777, + "total_power_export_t1_kwh": 4321.333, + "total_power_export_t2_kwh": 8765.444, + "total_power_export_t3_kwh": 8765.444, + "total_power_export_t4_kwh": 8765.444, + "active_power_w": -123, + "active_power_l1_w": -123, + "active_power_l2_w": 456, + "active_power_l3_w": 123.456, + "active_voltage_l1_v": 230.111, + "active_voltage_l2_v": 230.222, + "active_voltage_l3_v": 230.333, + "active_current_l1_a": -4, + "active_current_l2_a": 2, + "active_current_l3_a": 0, + "active_frequency_hz": 50, + "voltage_sag_l1_count": 1, + "voltage_sag_l2_count": 2, + "voltage_sag_l3_count": 3, + "voltage_swell_l1_count": 4, + "voltage_swell_l2_count": 5, + "voltage_swell_l3_count": 6, + "any_power_fail_count": 4, + "long_power_fail_count": 5, + "total_gas_m3": 1122.333, + "gas_timestamp": 210314112233, + "gas_unique_id": "01FFEEDDCCBBAA99887766554433221100", + "active_power_average_w": 123.0, + "montly_power_peak_w": 1111.0, + "montly_power_peak_timestamp": 230101080010, + "active_liter_lpm": 12.345, + "total_liter_m3": 1234.567, + "external": [ + { + "unique_id": "47303031", + "type": "gas_meter", + "timestamp": 230125220957, + "value": 111.111, + "unit": "m3" + }, + { + "unique_id": "57303031", + "type": "water_meter", + "timestamp": 230125220957, + "value": 222.222, + "unit": "m3" + }, + { + "unique_id": "5757303031", + "type": 
"warm_water_meter", + "timestamp": 230125220957, + "value": 333.333, + "unit": "m3" + }, + { + "unique_id": "48303031", + "type": "heat_meter", + "timestamp": 230125220957, + "value": 444.444, + "unit": "GJ" + }, + { + "unique_id": "4948303031", + "type": "inlet_heat_meter", + "timestamp": 230125220957, + "value": 555.555, + "unit": "m3" + } + ] +} diff --git a/tests/components/homewizard/fixtures/HWE-P1-no-batteries/device.json b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/device.json new file mode 100644 index 00000000000000..a444aa81c30b69 --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/device.json @@ -0,0 +1,7 @@ +{ + "product_type": "HWE-P1", + "product_name": "P1 meter", + "serial": "5c2fafabcdef", + "firmware_version": "4.19", + "api_version": "v1" +} diff --git a/tests/components/homewizard/fixtures/HWE-P1-no-batteries/system.json b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/system.json new file mode 100644 index 00000000000000..362491b3519940 --- /dev/null +++ b/tests/components/homewizard/fixtures/HWE-P1-no-batteries/system.json @@ -0,0 +1,3 @@ +{ + "cloud_enabled": true +} diff --git a/tests/components/homewizard/test_select.py b/tests/components/homewizard/test_select.py index c885fcb311ddc3..9ebd815f036d30 100644 --- a/tests/components/homewizard/test_select.py +++ b/tests/components/homewizard/test_select.py @@ -77,7 +77,6 @@ async def test_entities_not_created_for_device( ("HWE-P1", "select.device_battery_group_mode"), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select_entity_snapshots( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -115,7 +114,6 @@ async def test_select_entity_snapshots( ("HWE-P1", "select.device_battery_group_mode", "zero", Batteries.Mode.ZERO), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select_set_option( hass: HomeAssistant, mock_homewizardenergy: MagicMock, @@ -144,7 +142,6 @@ 
async def test_select_set_option( ("HWE-P1", "select.device_battery_group_mode", "to_full"), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select_request_error( hass: HomeAssistant, mock_homewizardenergy: MagicMock, @@ -174,7 +171,6 @@ async def test_select_request_error( ("HWE-P1", "select.device_battery_group_mode", "to_full"), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select_unauthorized_error( hass: HomeAssistant, mock_homewizardenergy: MagicMock, @@ -206,7 +202,6 @@ async def test_select_unauthorized_error( ("select.device_battery_group_mode", "combined"), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select_unreachable( hass: HomeAssistant, mock_homewizardenergy: MagicMock, @@ -230,7 +225,6 @@ async def test_select_unreachable( ("HWE-P1", "select.device_battery_group_mode"), ], ) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_select_multiple_state_changes( hass: HomeAssistant, mock_homewizardenergy: MagicMock, @@ -275,7 +269,7 @@ async def test_select_multiple_state_changes( ("device_fixture", "entity_ids"), [ ( - "HWE-P1", + "HWE-P1-no-batteries", [ "select.device_battery_group_mode", ], diff --git a/tests/components/knx/fixtures/config_store_scene.json b/tests/components/knx/fixtures/config_store_scene.json new file mode 100644 index 00000000000000..a1284a6469b000 --- /dev/null +++ b/tests/components/knx/fixtures/config_store_scene.json @@ -0,0 +1,24 @@ +{ + "version": 2, + "minor_version": 2, + "key": "knx/config_store.json", + "data": { + "entities": { + "scene": { + "knx_es_01KCXJ181N1TEDNC81WXEMXRNS": { + "entity": { + "name": "test", + "device_info": null, + "entity_category": null + }, + "knx": { + "ga_scene": { + "write": "1/1/1" + }, + "scene_number": 12 + } + } + } + } + } +} diff --git a/tests/components/knx/snapshots/test_websocket.ambr b/tests/components/knx/snapshots/test_websocket.ambr index 
eb440c78f5c1ae..e5b68aba38da7b 100644 --- a/tests/components/knx/snapshots/test_websocket.ambr +++ b/tests/components/knx/snapshots/test_websocket.ambr @@ -1576,6 +1576,50 @@ 'type': 'result', }) # --- +# name: test_knx_get_schema[scene] + dict({ + 'id': 1, + 'result': list([ + dict({ + 'name': 'ga_scene', + 'options': dict({ + 'passive': False, + 'state': False, + 'validDPTs': list([ + dict({ + 'main': 17, + 'sub': 1, + }), + dict({ + 'main': 18, + 'sub': 1, + }), + ]), + 'write': dict({ + 'required': True, + }), + }), + 'required': True, + 'type': 'knx_group_address', + }), + dict({ + 'name': 'scene_number', + 'required': True, + 'selector': dict({ + 'number': dict({ + 'max': 64.0, + 'min': 1.0, + 'mode': 'box', + 'step': 1.0, + }), + }), + 'type': 'ha_selector', + }), + ]), + 'success': True, + 'type': 'result', + }) +# --- # name: test_knx_get_schema[sensor] dict({ 'id': 1, diff --git a/tests/components/knx/test_repairs.py b/tests/components/knx/test_repairs.py index 54cc3c90e66540..3ea66d9465cca6 100644 --- a/tests/components/knx/test_repairs.py +++ b/tests/components/knx/test_repairs.py @@ -77,14 +77,13 @@ async def test_data_secure_group_key_issue_repair_flow( knx.receive_data_secure_issue("11/0/0", source="1.0.1") knx.receive_data_secure_issue("1/2/5", source="1.0.10") knx.receive_data_secure_issue("1/2/5", source="1.0.1") - _placeholders = { - "addresses": "`1/2/5` from 1.0.1, 1.0.10\n`11/0/0` from 1.0.1", # check sorting - "interface": "0.0.0", - } issue_registry = ir.async_get(hass) issue = issue_registry.async_get_issue(DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY) assert issue is not None - assert issue.translation_placeholders == _placeholders + assert issue.translation_placeholders == { + "addresses": "`1/2/5` from 1.0.1, 1.0.10\n`11/0/0` from 1.0.1", # check sorting + "interface": "0.0.0", + } issues = await get_repairs(hass, hass_ws_client) assert issues @@ -98,7 +97,6 @@ async def test_data_secure_group_key_issue_repair_flow( flow_id = 
flow["flow_id"] assert flow["type"] == FlowResultType.FORM assert flow["step_id"] == "secure_knxkeys" - assert flow["description_placeholders"] == _placeholders # test error handling with patch_file_upload( diff --git a/tests/components/knx/test_scene.py b/tests/components/knx/test_scene.py index 7dc850b4843940..4156657f9669a7 100644 --- a/tests/components/knx/test_scene.py +++ b/tests/components/knx/test_scene.py @@ -2,10 +2,16 @@ from homeassistant.components.knx.const import KNX_ADDRESS from homeassistant.components.knx.schema import SceneSchema -from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, EntityCategory +from homeassistant.const import ( + CONF_ENTITY_CATEGORY, + CONF_NAME, + EntityCategory, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import async_capture_events @@ -56,3 +62,35 @@ async def test_activate_knx_scene( # different scene number - should not be recorded await knx.receive_write("1/1/1", (0x00,)) assert len(events) == 4 + + +async def test_scene_ui_create( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a scene.""" + await knx.setup_integration() + await create_ui_entity( + platform=Platform.SCENE, + entity_data={"name": "test"}, + knx_data={ + "ga_scene": {"write": "1/1/1"}, + "scene_number": 5, + }, + ) + # activate scene from HA + await hass.services.async_call( + "scene", "turn_on", {"entity_id": "scene.test"}, blocking=True + ) + await knx.assert_write("1/1/1", (0x04,)) # raw scene number is 0-based + + +async def test_scene_ui_load(hass: HomeAssistant, knx: KNXTestKit) -> None: + """Test loading a scene from storage.""" + await knx.setup_integration(config_store_fixture="config_store_scene.json") + # activate scene from HA + await hass.services.async_call( + "scene", "turn_on", {"entity_id": "scene.test"}, 
blocking=True + ) + await knx.assert_write("1/1/1", (0x0B,)) diff --git a/tests/components/libre_hardware_monitor/fixtures/libre_hardware_monitor.json b/tests/components/libre_hardware_monitor/fixtures/libre_hardware_monitor.json index 0e4c6309ba3403..640c507c7da9ff 100644 --- a/tests/components/libre_hardware_monitor/fixtures/libre_hardware_monitor.json +++ b/tests/components/libre_hardware_monitor/fixtures/libre_hardware_monitor.json @@ -8,7 +8,7 @@ "Children": [ { "id": 1, - "Text": "GAMING", + "Text": "GAMING-PC", "Min": "", "Value": "", "Max": "", diff --git a/tests/components/libre_hardware_monitor/snapshots/test_sensor.ambr b/tests/components/libre_hardware_monitor/snapshots/test_sensor.ambr index 705d4f4d887836..e61fe131bd3588 100644 --- a/tests/components/libre_hardware_monitor/snapshots/test_sensor.ambr +++ b/tests/components/libre_hardware_monitor/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_core_tctl_tdie_temperature-entry] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_core_tctl_tdie_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -14,7 +14,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -36,24 +36,24 @@ 'unit_of_measurement': '°C', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_core_tctl_tdie_temperature-state] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_core_tctl_tdie_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Core (Tctl/Tdie) Temperature', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Core (Tctl/Tdie) Temperature', 'max_value': '69.1', 'min_value': '39.4', 'state_class': 
, 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '55.5', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_cpu_total_load-entry] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_cpu_total_load-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -68,7 +68,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_cpu_total_load', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_cpu_total_load', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -90,24 +90,24 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_cpu_total_load-state] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_cpu_total_load-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D CPU Total Load', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D CPU Total Load', 'max_value': '55.8', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_cpu_total_load', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_cpu_total_load', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '9.1', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_package_power-entry] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_package_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -122,7 +122,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_power', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_power', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ 
-144,24 +144,24 @@ 'unit_of_measurement': 'W', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_package_power-state] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_package_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Package Power', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Package Power', 'max_value': '70.1', 'min_value': '25.1', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_power', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_power', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '39.6', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_package_temperature-entry] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -176,7 +176,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -198,24 +198,24 @@ 'unit_of_measurement': '°C', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_package_temperature-state] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Package Temperature', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Package Temperature', 'max_value': '74.0', 'min_value': '38.4', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '52.8', }) # --- -# name: 
test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_vddcr_soc_voltage-entry] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_soc_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -230,7 +230,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_soc_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_soc_voltage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -252,24 +252,24 @@ 'unit_of_measurement': 'V', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_vddcr_soc_voltage-state] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_soc_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D VDDCR SoC Voltage', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D VDDCR SoC Voltage', 'max_value': '1.306', 'min_value': '1.305', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_soc_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_soc_voltage', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.305', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_vddcr_voltage-entry] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -284,7 +284,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_voltage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -306,24 +306,24 @@ 'unit_of_measurement': 'V', }) # --- -# name: test_sensors_are_created[sensor.amd_ryzen_7_7800x3d_vddcr_voltage-state] +# name: test_sensors_are_created[sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_voltage-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D VDDCR Voltage', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D VDDCR Voltage', 'max_value': '1.173', 'min_value': '0.452', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_voltage', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.083', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -338,7 +338,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -360,24 +360,24 @@ 'unit_of_measurement': 'V', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) +12V Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) +12V Voltage', 'max_value': '12.096', 'min_value': '12.048', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '12.072', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage-entry] +# name: 
test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -392,7 +392,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -414,24 +414,24 @@ 'unit_of_measurement': 'V', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) +5V Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) +5V Voltage', 'max_value': '5.050', 'min_value': '5.020', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '5.030', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -446,7 +446,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -468,24 +468,24 @@ 'unit_of_measurement': 'RPM', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan-state] +# name: 
test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -500,7 +500,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -522,24 +522,24 @@ 'unit_of_measurement': '°C', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Temperature', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Temperature', 'max_value': '68.0', 'min_value': '39.0', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '55.0', }) # --- 
-# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -554,7 +554,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -576,24 +576,24 @@ 'unit_of_measurement': 'RPM', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -608,7 +608,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -630,23 +630,23 @@ 'unit_of_measurement': None, }) # --- -# name: 
test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan', 'max_value': None, 'min_value': None, 'state_class': , }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'unknown', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -661,7 +661,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -683,24 +683,24 @@ 'unit_of_measurement': '°C', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) System Temperature', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Temperature', 'max_value': '46.5', 'min_value': '32.5', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', + 'entity_id': 
'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '45.5', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage-entry] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -715,7 +715,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -737,24 +737,24 @@ 'unit_of_measurement': 'V', }) # --- -# name: test_sensors_are_created[sensor.msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage-state] +# name: test_sensors_are_created[sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) Vcore Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Vcore Voltage', 'max_value': '1.318', 'min_value': '1.310', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.312', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_core_clock-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_clock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -769,7 +769,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_clock', 
'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -791,24 +791,24 @@ 'unit_of_measurement': 'MHz', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_core_clock-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_clock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Clock', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Clock', 'max_value': '2805.0', 'min_value': '210.0', 'state_class': , 'unit_of_measurement': 'MHz', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_clock', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2805.0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_core_load-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_load-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -823,7 +823,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_load', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -845,24 +845,24 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_core_load-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_load-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Load', 'max_value': '19.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_load', 
+ 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_load', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '5.0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_core_temperature-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -877,7 +877,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -899,24 +899,24 @@ 'unit_of_measurement': '°C', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_core_temperature-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Temperature', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Temperature', 'max_value': '37.0', 'min_value': '25.0', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '36.0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_fan_1_fan-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -931,7 +931,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', + 'entity_id': 
'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -953,24 +953,24 @@ 'unit_of_measurement': 'RPM', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_fan_1_fan-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_fan_2_fan-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -985,7 +985,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1007,24 +1007,24 @@ 'unit_of_measurement': 'RPM', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_fan_2_fan-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 
'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1039,7 +1039,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1061,24 +1061,24 @@ 'unit_of_measurement': '°C', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Hot Spot Temperature', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Hot Spot Temperature', 'max_value': '43.3', 'min_value': '32.5', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '43.0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_memory_clock-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_clock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1093,7 +1093,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 
'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_clock', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1115,24 +1115,24 @@ 'unit_of_measurement': 'MHz', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_memory_clock-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_clock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Memory Clock', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Memory Clock', 'max_value': '11502.0', 'min_value': '405.0', 'state_class': , 'unit_of_measurement': 'MHz', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_clock', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '11252.0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_memory_controller_load-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_controller_load-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1147,7 +1147,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1169,24 +1169,24 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_memory_controller_load-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_controller_load-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Memory Controller Load', 
+ 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Memory Controller Load', 'max_value': '49.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.0', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_package_power-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_package_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1201,7 +1201,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_package_power', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_package_power', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1223,24 +1223,24 @@ 'unit_of_measurement': 'W', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_package_power-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_package_power-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Package Power', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Package Power', 'max_value': '66.6', 'min_value': '4.1', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_package_power', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_package_power', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '59.6', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput-entry] +# name: 
test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1255,7 +1255,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1277,24 +1277,24 @@ 'unit_of_measurement': 'MB/s', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU PCIe Tx Throughput', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU PCIe Tx Throughput', 'max_value': '2422.8', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': 'MB/s', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '166.1', }) # --- -# name: test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_video_engine_load-entry] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_video_engine_load-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1309,7 +1309,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_video_engine_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_video_engine_load', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1331,17 +1331,17 @@ 'unit_of_measurement': '%', }) # --- -# name: 
test_sensors_are_created[sensor.nvidia_geforce_rtx_4080_super_gpu_video_engine_load-state] +# name: test_sensors_are_created[sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_video_engine_load-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Video Engine Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Video Engine Load', 'max_value': '99.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_video_engine_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_video_engine_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1352,14 +1352,14 @@ list([ StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) +12V Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) +12V Voltage', 'max_value': '12.096', 'min_value': '12.048', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1367,14 +1367,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) +5V Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) +5V Voltage', 'max_value': '5.050', 'min_value': '5.020', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1382,14 +1382,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) Vcore Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M 
MORTAR WIFI (MS-7D76) Vcore Voltage', 'max_value': '1.318', 'min_value': '1.310', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1397,14 +1397,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Temperature', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Temperature', 'max_value': '68.0', 'min_value': '39.0', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1412,14 +1412,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) System Temperature', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Temperature', 'max_value': '46.5', 'min_value': '32.5', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1427,14 +1427,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', 'last_changed': , 'last_reported': , 
'last_updated': , @@ -1442,14 +1442,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1457,13 +1457,13 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan', 'max_value': None, 'min_value': None, 'state_class': , }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1471,14 +1471,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D VDDCR Voltage', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D VDDCR Voltage', 'max_value': '1.173', 'min_value': '0.452', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1486,14 +1486,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D VDDCR SoC Voltage', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D VDDCR SoC Voltage', 'max_value': '1.306', 'min_value': '1.305', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_soc_voltage', + 'entity_id': 
'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_soc_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1501,14 +1501,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Package Power', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Package Power', 'max_value': '70.1', 'min_value': '25.1', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_power', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_power', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1516,14 +1516,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Core (Tctl/Tdie) Temperature', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Core (Tctl/Tdie) Temperature', 'max_value': '69.1', 'min_value': '39.4', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1531,14 +1531,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Package Temperature', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Package Temperature', 'max_value': '74.0', 'min_value': '38.4', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1546,14 +1546,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D CPU Total Load', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D CPU Total Load', 'max_value': '55.8', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 
'sensor.amd_ryzen_7_7800x3d_cpu_total_load', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_cpu_total_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1561,14 +1561,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Package Power', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Package Power', 'max_value': '66.6', 'min_value': '4.1', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_package_power', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_package_power', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1576,14 +1576,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Clock', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Clock', 'max_value': '2805.0', 'min_value': '210.0', 'state_class': , 'unit_of_measurement': 'MHz', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_clock', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1591,14 +1591,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Memory Clock', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Memory Clock', 'max_value': '11502.0', 'min_value': '405.0', 'state_class': , 'unit_of_measurement': 'MHz', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_clock', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1606,14 +1606,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Temperature', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU 
Core Temperature', 'max_value': '37.0', 'min_value': '25.0', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1621,14 +1621,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Hot Spot Temperature', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Hot Spot Temperature', 'max_value': '43.3', 'min_value': '32.5', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1636,14 +1636,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Load', 'max_value': '19.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1651,14 +1651,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Memory Controller Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Memory Controller Load', 'max_value': '49.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', 'last_changed': , 'last_reported': , 
'last_updated': , @@ -1666,14 +1666,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Video Engine Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Video Engine Load', 'max_value': '99.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_video_engine_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_video_engine_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1681,14 +1681,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1696,14 +1696,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1711,14 +1711,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU PCIe Tx Throughput', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU PCIe Tx Throughput', 'max_value': '2422.8', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': 'MB/s', }), 'context': , - 'entity_id': 
'sensor.nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1730,14 +1730,14 @@ list([ StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) +12V Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) +12V Voltage', 'max_value': '12.096', 'min_value': '12.048', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_12v_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1745,14 +1745,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) +5V Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) +5V Voltage', 'max_value': '5.050', 'min_value': '5.020', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_5v_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1760,14 +1760,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) Vcore Voltage', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Vcore Voltage', 'max_value': '1.318', 'min_value': '1.310', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_vcore_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1775,14 +1775,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Temperature', + 'friendly_name': 
'[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Temperature', 'max_value': '68.0', 'min_value': '39.0', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1790,14 +1790,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) System Temperature', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Temperature', 'max_value': '46.5', 'min_value': '32.5', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1805,14 +1805,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) CPU Fan Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_cpu_fan_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1820,14 +1820,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) Pump Fan Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_pump_fan_fan', 'last_changed': , 'last_reported': 
, 'last_updated': , @@ -1835,13 +1835,13 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan', + 'friendly_name': '[GAMING-PC] MSI MAG B650M MORTAR WIFI (MS-7D76) System Fan #1 Fan', 'max_value': None, 'min_value': None, 'state_class': , }), 'context': , - 'entity_id': 'sensor.msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_msi_mag_b650m_mortar_wifi_ms_7d76_system_fan_1_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1849,14 +1849,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D VDDCR Voltage', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D VDDCR Voltage', 'max_value': '1.173', 'min_value': '0.452', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1864,14 +1864,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D VDDCR SoC Voltage', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D VDDCR SoC Voltage', 'max_value': '1.306', 'min_value': '1.305', 'state_class': , 'unit_of_measurement': 'V', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_vddcr_soc_voltage', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_vddcr_soc_voltage', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1879,14 +1879,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Package Power', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Package Power', 'max_value': '70.1', 'min_value': '25.1', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_power', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_power', 'last_changed': , 'last_reported': , 
'last_updated': , @@ -1894,14 +1894,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Core (Tctl/Tdie) Temperature', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Core (Tctl/Tdie) Temperature', 'max_value': '69.1', 'min_value': '39.4', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_core_tctl_tdie_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1909,14 +1909,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D Package Temperature', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D Package Temperature', 'max_value': '74.0', 'min_value': '38.4', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_package_temperature', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1924,14 +1924,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'AMD Ryzen 7 7800X3D CPU Total Load', + 'friendly_name': '[GAMING-PC] AMD Ryzen 7 7800X3D CPU Total Load', 'max_value': '55.8', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.amd_ryzen_7_7800x3d_cpu_total_load', + 'entity_id': 'sensor.gaming_pc_amd_ryzen_7_7800x3d_cpu_total_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1939,14 +1939,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Package Power', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Package Power', 'max_value': '66.6', 'min_value': '4.1', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_package_power', + 'entity_id': 
'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_package_power', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1954,14 +1954,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Clock', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Clock', 'max_value': '2805.0', 'min_value': '210.0', 'state_class': , 'unit_of_measurement': 'MHz', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_clock', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1969,14 +1969,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Memory Clock', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Memory Clock', 'max_value': '11502.0', 'min_value': '405.0', 'state_class': , 'unit_of_measurement': 'MHz', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_clock', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_clock', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1984,14 +1984,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Temperature', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Temperature', 'max_value': '37.0', 'min_value': '25.0', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1999,14 +1999,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Hot Spot Temperature', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Hot Spot Temperature', 
'max_value': '43.3', 'min_value': '32.5', 'state_class': , 'unit_of_measurement': '°C', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_hot_spot_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2014,14 +2014,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Core Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Core Load', 'max_value': '19.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_core_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_core_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2029,14 +2029,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Memory Controller Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Memory Controller Load', 'max_value': '49.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_memory_controller_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2044,14 +2044,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Video Engine Load', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Video Engine Load', 'max_value': '99.0', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_video_engine_load', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_video_engine_load', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2059,14 
+2059,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 1 Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_1_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2074,14 +2074,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU Fan 2 Fan', 'max_value': '0', 'min_value': '0', 'state_class': , 'unit_of_measurement': 'RPM', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_fan_2_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2089,14 +2089,14 @@ }), StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'NVIDIA GeForce RTX 4080 SUPER GPU PCIe Tx Throughput', + 'friendly_name': '[GAMING-PC] NVIDIA GeForce RTX 4080 SUPER GPU PCIe Tx Throughput', 'max_value': '2422.8', 'min_value': '0.0', 'state_class': , 'unit_of_measurement': 'MB/s', }), 'context': , - 'entity_id': 'sensor.nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', + 'entity_id': 'sensor.gaming_pc_nvidia_geforce_rtx_4080_super_gpu_pcie_tx_throughput', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/libre_hardware_monitor/test_config_flow.py b/tests/components/libre_hardware_monitor/test_config_flow.py index 9fcab5daeba928..cc82a170d07631 100644 --- a/tests/components/libre_hardware_monitor/test_config_flow.py +++ b/tests/components/libre_hardware_monitor/test_config_flow.py @@ -42,7 +42,7 @@ async def test_create_entry( mock_config_entry = result["result"] assert ( 
mock_config_entry.title - == f"{VALID_CONFIG[CONF_HOST]}:{VALID_CONFIG[CONF_PORT]}" + == f"GAMING-PC ({VALID_CONFIG[CONF_HOST]}:{VALID_CONFIG[CONF_PORT]})" ) assert mock_config_entry.data == VALID_CONFIG diff --git a/tests/components/libre_hardware_monitor/test_init.py b/tests/components/libre_hardware_monitor/test_init.py index 67f6e96b74f9d0..851fdb768ddcba 100644 --- a/tests/components/libre_hardware_monitor/test_init.py +++ b/tests/components/libre_hardware_monitor/test_init.py @@ -88,3 +88,8 @@ async def test_migration_to_unique_ids( assert ( entity_registry.async_get_entity_id("sensor", DOMAIN, legacy_entity_id) is None ) + + updated_config_entry = hass.config_entries.async_get_entry( + legacy_config_entry_v1.entry_id + ) + assert updated_config_entry.version == 2 diff --git a/tests/components/libre_hardware_monitor/test_sensor.py b/tests/components/libre_hardware_monitor/test_sensor.py index f3388d77b80afe..326ad9581774fe 100644 --- a/tests/components/libre_hardware_monitor/test_sensor.py +++ b/tests/components/libre_hardware_monitor/test_sensor.py @@ -92,7 +92,7 @@ async def test_sensors_are_updated( """Test sensors are updated with properly formatted values.""" await init_integration(hass, mock_config_entry) - entity_id = "sensor.amd_ryzen_7_7800x3d_package_temperature" + entity_id = "sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature" state = hass.states.get(entity_id) assert state @@ -128,7 +128,7 @@ async def test_sensor_state_is_unknown_when_no_sensor_data_is_provided( """Test sensor state is unknown when sensor data is missing.""" await init_integration(hass, mock_config_entry) - entity_id = "sensor.amd_ryzen_7_7800x3d_package_temperature" + entity_id = "sensor.gaming_pc_amd_ryzen_7_7800x3d_package_temperature" state = hass.states.get(entity_id) @@ -200,6 +200,7 @@ async def _mock_orphaned_device( previous_data = mock_lhm_client.get_data.return_value mock_lhm_client.get_data.return_value = LibreHardwareMonitorData( + 
computer_name=mock_lhm_client.get_data.return_value.computer_name, main_device_ids_and_names=MappingProxyType( { device_id: name @@ -230,6 +231,7 @@ async def test_integration_does_not_log_new_devices_on_first_refresh( ) -> None: """Test that initial data update does not cause warning about new devices.""" mock_lhm_client.get_data.return_value = LibreHardwareMonitorData( + computer_name=mock_lhm_client.get_data.return_value.computer_name, main_device_ids_and_names=MappingProxyType( { **mock_lhm_client.get_data.return_value.main_device_ids_and_names, diff --git a/tests/components/sonos/test_init.py b/tests/components/sonos/test_init.py index c1b98b2ec60fc4..0c655de0749283 100644 --- a/tests/components/sonos/test_init.py +++ b/tests/components/sonos/test_init.py @@ -1,22 +1,26 @@ """Tests for the Sonos config flow.""" import asyncio +from http import HTTPStatus import logging from unittest.mock import Mock, PropertyMock, patch from freezegun.api import FrozenDateTimeFactory import pytest +from requests import Response +from requests.exceptions import HTTPError from homeassistant import config_entries from homeassistant.components import sonos from homeassistant.components.sonos.const import ( DISCOVERY_INTERVAL, SONOS_SPEAKER_ACTIVITY, + UPNP_ISSUE_ID, ) from homeassistant.components.sonos.exception import SonosUpdateError from homeassistant.core import HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from homeassistant.setup import async_setup_component @@ -24,7 +28,7 @@ from .conftest import MockSoCo, SoCoMockFactory -from tests.common import async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed async def 
test_creating_entry_sets_up_media_player( @@ -85,6 +89,83 @@ async def test_not_configuring_sonos_not_creates_entry(hass: HomeAssistant) -> N assert len(mock_setup.mock_calls) == 0 +async def test_upnp_disabled_discovery( + hass: HomeAssistant, config_entry: MockConfigEntry, soco: MockSoCo +) -> None: + """Test issue creation when discovery processing fails with 403.""" + + resp = Response() + resp.status_code = HTTPStatus.FORBIDDEN + http_error = HTTPError(response=resp) + + with patch( + "tests.components.sonos.conftest.MockSoCo.household_id", + new_callable=PropertyMock, + create=True, + side_effect=http_error, + ): + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + issue_registry = ir.async_get(hass) + assert ( + issue_registry.async_get_issue( + sonos.DOMAIN, f"{UPNP_ISSUE_ID}_{soco.ip_address}" + ) + is not None + ) + + +async def test_upnp_disabled_manual_hosts( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, +) -> None: + """Test issue creation when manual host processing fails with 403.""" + + resp = Response() + resp.status_code = HTTPStatus.FORBIDDEN + http_error = HTTPError(response=resp) + soco = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Bedroom") + + with patch.object( + type(soco), + "household_id", + new_callable=PropertyMock, + create=True, + side_effect=http_error, + ): + await _setup_hass(hass) + + issue_registry = ir.async_get(hass) + issue = issue_registry.async_get_issue( + sonos.DOMAIN, f"{UPNP_ISSUE_ID}_{soco.ip_address}" + ) + assert issue is not None + assert issue.translation_placeholders.get("device_ip") == "10.10.10.1" + + +async def test_discovery_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test exception handling during discovery processing.""" + + with patch( + "tests.components.sonos.conftest.MockSoCo.household_id", + 
new_callable=PropertyMock, + create=True, + side_effect=OSError("This is a test"), + ): + caplog.set_level(logging.ERROR) + caplog.clear() + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + assert "This is a test" in caplog.text + + async def test_async_poll_manual_hosts_warnings( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/stiebel_eltron/conftest.py b/tests/components/stiebel_eltron/conftest.py index 7ee2612efa7390..d2be5c3865313e 100644 --- a/tests/components/stiebel_eltron/conftest.py +++ b/tests/components/stiebel_eltron/conftest.py @@ -5,7 +5,7 @@ import pytest -from homeassistant.components.stiebel_eltron import DOMAIN +from homeassistant.components.stiebel_eltron.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT from tests.common import MockConfigEntry diff --git a/tests/components/stiebel_eltron/test_init.py b/tests/components/stiebel_eltron/test_init.py index f8413c41461746..0994017d75b81f 100644 --- a/tests/components/stiebel_eltron/test_init.py +++ b/tests/components/stiebel_eltron/test_init.py @@ -1,177 +1,21 @@ """Tests for the STIEBEL ELTRON integration.""" -from unittest.mock import AsyncMock - -import pytest - -from homeassistant.components.stiebel_eltron.const import CONF_HUB, DEFAULT_HUB, DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.components.stiebel_eltron.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component -@pytest.mark.usefixtures("mock_stiebel_eltron_client") async def test_async_setup_success( hass: HomeAssistant, issue_registry: ir.IssueRegistry, ) -> None: """Test successful async_setup.""" - config = { - DOMAIN: { - CONF_NAME: "Stiebel Eltron", - CONF_HUB: DEFAULT_HUB, - }, - "modbus": [ - { - 
CONF_NAME: DEFAULT_HUB, - CONF_HOST: "1.1.1.1", - CONF_PORT: 502, - } - ], - } - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - # Verify the issue is created - issue = issue_registry.async_get_issue(DOMAIN, "deprecated_yaml") - assert issue - assert issue.active is True - assert issue.severity == ir.IssueSeverity.WARNING - - -@pytest.mark.usefixtures("mock_stiebel_eltron_client") -async def test_async_setup_already_configured( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_config_entry, -) -> None: - """Test we handle already configured.""" - mock_config_entry.add_to_hass(hass) - - config = { - DOMAIN: { - CONF_NAME: "Stiebel Eltron", - CONF_HUB: DEFAULT_HUB, - }, - "modbus": [ - { - CONF_NAME: DEFAULT_HUB, - CONF_HOST: "1.1.1.1", - CONF_PORT: 502, - } - ], - } + config = {} assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done() - # Verify the issue is created + # No issue should be created by the new async_setup issue = issue_registry.async_get_issue(DOMAIN, "deprecated_yaml") - assert issue - assert issue.active is True - assert issue.severity == ir.IssueSeverity.WARNING - - -async def test_async_setup_with_non_existing_hub( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test async_setup with non-existing modbus hub.""" - config = { - DOMAIN: { - CONF_NAME: "Stiebel Eltron", - CONF_HUB: "non_existing_hub", - }, - } - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - # Verify the issue is created - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_missing_hub" - ) - assert issue - assert issue.active is True - assert issue.is_fixable is False - assert issue.is_persistent is False - assert issue.translation_key == "deprecated_yaml_import_issue_missing_hub" - assert issue.severity == ir.IssueSeverity.WARNING - - -async def test_async_setup_import_failure( 
- hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_stiebel_eltron_client: AsyncMock, -) -> None: - """Test async_setup with import failure.""" - config = { - DOMAIN: { - CONF_NAME: "Stiebel Eltron", - CONF_HUB: DEFAULT_HUB, - }, - "modbus": [ - { - CONF_NAME: DEFAULT_HUB, - CONF_HOST: "invalid_host", - CONF_PORT: 502, - } - ], - } - - # Simulate an import failure - mock_stiebel_eltron_client.update.side_effect = Exception("Import failure") - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - # Verify the issue is created - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_unknown" - ) - assert issue - assert issue.active is True - assert issue.is_fixable is False - assert issue.is_persistent is False - assert issue.translation_key == "deprecated_yaml_import_issue_unknown" - assert issue.severity == ir.IssueSeverity.WARNING - - -@pytest.mark.usefixtures("mock_modbus") -async def test_async_setup_cannot_connect( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_stiebel_eltron_client: AsyncMock, -) -> None: - """Test async_setup with import failure.""" - config = { - DOMAIN: { - CONF_NAME: "Stiebel Eltron", - CONF_HUB: DEFAULT_HUB, - }, - "modbus": [ - { - CONF_NAME: DEFAULT_HUB, - CONF_HOST: "invalid_host", - CONF_PORT: 502, - } - ], - } - - # Simulate a cannot connect error - mock_stiebel_eltron_client.update.return_value = False - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - # Verify the issue is created - issue = issue_registry.async_get_issue( - DOMAIN, "deprecated_yaml_import_issue_cannot_connect" - ) - assert issue - assert issue.active is True - assert issue.is_fixable is False - assert issue.is_persistent is False - assert issue.translation_key == "deprecated_yaml_import_issue_cannot_connect" - assert issue.severity == ir.IssueSeverity.WARNING + assert issue is None diff --git 
a/tests/components/template/conftest.py b/tests/components/template/conftest.py index 665e33db4c0ede..cee25e5026134f 100644 --- a/tests/components/template/conftest.py +++ b/tests/components/template/conftest.py @@ -203,13 +203,19 @@ async def setup_and_test_unique_id( platform_setup: TemplatePlatformSetup, style: ConfigurationStyle, entity_config: ConfigType | None, + state_template: str | None = None, ) -> None: """Setup 2 entities with the same unique_id and verify only 1 entity is created. The entity_config not provide name or unique_id, those are added automatically. """ - entity_config = {"unique_id": "not-so_-unique-anymore", **(entity_config or {})} if style == ConfigurationStyle.LEGACY: + state_config = {"value_template": state_template} if state_template else {} + entity_config = { + "unique_id": "not-so_-unique-anymore", + **(entity_config or {}), + **state_config, + } if platform_setup.legacy_slug is None: config = [ {"name": "template_entity_1", **entity_config}, @@ -223,7 +229,15 @@ async def setup_and_test_unique_id( await async_setup_legacy_platforms( hass, platform_setup.domain, platform_setup.legacy_slug, 1, config ) - elif style == ConfigurationStyle.MODERN: + return + + state_config = {"state": state_template} if state_template else {} + entity_config = { + "unique_id": "not-so_-unique-anymore", + **(entity_config or {}), + **state_config, + } + if style == ConfigurationStyle.MODERN: await async_setup_modern_state_format( hass, platform_setup.domain, @@ -254,6 +268,7 @@ async def setup_and_test_nested_unique_id( style: ConfigurationStyle, entity_registry: er.EntityRegistry, entity_config: ConfigType | None, + state_template: str | None = None, ) -> None: """Setup 2 entities with unique unique_ids in a template section that contains a unique_id. @@ -262,9 +277,10 @@ async def setup_and_test_nested_unique_id( The entity_config should not provide name or unique_id, those are added automatically. 
""" + state_config = {"state": state_template} if state_template else {} entities = [ - {"name": "test_a", "unique_id": "a", **(entity_config or {})}, - {"name": "test_b", "unique_id": "b", **(entity_config or {})}, + {"name": "test_a", "unique_id": "a", **(entity_config or {}), **state_config}, + {"name": "test_b", "unique_id": "b", **(entity_config or {}), **state_config}, ] extra_section_config = {"unique_id": "x"} if style == ConfigurationStyle.MODERN: diff --git a/tests/components/template/test_sensor.py b/tests/components/template/test_sensor.py index f0d5c0efce435e..8f3ae730ab330a 100644 --- a/tests/components/template/test_sensor.py +++ b/tests/components/template/test_sensor.py @@ -1,7 +1,7 @@ """The test for the Template sensor platform.""" from asyncio import Event -from datetime import datetime, timedelta +from datetime import datetime from unittest.mock import ANY, patch import pytest @@ -30,18 +30,100 @@ from homeassistant.setup import ATTR_COMPONENT, async_setup_component from homeassistant.util import dt as dt_util -from .conftest import async_get_flow_preview_state +from .conftest import ( + ConfigurationStyle, + TemplatePlatformSetup, + async_get_flow_preview_state, + async_trigger, + make_test_trigger, + setup_and_test_nested_unique_id, + setup_and_test_unique_id, + setup_entity, +) from tests.common import ( MockConfigEntry, assert_setup_component, async_capture_events, - async_fire_time_changed, mock_restore_cache_with_extra_data, ) from tests.conftest import WebSocketGenerator -TEST_NAME = "sensor.test_template_sensor" +TEST_STATE_SENSOR = "sensor.test_state" +TEST_AVAILABILITY_SENSOR = "sensor.availability_sensor" + +TEST_SENSOR = TemplatePlatformSetup( + sensor.DOMAIN, + "sensors", + "test_template_sensor", + make_test_trigger(TEST_STATE_SENSOR, TEST_AVAILABILITY_SENSOR), +) + + +@pytest.fixture +async def setup_sensor( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + config: ConfigType, +) -> None: + """Do setup of sensor 
integration.""" + await setup_entity(hass, TEST_SENSOR, style, count, config) + + +@pytest.fixture +async def setup_state_sensor( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str, + config: ConfigType, +) -> None: + """Do setup of sensor integration using a state template.""" + await setup_entity(hass, TEST_SENSOR, style, count, config, state_template) + + +@pytest.fixture +async def setup_single_attribute_state_sensor( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str | None, + attribute: str, + attribute_template: str, + extra_config: dict, +) -> None: + """Do setup of sensor integration testing a single attribute.""" + config = {attribute: attribute_template} if attribute and attribute_template else {} + await setup_entity( + hass, + TEST_SENSOR, + style, + count, + config, + state_template, + extra_config, + ) + + +@pytest.fixture +async def setup_attributes_state_sensor( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str | None, + attributes: dict, +) -> None: + """Do setup of sensor integration testing a single attribute.""" + await setup_entity( + hass, + TEST_SENSOR, + style, + count, + {}, + state_template, + attributes=attributes, + ) @pytest.mark.parametrize( @@ -94,170 +176,148 @@ async def test_setup_config_entry( assert state == snapshot -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "config", "state_template"), + [(1, {}, "It {{ states.sensor.test_state.state }}.")], +) +@pytest.mark.parametrize( + ("style", "initial_state"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "It {{ states.sensor.test_state.state }}." 
- } - }, - }, - }, + (ConfigurationStyle.LEGACY, "It ."), + (ConfigurationStyle.MODERN, "It ."), + (ConfigurationStyle.TRIGGER, STATE_UNKNOWN), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_legacy(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("setup_state_sensor") +async def test_sensor_state(hass: HomeAssistant, initial_state: str) -> None: """Test template.""" - assert hass.states.get(TEST_NAME).state == "It ." + assert hass.states.get(TEST_SENSOR.entity_id).state == initial_state - hass.states.async_set("sensor.test_state", "Works") - await hass.async_block_till_done() - assert hass.states.get(TEST_NAME).state == "It Works." + await async_trigger(hass, TEST_STATE_SENSOR, "Works") + assert hass.states.get(TEST_SENSOR.entity_id).state == "It Works." -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "extra_config", "state_template"), + [(1, {}, "{{ states('sensor.test_state') }}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "icon_template": "{% if states.sensor.test_state.state == " - "'Works' %}" - "mdi:check" - "{% endif %}", - } - }, - }, - }, + (ConfigurationStyle.LEGACY, "icon_template"), + (ConfigurationStyle.MODERN, "icon"), + (ConfigurationStyle.TRIGGER, "icon"), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_icon_template(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("attribute_template", "before_update", "after_update"), + [ + ( + "{{ 'mdi:check' if is_state('sensor.test_state', 'Works') else '' }}", + "", + "mdi:check", + ), + ( + "{{ 'mdi:check' }}", + "mdi:check", + "mdi:check", + ), + ], +) +@pytest.mark.usefixtures("setup_single_attribute_state_sensor") +async def test_icon_template( + hass: HomeAssistant, before_update: str | None, after_update: str | None +) -> 
None: """Test icon template.""" - assert hass.states.get(TEST_NAME).attributes.get("icon") == "" + await async_trigger(hass, TEST_STATE_SENSOR, "") + assert hass.states.get(TEST_SENSOR.entity_id).attributes["icon"] == before_update - hass.states.async_set("sensor.test_state", "Works") - await hass.async_block_till_done() - assert hass.states.get(TEST_NAME).attributes["icon"] == "mdi:check" + await async_trigger(hass, TEST_STATE_SENSOR, "Works") + assert hass.states.get(TEST_SENSOR.entity_id).attributes["icon"] == after_update -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "extra_config", "state_template"), + [(1, {}, "{{ states('sensor.test_state') }}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "entity_picture_template": "{% if states.sensor.test_state.state == " - "'Works' %}" - "/local/sensor.png" - "{% endif %}", - } - }, - }, - }, + (ConfigurationStyle.LEGACY, "entity_picture_template"), + (ConfigurationStyle.MODERN, "picture"), + (ConfigurationStyle.TRIGGER, "picture"), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_entity_picture_template(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("attribute_template", "before_update", "after_update"), + [ + ( + "{{ '/local/sensor.png' if is_state('sensor.test_state', 'Works') else '' }}", + "", + "/local/sensor.png", + ), + ( + "{{ '/local/sensor.png' }}", + "/local/sensor.png", + "/local/sensor.png", + ), + ], +) +@pytest.mark.usefixtures("setup_single_attribute_state_sensor") +async def test_entity_picture_template( + hass: HomeAssistant, before_update: str | None, after_update: str | None +) -> None: """Test entity_picture template.""" - assert hass.states.get(TEST_NAME).attributes.get("entity_picture") == "" + await async_trigger(hass, TEST_STATE_SENSOR, "") 
+ assert ( + hass.states.get(TEST_SENSOR.entity_id).attributes["entity_picture"] + == before_update + ) - hass.states.async_set("sensor.test_state", "Works") - await hass.async_block_till_done() + await async_trigger(hass, TEST_STATE_SENSOR, "Works") assert ( - hass.states.get(TEST_NAME).attributes["entity_picture"] == "/local/sensor.png" + hass.states.get(TEST_SENSOR.entity_id).attributes["entity_picture"] + == after_update ) -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - ("attribute", "config", "expected"), + ("count", "extra_config", "state_template"), + [(1, {}, "{{ states('sensor.test_state') }}")], +) +@pytest.mark.parametrize( + ("style", "attribute", "entity_id"), + [ + (ConfigurationStyle.LEGACY, "friendly_name_template", TEST_SENSOR.entity_id), + (ConfigurationStyle.MODERN, "name", TEST_SENSOR.entity_id), + (ConfigurationStyle.TRIGGER, "name", "sensor.unnamed_device"), + ], +) +@pytest.mark.parametrize( + ("attribute_template", "after_update"), [ ( - "friendly_name", - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "friendly_name_template": "It {{ states.sensor.test_state.state }}.", - } - }, - }, - }, - ("It .", "It Works."), - ), - ( - "friendly_name", - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "friendly_name_template": "{{ 'It ' + states.sensor.test_state.state + '.'}}", - } - }, - }, - }, - (None, "It Works."), - ), - ( - "friendly_name", - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.fourohfour.state }}", - "friendly_name_template": "It {{ states.sensor.test_state.state }}.", - } - }, - }, - }, - ("It .", "It Works."), + "{{ 'It Works.' 
if is_state('sensor.test_state', 'Works') else 'test_template_sensor' }}", + "It Works.", ), ( - "test_attribute", - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "attribute_templates": { - "test_attribute": "It {{ states.sensor.test_state.state }}." - }, - } - }, - }, - }, - ("It .", "It Works."), + "{{ 'test_template_sensor' }}", + "test_template_sensor", ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_friendly_name_template(hass: HomeAssistant, attribute, expected) -> None: +@pytest.mark.usefixtures("setup_single_attribute_state_sensor") +async def test_name_template( + hass: HomeAssistant, entity_id: str, after_update: str | None +) -> None: """Test friendly_name template with an unknown value_template.""" - assert hass.states.get(TEST_NAME).attributes.get(attribute) == expected[0] + await async_trigger(hass, TEST_STATE_SENSOR, "") + assert ( + hass.states.get(entity_id).attributes["friendly_name"] == TEST_SENSOR.object_id + ) - hass.states.async_set("sensor.test_state", "Works") - await hass.async_block_till_done() - assert hass.states.get(TEST_NAME).attributes[attribute] == expected[1] + await async_trigger(hass, TEST_STATE_SENSOR, "Works") + assert hass.states.get(entity_id).attributes["friendly_name"] == after_update @pytest.mark.parametrize(("count", "domain"), [(0, sensor.DOMAIN)]) @@ -321,62 +381,60 @@ async def test_friendly_name_template(hass: HomeAssistant, attribute, expected) ], ) @pytest.mark.usefixtures("start_ha") -async def test_template_syntax_error(hass: HomeAssistant) -> None: +async def test_legacy_template_syntax_error(hass: HomeAssistant) -> None: """Test setup with invalid device_class.""" assert hass.states.async_all("sensor") == [] -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", - [ - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": 
{ - "value_template": "It {{ states.sensor.test_state" - ".attributes.missing }}." - } - }, - }, - }, - ], + ("count", "config", "state_template"), + [(1, {}, "{{ x - 12 }}")], ) -@pytest.mark.usefixtures("start_ha") -async def test_template_attribute_missing(hass: HomeAssistant) -> None: - """Test missing attribute template.""" - assert hass.states.get(TEST_NAME).state == STATE_UNAVAILABLE +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER], +) +@pytest.mark.usefixtures("setup_state_sensor") +async def test_bad_template_unavailable(hass: HomeAssistant) -> None: + """Test a bad template creates an unavailable sensor.""" + await async_trigger(hass, TEST_STATE_SENSOR) + assert hass.states.get(TEST_SENSOR.entity_id).state == STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "state_template"), + [(1, "{{ states('sensor.test_sensor') | float(0) }}")], +) +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER], +) +@pytest.mark.parametrize( + ("config", "expected_device_class"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test1": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "unit_of_measurement": "°C", - "device_class": "temperature", - }, - "test2": { - "value_template": "{{ states.sensor.test_sensor.state }}" - }, - }, + ( + { + "unit_of_measurement": "°C", + "device_class": "temperature", }, - }, + "temperature", + ), + ( + {}, + None, + ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_setup_valid_device_class(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("setup_state_sensor") +async def test_setup_valid_device_class( + hass: HomeAssistant, expected_device_class: str | None +) -> None: """Test setup with valid device_class.""" - hass.states.async_set("sensor.test_sensor", "75") - await 
hass.async_block_till_done() - assert hass.states.get("sensor.test1").attributes["device_class"] == "temperature" - assert "device_class" not in hass.states.get("sensor.test2").attributes + await async_trigger(hass, TEST_STATE_SENSOR, "75") + assert ( + hass.states.get(TEST_SENSOR.entity_id).attributes.get("device_class") + == expected_device_class + ) @pytest.mark.parametrize("load_registries", [False]) @@ -419,257 +477,189 @@ async def set_after_dep_event(event): ), ): await async_from_config_dict( - {"sensor": {"platform": "template", "sensors": {}}, "group": {}}, hass + { + "template": [{"sensor": {"name": "foo", "state": "{{ 'bar' }}"}}], + "group": {}, + }, + hass, ) await hass.async_block_till_done() assert order == ["group", "sensor.template"] -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "extra_config", "state_template", "attribute_template"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "availability_template": "{{ is_state('sensor.availability_sensor', 'on') }}", - } - }, - }, - }, + ( + 1, + {}, + "{{ states('sensor.test_state') }}", + "{{ is_state('sensor.availability_sensor', 'on') }}", + ) ], ) -@pytest.mark.usefixtures("start_ha") +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), + (ConfigurationStyle.TRIGGER, "availability"), + ], +) +@pytest.mark.usefixtures("setup_single_attribute_state_sensor") async def test_available_template_with_entities(hass: HomeAssistant) -> None: """Test availability tempalates with values from other entities.""" - hass.states.async_set("sensor.availability_sensor", STATE_OFF) - # When template returns true.. 
- hass.states.async_set("sensor.availability_sensor", STATE_ON) - await hass.async_block_till_done() + await async_trigger(hass, TEST_AVAILABILITY_SENSOR, STATE_ON) # Device State should not be unavailable - assert hass.states.get(TEST_NAME).state != STATE_UNAVAILABLE + assert hass.states.get(TEST_SENSOR.entity_id).state != STATE_UNAVAILABLE # When Availability template returns false - hass.states.async_set("sensor.availability_sensor", STATE_OFF) - await hass.async_block_till_done() + await async_trigger(hass, TEST_AVAILABILITY_SENSOR, STATE_OFF) # device state should be unavailable - assert hass.states.get(TEST_NAME).state == STATE_UNAVAILABLE + assert hass.states.get(TEST_SENSOR.entity_id).state == STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "extra_config", "state_template", "attribute_template"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "invalid_template": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "attribute_templates": { - "test_attribute": "{{ states.sensor.unknown.attributes.picture }}" - }, - } - }, - }, - }, + ( + 1, + {}, + "{{ 'something' }}", + "{{ x - 12 }}", + ) ], ) -@pytest.mark.usefixtures("start_ha") -async def test_invalid_attribute_template( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, caplog_setup_text -) -> None: - """Test that errors are logged if rendering template fails.""" - hass.states.async_set("sensor.test_sensor", "startup") - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 - - hass.bus.async_fire(EVENT_HOMEASSISTANT_START) - await hass.async_block_till_done() - await async_update_entity(hass, "sensor.invalid_template") - assert "TemplateError" in caplog_setup_text - assert ( - "Template variable error: 'None' has no attribute 'attributes' when rendering" - in caplog.text - ) - assert hass.states.get("sensor.invalid_template").state == "startup" - - 
-@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("style", "attribute"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "my_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "availability_template": "{{ x - 12 }}", - } - }, - }, - }, + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), + (ConfigurationStyle.TRIGGER, "availability"), ], ) -@pytest.mark.usefixtures("start_ha") +@pytest.mark.usefixtures("setup_single_attribute_state_sensor") async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog_setup_text + hass: HomeAssistant, caplog_setup_text, caplog: pytest.LogCaptureFixture ) -> None: """Test that an invalid availability keeps the device available.""" - assert hass.states.get("sensor.my_sensor").state != STATE_UNAVAILABLE - assert "UndefinedError: 'x' is undefined" in caplog_setup_text + await async_trigger(hass, TEST_STATE_SENSOR) + assert hass.states.get(TEST_SENSOR.entity_id) != STATE_UNAVAILABLE + err = "'x' is undefined" + assert err in caplog_setup_text or err in caplog.text -async def test_no_template_match_all( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER], +) +@pytest.mark.parametrize( + ("count", "state_template", "attributes", "before", "after"), + [ + ( + 1, + "{{ states('sensor.test_state') }}", + {"test_attribute": "It {{ states('sensor.test_state') }}."}, + "It .", + "It Works.", + ), + ( + 1, + "{{ states('sensor.test_state') }}", + {"test_attribute": "{{ 'It static' }}."}, + "It static.", + "It static.", + ), + ], +) +@pytest.mark.usefixtures("setup_attributes_state_sensor") +async def test_attribute_templates( + hass: HomeAssistant, before: str, after: str ) -> None: - """Test that we allow static templates.""" - 
hass.states.async_set("sensor.test_sensor", "startup") + """Test attribute_templates template.""" + await async_trigger(hass, TEST_STATE_SENSOR, "") + state = hass.states.get(TEST_SENSOR.entity_id) + assert state.attributes["test_attribute"] == before - hass.set_state(CoreState.not_running) + await async_trigger(hass, TEST_STATE_SENSOR, "Works") + await async_update_entity(hass, TEST_SENSOR.entity_id) + state = hass.states.get(TEST_SENSOR.entity_id) + assert state.attributes["test_attribute"] == after - await async_setup_component( - hass, - sensor.DOMAIN, - { - "sensor": { - "platform": "template", - "sensors": { - "invalid_state": {"value_template": "{{ 1 + 1 }}"}, - "invalid_icon": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "icon_template": "{{ 1 + 1 }}", - }, - "invalid_entity_picture": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "entity_picture_template": "{{ 1 + 1 }}", - }, - "invalid_friendly_name": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "friendly_name_template": "{{ 1 + 1 }}", - }, - "invalid_attribute": { - "value_template": "{{ states.sensor.test_sensor.state }}", - "attribute_templates": {"test_attribute": "{{ 1 + 1 }}"}, - }, - }, - } - }, - ) - await hass.async_block_till_done() - - assert hass.states.get("sensor.invalid_state").state == "unknown" - assert hass.states.get("sensor.invalid_icon").state == "unknown" - assert hass.states.get("sensor.invalid_entity_picture").state == "unknown" - assert hass.states.get("sensor.invalid_friendly_name").state == "unknown" - - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 6 - - assert hass.states.get("sensor.invalid_state").state == "unknown" - assert hass.states.get("sensor.invalid_icon").state == "unknown" - assert hass.states.get("sensor.invalid_entity_picture").state == "unknown" - assert hass.states.get("sensor.invalid_friendly_name").state == "unknown" - assert 
hass.states.get("sensor.invalid_attribute").state == "unknown" - - hass.bus.async_fire(EVENT_HOMEASSISTANT_START) - await hass.async_block_till_done() - - assert hass.states.get("sensor.invalid_state").state == "2" - assert hass.states.get("sensor.invalid_icon").state == "startup" - assert hass.states.get("sensor.invalid_entity_picture").state == "startup" - assert hass.states.get("sensor.invalid_friendly_name").state == "startup" - assert hass.states.get("sensor.invalid_attribute").state == "startup" - hass.states.async_set("sensor.test_sensor", "hello") - await hass.async_block_till_done() - - assert hass.states.get("sensor.invalid_state").state == "2" - # Will now process because we have at least one valid template - assert hass.states.get("sensor.invalid_icon").state == "hello" - assert hass.states.get("sensor.invalid_entity_picture").state == "hello" - assert hass.states.get("sensor.invalid_friendly_name").state == "hello" - assert hass.states.get("sensor.invalid_attribute").state == "hello" - - await async_update_entity(hass, "sensor.invalid_state") - await async_update_entity(hass, "sensor.invalid_icon") - await async_update_entity(hass, "sensor.invalid_entity_picture") - await async_update_entity(hass, "sensor.invalid_friendly_name") - await async_update_entity(hass, "sensor.invalid_attribute") - - assert hass.states.get("sensor.invalid_state").state == "2" - assert hass.states.get("sensor.invalid_icon").state == "hello" - assert hass.states.get("sensor.invalid_entity_picture").state == "hello" - assert hass.states.get("sensor.invalid_friendly_name").state == "hello" - assert hass.states.get("sensor.invalid_attribute").state == "hello" - - -@pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @pytest.mark.parametrize( - "config", + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +@pytest.mark.parametrize( + ("count", "state_template", "attributes"), [ - { - "template": { - "unique_id": "group-id", - "sensor": {"name": 
"top-level", "unique_id": "sensor-id", "state": "5"}, - }, - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor_01": { - "unique_id": "not-so-unique-anymore", - "value_template": "{{ true }}", - }, - "test_template_sensor_02": { - "unique_id": "not-so-unique-anymore", - "value_template": "{{ false }}", - }, - }, - }, - }, + ( + 1, + "{{ states('sensor.test_state') }}", + {"test_attribute": "{{ states.sensor.unknown.attributes.picture }}"}, + ) ], ) -@pytest.mark.usefixtures("start_ha") +@pytest.mark.usefixtures("setup_attributes_state_sensor") +async def test_invalid_attribute_template( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, caplog_setup_text +) -> None: + """Test that errors are logged if rendering template fails.""" + await async_trigger(hass, TEST_STATE_SENSOR, "Works") + error = ( + "Template variable error: 'None' has no attribute 'attributes' when rendering" + ) + assert error in caplog.text or error in caplog_setup_text + assert hass.states.get(TEST_SENSOR.entity_id).state == "Works" + + +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER], +) async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + style: ConfigurationStyle, ) -> None: - """Test unique_id option only creates one sensor per id.""" - assert len(hass.states.async_all()) == 2 + """Test unique_id option only creates one vacuum per id.""" + await setup_and_test_unique_id(hass, TEST_SENSOR, style, {}, "{{ 'foo' }}") - assert len(entity_registry.entities) == 2 - assert entity_registry.async_get_entity_id( - "sensor", "template", "group-id-sensor-id" - ) - assert entity_registry.async_get_entity_id( - "sensor", "template", "not-so-unique-anymore" + +@pytest.mark.parametrize( + "style", [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER] +) +async def test_nested_unique_id( + hass: HomeAssistant, + style: ConfigurationStyle, + 
entity_registry: er.EntityRegistry, +) -> None: + """Test a template unique_id propagates to vacuum unique_ids.""" + await setup_and_test_nested_unique_id( + hass, TEST_SENSOR, style, entity_registry, {}, "{{ 'foo' }}" ) -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) @pytest.mark.parametrize( "config", [ { - "sensor": { - "platform": "template", - "sensors": { - "solar_angle": { - "friendly_name": "Sun angle", + "template": { + "sensor": [ + { + "name": "solar_angle", "unit_of_measurement": "degrees", - "value_template": "{{ state_attr('sun.sun', 'elevation') }}", + "state": "{{ state_attr('sun.sun', 'elevation') }}", }, - "sunrise": { - "value_template": "{{ state_attr('sun.sun', 'next_rising') }}" + { + "name": "sunrise", + "state": "{{ state_attr('sun.sun', 'next_rising') }}", }, - }, + ], } }, ], @@ -713,46 +703,49 @@ def _record_async_render(self, *args, **kwargs): } -@pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @pytest.mark.parametrize( - "config", + ("style", "before", "after"), [ - { - "template": { - "sensor": { - "name": "test_template_sensor", - "state": "{{ this.attributes.test }}: {{ this.entity_id }}", - "attributes": {"test": "It {{ states.sensor.test_state.state }}"}, - }, - }, - }, - { - "template": { - "trigger": { - "platform": "state", - "entity_id": [ - "sensor.test_state", - "sensor.test_template_sensor", - ], - }, - "sensor": { - "name": "test_template_sensor", - "state": "{{ this.attributes.test }}: {{ this.entity_id }}", - "attributes": {"test": "It {{ states.sensor.test_state.state }}"}, - }, - }, - }, + ( + ConfigurationStyle.LEGACY, + "It: " + TEST_SENSOR.entity_id, + "Works: " + TEST_SENSOR.entity_id, + ), + ( + ConfigurationStyle.MODERN, + "It: " + TEST_SENSOR.entity_id, + "Works: " + TEST_SENSOR.entity_id, + ), + ( + # Trigger based template entities only resolve when triggered + # therefore the templates will be 1 
resolution behind when + # dealing with the this object + ConfigurationStyle.TRIGGER, + ": " + TEST_SENSOR.entity_id, + "It: " + TEST_SENSOR.entity_id, + ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_this_variable(hass: HomeAssistant) -> None: - """Test template.""" - assert hass.states.get(TEST_NAME).state == "It: " + TEST_NAME - - hass.states.async_set("sensor.test_state", "Works") +@pytest.mark.parametrize( + ("count", "state_template", "attributes"), + [ + ( + 1, + "{{ this.attributes.test }}: {{ this.entity_id }}", + {"test": "{{ states('sensor.test_state') }}"}, + ), + ], +) +@pytest.mark.usefixtures("setup_attributes_state_sensor") +async def test_this_variable(hass: HomeAssistant, before: str, after: str) -> None: + """Test this variable.""" + await async_trigger(hass, TEST_STATE_SENSOR, "It") await hass.async_block_till_done() + assert hass.states.get(TEST_SENSOR.entity_id).state == before + + await async_trigger(hass, TEST_STATE_SENSOR, "Works") await hass.async_block_till_done() - assert hass.states.get(TEST_NAME).state == "It Works: " + TEST_NAME + assert hass.states.get(TEST_SENSOR.entity_id).state == after @pytest.mark.parametrize(("count", "domain"), [(1, "template")]) @@ -874,149 +867,87 @@ async def test_this_variable_early_hass_running( } -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", - [ - { - "sensor": { - "platform": "template", - "sensors": { - "test": { - "value_template": "{{ ((states.sensor.test.state or 0) | int) + 1 }}", - }, - }, - } - }, - ], + ("count", "config", "state_template"), + [(1, {}, "{{ ((states.sensor.test_template_sensor.state or 0) | int) + 1 }}")], ) -@pytest.mark.usefixtures("start_ha") -async def test_self_referencing_sensor_loop( - hass: HomeAssistant, caplog_setup_text -) -> None: - """Test a self referencing sensor does not loop forever.""" - assert len(hass.states.async_all()) == 1 - await hass.async_block_till_done() - await 
hass.async_block_till_done() - assert "Template loop detected" in caplog_setup_text - assert int(hass.states.get("sensor.test").state) == 2 - await hass.async_block_till_done() - assert int(hass.states.get("sensor.test").state) == 2 - - -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", - [ - { - "sensor": { - "platform": "template", - "sensors": { - "test": { - "value_template": "{{ ((states.sensor.test.state or 0) | int) + 1 }}", - "icon_template": "{% if ((states.sensor.test.state or 0) | int) >= 1 %}mdi:greater{% else %}mdi:less{% endif %}", - }, - }, - } - }, - ], + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN], ) -@pytest.mark.usefixtures("start_ha") -async def test_self_referencing_sensor_with_icon_loop( +@pytest.mark.usefixtures("setup_state_sensor") +async def test_self_referencing_sensor_loop( hass: HomeAssistant, caplog_setup_text ) -> None: - """Test a self referencing sensor loops forever with a valid self referencing icon.""" + """Test a self referencing sensor does not loop forever.""" assert len(hass.states.async_all()) == 1 await hass.async_block_till_done() await hass.async_block_till_done() assert "Template loop detected" in caplog_setup_text - - state = hass.states.get("sensor.test") - assert int(state.state) == 3 - assert state.attributes[ATTR_ICON] == "mdi:greater" + assert int(hass.states.get(TEST_SENSOR.entity_id).state) == 2 await hass.async_block_till_done() - state = hass.states.get("sensor.test") - assert int(state.state) == 3 + assert int(hass.states.get(TEST_SENSOR.entity_id).state) == 2 -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) +@pytest.mark.parametrize(("count", "style"), [(1, ConfigurationStyle.MODERN)]) @pytest.mark.parametrize( - "config", + ("config", "attributes", "expected_state"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test": { - "value_template": "{{ ((states.sensor.test.state or 0) | int) + 1 }}", - 
"icon_template": "{% if ((states.sensor.test.state or 0) | int) > 3 %}mdi:greater{% else %}mdi:less{% endif %}", - "entity_picture_template": "{% if ((states.sensor.test.state or 0) | int) >= 1 %}bigpic{% else %}smallpic{% endif %}", - }, - }, - } - }, + ( + { + "state": "{{ ((states.sensor.test_template_sensor.state or 0) | int) + 1 }}", + "icon": "{% if ((states.sensor.test_template_sensor.state or 0) | int) >= 1 %}mdi:greater{% else %}mdi:less{% endif %}", + }, + ((ATTR_ICON, "mdi:greater"),), + 3, + ), + ( + { + "state": "{{ ((states.sensor.test_template_sensor.state or 0) | int) + 1 }}", + "icon": "{% if ((states.sensor.test_template_sensor.state or 0) | int) > 3 %}mdi:greater{% else %}mdi:less{% endif %}", + "picture": "{% if ((states.sensor.test_template_sensor.state or 0) | int) >= 1 %}bigpic{% else %}smallpic{% endif %}", + }, + ( + (ATTR_ICON, "mdi:less"), + (ATTR_ENTITY_PICTURE, "bigpic"), + ), + 4, + ), + ( + { + "default_entity_id": TEST_SENSOR.entity_id, + "state": "{{ 1 }}", + "picture": "{{ ((states.sensor.test_template_sensor.attributes['entity_picture'] or 0) | int) + 1 }}", + "name": "{{ ((states.sensor.test_template_sensor.attributes['friendly_name'] or 0) | int) + 1 }}", + }, + ( + (ATTR_ENTITY_PICTURE, "3"), + (ATTR_FRIENDLY_NAME, "3"), + ), + 1, + ), ], ) -@pytest.mark.usefixtures("start_ha") -async def test_self_referencing_sensor_with_icon_and_picture_entity_loop( - hass: HomeAssistant, caplog_setup_text +@pytest.mark.usefixtures("setup_sensor") +async def test_self_referencing( + hass: HomeAssistant, + attributes: tuple[tuple[str, str]], + expected_state: int, + caplog_setup_text, ) -> None: - """Test a self referencing sensor loop forevers with a valid self referencing icon.""" + """Test a self referencing sensor loops forever.""" assert len(hass.states.async_all()) == 1 await hass.async_block_till_done() await hass.async_block_till_done() assert "Template loop detected" in caplog_setup_text - state = hass.states.get("sensor.test") - 
assert int(state.state) == 4 - assert state.attributes[ATTR_ICON] == "mdi:less" - assert state.attributes[ATTR_ENTITY_PICTURE] == "bigpic" - - await hass.async_block_till_done() - assert int(state.state) == 4 - - -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) -@pytest.mark.parametrize( - "config", - [ - { - "sensor": { - "platform": "template", - "sensors": { - "test": { - "value_template": "{{ 1 }}", - "entity_picture_template": "{{ ((states.sensor.test.attributes['entity_picture'] or 0) | int) + 1 }}", - "friendly_name_template": "{{ ((states.sensor.test.attributes['friendly_name'] or 0) | int) + 1 }}", - }, - }, - } - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_self_referencing_entity_picture_loop( - hass: HomeAssistant, caplog_setup_text -) -> None: - """Test a self referencing sensor does not loop forever with a looping self referencing entity picture.""" - assert len(hass.states.async_all()) == 1 - next_time = dt_util.utcnow() + timedelta(seconds=1.2) - with patch( - "homeassistant.helpers.ratelimit.time.time", return_value=next_time.timestamp() - ): - async_fire_time_changed(hass, next_time) - await hass.async_block_till_done() - await hass.async_block_till_done() - - assert "Template loop detected" in caplog_setup_text - - state = hass.states.get("sensor.test") - assert int(state.state) == 1 - assert state.attributes[ATTR_ENTITY_PICTURE] == "3" - assert state.attributes[ATTR_FRIENDLY_NAME] == "3" - + state = hass.states.get(TEST_SENSOR.entity_id) + assert int(state.state) == expected_state + for attr, expected in attributes: + assert state.attributes[attr] == expected await hass.async_block_till_done() - assert int(state.state) == 1 + state = hass.states.get(TEST_SENSOR.entity_id) + assert int(state.state) == expected_state async def test_self_referencing_icon_with_no_loop( @@ -1059,17 +990,19 @@ async def test_self_referencing_icon_with_no_loop( await async_setup_component( hass, - sensor.DOMAIN, + 
template.DOMAIN, { - "sensor": { - "platform": "template", - "sensors": { - "heartworm_risk": { - "value_template": value_template_str, - "icon_template": icon_template_str, - }, - }, - } + "template": [ + { + "sensor": [ + { + "name": "heartworm_risk", + "state": value_template_str, + "icon": icon_template_str, + } + ], + } + ] }, ) @@ -1096,140 +1029,36 @@ async def test_self_referencing_icon_with_no_loop( assert "Template loop detected" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)]) @pytest.mark.parametrize( - "config", + ("count", "extra_config", "state_template", "attribute", "attribute_template"), [ - { - "sensor": { - "platform": "template", - "sensors": { - "test_template_sensor": { - "value_template": "{{ states.sensor.test_state.state }}", - "friendly_name_template": "{{ states.sensor.test_state.state }}", - } - }, - } - }, + ( + 1, + {"default_entity_id": TEST_SENSOR.entity_id}, + "{{ states('sensor.test_state') }}", + "name", + "{{ states('sensor.test_state') }}", + ) ], ) -@pytest.mark.usefixtures("start_ha") +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.MODERN, ConfigurationStyle.TRIGGER], +) +@pytest.mark.usefixtures("setup_single_attribute_state_sensor") async def test_duplicate_templates(hass: HomeAssistant) -> None: """Test template entity where the value and friendly name as the same template.""" - hass.states.async_set("sensor.test_state", "Abc") - await hass.async_block_till_done() - state = hass.states.get(TEST_NAME) + await async_trigger(hass, TEST_STATE_SENSOR, "Abc") + state = hass.states.get(TEST_SENSOR.entity_id) assert state.attributes["friendly_name"] == "Abc" assert state.state == "Abc" - hass.states.async_set("sensor.test_state", "Def") - await hass.async_block_till_done() - state = hass.states.get(TEST_NAME) + await async_trigger(hass, TEST_STATE_SENSOR, "Def") + state = hass.states.get(TEST_SENSOR.entity_id) assert state.attributes["friendly_name"] == "Def" assert state.state == 
"Def" -@pytest.mark.parametrize(("count", "domain"), [(2, "template")]) -@pytest.mark.parametrize( - "config", - [ - { - "template": [ - {"invalid": "config"}, - # Config after invalid should still be set up - { - "unique_id": "listening-test-event", - "trigger": {"platform": "event", "event_type": "test_event"}, - "sensors": { - "hello_name": { - "friendly_name": "Hello Name", - "unique_id": "hello_name-id", - "device_class": "battery", - "unit_of_measurement": "%", - "value_template": "{{ trigger.event.data.beer }}", - "entity_picture_template": "{{ '/local/dogs.png' }}", - "icon_template": "{{ 'mdi:pirate' }}", - "attribute_templates": { - "plus_one": "{{ trigger.event.data.beer + 1 }}" - }, - }, - }, - "sensor": [ - { - "name": "via list", - "unique_id": "via_list-id", - "device_class": "battery", - "unit_of_measurement": "%", - "availability": "{{ True }}", - "state": "{{ trigger.event.data.beer + 1 }}", - "picture": "{{ '/local/dogs.png' }}", - "icon": "{{ 'mdi:pirate' }}", - "attributes": { - "plus_one": "{{ trigger.event.data.beer + 1 }}" - }, - "state_class": "measurement", - } - ], - }, - { - "trigger": [], - "sensors": { - "bare_minimum": { - "value_template": "{{ trigger.event.data.beer }}" - }, - }, - }, - ], - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_entity( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test trigger entity works.""" - state = hass.states.get("sensor.hello_name") - assert state is not None - assert state.state == STATE_UNKNOWN - - state = hass.states.get("sensor.bare_minimum") - assert state is not None - assert state.state == STATE_UNKNOWN - - context = Context() - hass.bus.async_fire("test_event", {"beer": 2}, context=context) - await hass.async_block_till_done() - - state = hass.states.get("sensor.hello_name") - assert state.state == "2" - assert state.attributes.get("device_class") == "battery" - assert state.attributes.get("icon") == "mdi:pirate" - assert 
state.attributes.get("entity_picture") == "/local/dogs.png" - assert state.attributes.get("plus_one") == 3 - assert state.attributes.get("unit_of_measurement") == "%" - assert state.context is context - - assert len(entity_registry.entities) == 2 - assert ( - entity_registry.entities["sensor.hello_name"].unique_id - == "listening-test-event-hello_name-id" - ) - assert ( - entity_registry.entities["sensor.via_list"].unique_id - == "listening-test-event-via_list-id" - ) - - state = hass.states.get("sensor.via_list") - assert state.state == "3" - assert state.attributes.get("device_class") == "battery" - assert state.attributes.get("icon") == "mdi:pirate" - assert state.attributes.get("entity_picture") == "/local/dogs.png" - assert state.attributes.get("plus_one") == 3 - assert state.attributes.get("unit_of_measurement") == "%" - assert state.attributes.get("state_class") == "measurement" - assert state.context is context - - @pytest.mark.parametrize(("count", "domain"), [(1, template.DOMAIN)]) @pytest.mark.parametrize( "config", @@ -1392,44 +1221,6 @@ async def test_trigger_entity_runs_once(hass: HomeAssistant) -> None: assert state.attributes.get("history_1") == "Not yet set" -@pytest.mark.parametrize(("count", "domain"), [(1, "template")]) -@pytest.mark.parametrize( - "config", - [ - { - "template": { - "trigger": {"platform": "event", "event_type": "test_event"}, - "sensors": { - "hello": { - "unique_id": "no-base-id", - "friendly_name": "Hello", - "value_template": "{{ non_existing + 1 }}", - } - }, - }, - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_trigger_entity_render_error( - hass: HomeAssistant, entity_registry: er.EntityRegistry -) -> None: - """Test trigger entity handles render error.""" - state = hass.states.get("sensor.hello") - assert state is not None - assert state.state == STATE_UNKNOWN - - context = Context() - hass.bus.async_fire("test_event", {"beer": 2}, context=context) - await hass.async_block_till_done() - - state = 
hass.states.get("sensor.hello") - assert state.state == STATE_UNAVAILABLE - - assert len(entity_registry.entities) == 1 - assert entity_registry.entities["sensor.hello"].unique_id == "no-base-id" - - @pytest.mark.parametrize(("count", "domain"), [(0, sensor.DOMAIN)]) @pytest.mark.parametrize( "config", @@ -1453,7 +1244,7 @@ async def test_trigger_not_allowed_platform_config( hass: HomeAssistant, caplog_setup_text ) -> None: """Test we throw a helpful warning if a trigger is configured in platform config.""" - state = hass.states.get(TEST_NAME) + state = hass.states.get(TEST_SENSOR.entity_id) assert state is None assert ( "Invalid config for 'sensor' from integration 'template': 'trigger' is an invalid option for" @@ -1461,75 +1252,6 @@ async def test_trigger_not_allowed_platform_config( ) -@pytest.mark.parametrize(("count", "domain"), [(1, "template")]) -@pytest.mark.parametrize( - "config", - [ - { - "template": { - "sensor": { - "name": "top-level", - "device_class": "battery", - "state_class": "measurement", - "state": "5", - "unit_of_measurement": "%", - }, - }, - }, - ], -) -@pytest.mark.usefixtures("start_ha") -async def test_config_top_level(hass: HomeAssistant) -> None: - """Test unique_id option only creates one sensor per id.""" - assert len(hass.states.async_all()) == 1 - state = hass.states.get("sensor.top_level") - assert state is not None - assert state.state == "5" - assert state.attributes["device_class"] == "battery" - assert state.attributes["state_class"] == "measurement" - - -async def test_trigger_entity_available(hass: HomeAssistant) -> None: - """Test trigger entity availability works.""" - assert await async_setup_component( - hass, - "template", - { - "template": [ - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "sensor": [ - { - "name": "Maybe Available", - "availability": "{{ trigger and trigger.event.data.beer == 2 }}", - "state": "{{ trigger.event.data.beer }}", - }, - ], - }, - ], - }, - ) - - await 
hass.async_block_till_done() - - # Sensors are unknown if never triggered - state = hass.states.get("sensor.maybe_available") - assert state is not None - assert state.state == STATE_UNKNOWN - - hass.bus.async_fire("test_event", {"beer": 2}) - await hass.async_block_till_done() - - state = hass.states.get("sensor.maybe_available") - assert state.state == "2" - - hass.bus.async_fire("test_event", {"beer": 1}) - await hass.async_block_till_done() - - state = hass.states.get("sensor.maybe_available") - assert state.state == "unavailable" - - @pytest.mark.parametrize(("source_event_value"), [None, "None"]) async def test_numeric_trigger_entity_set_unknown( hass: HomeAssistant, source_event_value: str | None @@ -1569,145 +1291,6 @@ async def test_numeric_trigger_entity_set_unknown( assert state.state == STATE_UNKNOWN -async def test_trigger_entity_available_skips_state( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test trigger entity availability works.""" - assert await async_setup_component( - hass, - "template", - { - "template": [ - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "sensor": [ - { - "name": "Never Available", - "availability": "{{ trigger and trigger.event.data.beer == 2 }}", - "state": "{{ noexist - 1 }}", - }, - ], - }, - ], - }, - ) - - await hass.async_block_till_done() - - # Sensors are unknown if never triggered - state = hass.states.get("sensor.never_available") - assert state is not None - assert state.state == STATE_UNKNOWN - - hass.bus.async_fire("test_event", {"beer": 1}) - await hass.async_block_till_done() - - state = hass.states.get("sensor.never_available") - assert state.state == "unavailable" - - assert "'noexist' is undefined" not in caplog.text - - hass.bus.async_fire("test_event", {"beer": 2}) - await hass.async_block_till_done() - - state = hass.states.get("sensor.never_available") - assert state.state == "unavailable" - - assert "'noexist' is undefined" in caplog.text - - -async def 
test_trigger_state_with_availability_syntax_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test trigger entity is available when attributes have syntax errors.""" - assert await async_setup_component( - hass, - "template", - { - "template": [ - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "sensor": [ - { - "name": "Test Sensor", - "availability": "{{ what_the_heck == 2 }}", - "state": "{{ trigger.event.data.beer }}", - }, - ], - }, - ], - }, - ) - - await hass.async_block_till_done() - - # Sensors are unknown if never triggered - state = hass.states.get("sensor.test_sensor") - assert state is not None - assert state.state == STATE_UNKNOWN - - hass.bus.async_fire("test_event", {"beer": 2}) - await hass.async_block_till_done() - - state = hass.states.get("sensor.test_sensor") - assert state.state == "2" - - assert ( - "Error rendering availability template for sensor.test_sensor: UndefinedError: 'what_the_heck' is undefined" - in caplog.text - ) - - -async def test_trigger_available_with_attribute_syntax_error( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test trigger entity is available when attributes have syntax errors.""" - assert await async_setup_component( - hass, - "template", - { - "template": [ - { - "trigger": {"platform": "event", "event_type": "test_event"}, - "sensor": [ - { - "name": "Test Sensor", - "availability": "{{ trigger and trigger.event.data.beer == 2 }}", - "state": "{{ trigger.event.data.beer }}", - "attributes": { - "beer": "{{ trigger.event.data.beer }}", - "no_beer": "{{ sad - 1 }}", - "more_beer": "{{ beer + 1 }}", - }, - }, - ], - }, - ], - }, - ) - - await hass.async_block_till_done() - - # Sensors are unknown if never triggered - state = hass.states.get("sensor.test_sensor") - assert state is not None - assert state.state == STATE_UNKNOWN - - hass.bus.async_fire("test_event", {"beer": 2}) - await hass.async_block_till_done() - - state = 
hass.states.get("sensor.test_sensor") - assert state.state == "2" - - assert state.attributes["beer"] == 2 - assert "no_beer" not in state.attributes - assert ( - "Error rendering attributes.no_beer template for sensor.test_sensor: UndefinedError: 'sad' is undefined" - in caplog.text - ) - assert state.attributes["more_beer"] == 3 - - async def test_trigger_attribute_order( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/components/transmission/conftest.py b/tests/components/transmission/conftest.py index 0390981db92354..2adb1bf67b2922 100644 --- a/tests/components/transmission/conftest.py +++ b/tests/components/transmission/conftest.py @@ -87,16 +87,15 @@ def _create_mock_torrent( torrent_data = { "id": torrent_id, "name": name, - "percentDone": percent_done, "status": status, - "rateDownload": 0, - "rateUpload": 0, - "downloadDir": download_dir, + "percentDone": percent_done, + "uploadRatio": ratio, + "ratio": ratio, "eta": eta, "addedDate": int(added_date.timestamp()), - "uploadRatio": ratio, - "error": 0, - "errorString": "", + "doneDate": int(added_date.timestamp()) if percent_done >= 1.0 else 0, + "downloadDir": download_dir, + "labels": [], } return Torrent(fields=torrent_data) diff --git a/tests/components/transmission/test_services.py b/tests/components/transmission/test_services.py index 52ff3e2aaef9de..d7868bdd824c15 100644 --- a/tests/components/transmission/test_services.py +++ b/tests/components/transmission/test_services.py @@ -8,9 +8,12 @@ ATTR_DELETE_DATA, ATTR_DOWNLOAD_PATH, ATTR_TORRENT, + ATTR_TORRENT_FILTER, + ATTR_TORRENTS, CONF_ENTRY_ID, DOMAIN, SERVICE_ADD_TORRENT, + SERVICE_GET_TORRENTS, SERVICE_REMOVE_TORRENT, SERVICE_START_TORRENT, SERVICE_STOP_TORRENT, @@ -252,3 +255,69 @@ async def test_remove_torrent_service_with_delete_data( ) client.remove_torrent.assert_called_once_with(789, delete_data=True) + + +@pytest.mark.parametrize( + ("filter_mode", "expected_statuses", "expected_torrents"), + [ + ("all", 
["seeding", "downloading", "stopped"], [1, 2, 3]), + ("started", ["downloading"], [1]), + ("completed", ["seeding"], [2]), + ("paused", ["stopped"], [3]), + ("active", ["seeding", "downloading"], [1, 2]), + ], +) +async def test_get_torrents_service( + hass: HomeAssistant, + mock_transmission_client: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_torrent, + filter_mode: str, + expected_statuses: list[str], + expected_torrents: list[int], +) -> None: + """Test get torrents service with various filter modes.""" + client = mock_transmission_client.return_value + + downloading_torrent = mock_torrent(torrent_id=1, name="Downloading", status=4) + seeding_torrent = mock_torrent(torrent_id=2, name="Seeding", status=6) + stopped_torrent = mock_torrent(torrent_id=3, name="Stopped", status=0) + + client.get_torrents.return_value = [ + downloading_torrent, + seeding_torrent, + stopped_torrent, + ] + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_TORRENTS, + { + CONF_ENTRY_ID: mock_config_entry.entry_id, + ATTR_TORRENT_FILTER: filter_mode, + }, + blocking=True, + return_response=True, + ) + + assert response is not None + assert ATTR_TORRENTS in response + torrents = response[ATTR_TORRENTS] + assert isinstance(torrents, dict) + + assert len(torrents) == len(expected_statuses) + + for torrent_name, torrent_data in torrents.items(): + assert isinstance(torrent_data, dict) + assert "id" in torrent_data + assert "name" in torrent_data + assert "status" in torrent_data + assert torrent_data["name"] == torrent_name + assert torrent_data["id"] in expected_torrents + expected_torrents.remove(int(torrent_data["id"])) + + assert len(expected_torrents) == 0 diff --git a/tests/components/unifiprotect/__init__.py b/tests/components/unifiprotect/__init__.py index cc59bca3506168..51a7e9af177859 100644 --- 
a/tests/components/unifiprotect/__init__.py +++ b/tests/components/unifiprotect/__init__.py @@ -1,9 +1,12 @@ """Tests for the UniFi Protect integration.""" +from collections.abc import Generator from contextlib import contextmanager -from unittest.mock import AsyncMock, MagicMock, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from uiprotect.data.base import ProtectModel from unifi_discovery import AIOUnifiScanner, UnifiDevice, UnifiService DEVICE_HOSTNAME = "unvr" @@ -46,3 +49,26 @@ def _patcher(): yield return _patcher() + + +@contextmanager +def patch_ufp_method( + obj: ProtectModel, method: str, *args: Any, **kwargs: Any +) -> Generator[MagicMock]: + """Patch a method on a UniFi Protect pydantic model. + + Pydantic models have frozen fields that cannot be directly patched. + This context manager temporarily modifies the field descriptor to allow + patching. + + Note: The field modification is intentionally not restored, as test fixtures + create fresh model instances for each test. + + Usage: + with patch_ufp_method(doorbell, "set_lcd_text", new_callable=AsyncMock) as mock: + await hass.services.async_call(...) + mock.assert_called_once_with(...) + """ + obj.__pydantic_fields__[method] = Mock(final=False, frozen=False) + with patch.object(obj, method, *args, **kwargs) as mock_method: + yield mock_method diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 717f2c3a3921f6..e76de9ea1514f7 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -44,6 +44,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component +from . 
import patch_ufp_method from .utils import ( Camera, MockUFPFixture, @@ -615,14 +616,14 @@ async def test_camera_motion_detection( assert_entity_counts(hass, Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) - camera.set_motion_detection = AsyncMock() - - await hass.services.async_call( - "camera", - service, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - camera.set_motion_detection.assert_called_once_with(expected_value) + with patch_ufp_method( + camera, "set_motion_detection", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "camera", + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + mock_method.assert_called_once_with(expected_value) diff --git a/tests/components/unifiprotect/test_lock.py b/tests/components/unifiprotect/test_lock.py index 9095c092ea2148..94fecb36da25ef 100644 --- a/tests/components/unifiprotect/test_lock.py +++ b/tests/components/unifiprotect/test_lock.py @@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . 
import patch_ufp_method from .utils import ( MockUFPFixture, adopt_devices, @@ -210,17 +211,17 @@ async def test_lock_do_lock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - doorlock.__pydantic_fields__["close_lock"] = Mock(final=False, frozen=False) - doorlock.close_lock = AsyncMock() + with patch_ufp_method( + doorlock, "close_lock", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "lock", + "lock", + {ATTR_ENTITY_ID: "lock.test_lock_lock"}, + blocking=True, + ) - await hass.services.async_call( - "lock", - "lock", - {ATTR_ENTITY_ID: "lock.test_lock_lock"}, - blocking=True, - ) - - doorlock.close_lock.assert_called_once() + mock_method.assert_called_once() async def test_lock_do_unlock( @@ -245,14 +246,12 @@ async def test_lock_do_unlock( ufp.ws_msg(mock_msg) await hass.async_block_till_done() - doorlock.__pydantic_fields__["open_lock"] = Mock(final=False, frozen=False) - new_lock.open_lock = AsyncMock() - - await hass.services.async_call( - "lock", - "unlock", - {ATTR_ENTITY_ID: "lock.test_lock_lock"}, - blocking=True, - ) + with patch_ufp_method(new_lock, "open_lock", new_callable=AsyncMock) as mock_method: + await hass.services.async_call( + "lock", + "unlock", + {ATTR_ENTITY_ID: "lock.test_lock_lock"}, + blocking=True, + ) - new_lock.open_lock.assert_called_once() + mock_method.assert_called_once() diff --git a/tests/components/unifiprotect/test_media_player.py b/tests/components/unifiprotect/test_media_player.py index 0d5e8d4ed4bb61..f163687a2c682d 100644 --- a/tests/components/unifiprotect/test_media_player.py +++ b/tests/components/unifiprotect/test_media_player.py @@ -25,6 +25,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from . 
import patch_ufp_method from .utils import ( MockUFPFixture, adopt_devices, @@ -116,17 +117,17 @@ async def test_media_player_set_volume( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__pydantic_fields__["set_speaker_volume"] = Mock(final=False, frozen=False) - doorbell.set_speaker_volume = AsyncMock() - - await hass.services.async_call( - "media_player", - "volume_set", - {ATTR_ENTITY_ID: "media_player.test_camera_speaker", "volume_level": 0.5}, - blocking=True, - ) + with patch_ufp_method( + doorbell, "set_speaker_volume", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "media_player", + "volume_set", + {ATTR_ENTITY_ID: "media_player.test_camera_speaker", "volume_level": 0.5}, + blocking=True, + ) - doorbell.set_speaker_volume.assert_called_once_with(50) + mock_method.assert_called_once_with(50) async def test_media_player_stop( @@ -173,30 +174,26 @@ async def test_media_player_play( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) - doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) - doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( - final=False, frozen=False - ) - doorbell.stop_audio = AsyncMock() - doorbell.play_audio = AsyncMock() - doorbell.wait_until_audio_completes = AsyncMock() - - await hass.services.async_call( - "media_player", - "play_media", - { - ATTR_ENTITY_ID: "media_player.test_camera_speaker", - "media_content_id": "http://example.com/test.mp3", - "media_content_type": "music", - }, - blocking=True, - ) + with ( + patch_ufp_method(doorbell, "stop_audio", new_callable=AsyncMock), + patch_ufp_method(doorbell, "play_audio", new_callable=AsyncMock) as mock_play, + patch_ufp_method( + doorbell, "wait_until_audio_completes", new_callable=AsyncMock + ) as 
mock_wait, + ): + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: "media_player.test_camera_speaker", + "media_content_id": "http://example.com/test.mp3", + "media_content_type": "music", + }, + blocking=True, + ) - doorbell.play_audio.assert_called_once_with( - "http://example.com/test.mp3", blocking=False - ) - doorbell.wait_until_audio_completes.assert_called_once() + mock_play.assert_called_once_with("http://example.com/test.mp3", blocking=False) + mock_wait.assert_called_once() async def test_media_player_play_media_source( @@ -210,18 +207,16 @@ async def test_media_player_play_media_source( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) - doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) - doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( - final=False, frozen=False - ) - doorbell.stop_audio = AsyncMock() - doorbell.play_audio = AsyncMock() - doorbell.wait_until_audio_completes = AsyncMock() - - with patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=Mock(url="http://example.com/test.mp3"), + with ( + patch_ufp_method(doorbell, "stop_audio", new_callable=AsyncMock), + patch_ufp_method(doorbell, "play_audio", new_callable=AsyncMock) as mock_play, + patch_ufp_method( + doorbell, "wait_until_audio_completes", new_callable=AsyncMock + ) as mock_wait, + patch( + "homeassistant.components.media_source.async_resolve_media", + return_value=Mock(url="http://example.com/test.mp3"), + ), ): await hass.services.async_call( "media_player", @@ -234,10 +229,8 @@ async def test_media_player_play_media_source( blocking=True, ) - doorbell.play_audio.assert_called_once_with( - "http://example.com/test.mp3", blocking=False - ) - doorbell.wait_until_audio_completes.assert_called_once() + 
mock_play.assert_called_once_with("http://example.com/test.mp3", blocking=False) + mock_wait.assert_called_once() async def test_media_player_play_invalid( @@ -251,22 +244,22 @@ async def test_media_player_play_invalid( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) - doorbell.play_audio = AsyncMock() + with patch_ufp_method( + doorbell, "play_audio", new_callable=AsyncMock + ) as mock_method: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: "media_player.test_camera_speaker", + "media_content_id": "/test.png", + "media_content_type": "image", + }, + blocking=True, + ) - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - "media_player", - "play_media", - { - ATTR_ENTITY_ID: "media_player.test_camera_speaker", - "media_content_id": "/test.png", - "media_content_type": "image", - }, - blocking=True, - ) - - assert not doorbell.play_audio.called + assert not mock_method.called async def test_media_player_play_error( @@ -280,24 +273,25 @@ async def test_media_player_play_error( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) - doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( - final=False, frozen=False - ) - doorbell.play_audio = AsyncMock(side_effect=StreamError) - doorbell.wait_until_audio_completes = AsyncMock() - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - "media_player", - "play_media", - { - ATTR_ENTITY_ID: "media_player.test_camera_speaker", - "media_content_id": "/test.mp3", - "media_content_type": "music", - }, - blocking=True, - ) - - assert doorbell.play_audio.called - assert not 
doorbell.wait_until_audio_completes.called + with ( + patch_ufp_method( + doorbell, "play_audio", new_callable=AsyncMock, side_effect=StreamError + ) as mock_play, + patch_ufp_method( + doorbell, "wait_until_audio_completes", new_callable=AsyncMock + ) as mock_wait, + ): + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: "media_player.test_camera_speaker", + "media_content_id": "/test.mp3", + "media_content_type": "music", + }, + blocking=True, + ) + + assert mock_play.called + assert not mock_wait.called diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 8b6746f43719e7..875932e3e83567 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -21,6 +21,7 @@ from homeassistant.components.unifiprotect.const import DOMAIN from homeassistant.components.unifiprotect.media_source import ( ProtectMediaSource, + SimpleEventType, async_get_media_source, ) from homeassistant.core import HomeAssistant @@ -1041,3 +1042,66 @@ async def test_browse_media_browse_whole_month_december( assert browse.identifier == base_id assert len(browse.children) == 1 assert browse.children[0].identifier == "test_id:event:test_event_id" + + +@pytest.mark.parametrize( + ("year", "month", "expected_days", "expected_end_month", "expected_end_year"), + [ + (2024, 1, 31, 2, 2024), # January + (2024, 2, 29, 3, 2024), # February (leap year) + (2023, 2, 28, 3, 2023), # February (non-leap year) + (2024, 4, 30, 5, 2024), # April + (2024, 12, 31, 1, 2025), # December - critical edge case + ], +) +async def test_build_days_whole_month_date_calculation( + hass: HomeAssistant, + ufp: MockUFPFixture, + year: int, + month: int, + expected_days: int, + expected_end_month: int, + expected_end_year: int, +) -> None: + """Test that whole month date calculation works for all month types. 
+ + This test verifies the monthrange-based date calculation in _build_days, + especially for December which previously used manual year/month increment logic. + """ + # Initialize the integration entry to get ProtectData + await init_entry(hass, ufp, [], regenerate_ids=False) + + # Create a start date for the first day of the month + start = datetime(year=year, month=month, day=1).date() + start_dt = datetime( + year=start.year, + month=start.month, + day=start.day, + hour=0, + minute=0, + second=0, + tzinfo=dt_util.get_default_time_zone(), + ) + + # Verify we got the expected number of days + expected_end = start_dt + timedelta(days=expected_days) + + # Verify it correctly goes to the expected month/year + assert expected_end.month == expected_end_month + assert expected_end.year == expected_end_year + assert expected_end.day == 1 + + # Build the media source with is_all=True (whole month) + source = ProtectMediaSource(hass, {}) + result = await source._build_days( + data=ufp.entry.runtime_data, + camera_id="test_camera", + event_type=SimpleEventType.ALL, + start=start, + is_all=True, + build_children=False, # We only care about the identifier, not children + ) + + # Verify the identifier format is correct + assert result.identifier.endswith(f"range:{year}:{month}:all") + assert "Whole Month" in result.title diff --git a/tests/components/unifiprotect/test_number.py b/tests/components/unifiprotect/test_number.py index 7ec104caf73eb1..d308d0199d7c66 100644 --- a/tests/components/unifiprotect/test_number.py +++ b/tests/components/unifiprotect/test_number.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import timedelta -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock import pytest from uiprotect.data import Camera, Doorlock, IRLEDMode, Light @@ -19,6 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . 
import patch_ufp_method from .utils import ( MockUFPFixture, adopt_devices, @@ -166,18 +167,21 @@ async def test_number_light_sensitivity( description = LIGHT_NUMBERS[0] assert description.ufp_set_method is not None - light.__pydantic_fields__["set_sensitivity"] = Mock(final=False, frozen=False) - light.set_sensitivity = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.NUMBER, light, description ) - await hass.services.async_call( - "number", "set_value", {ATTR_ENTITY_ID: entity_id, "value": 15.0}, blocking=True - ) + with patch_ufp_method( + light, "set_sensitivity", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "number", + "set_value", + {ATTR_ENTITY_ID: entity_id, "value": 15.0}, + blocking=True, + ) - light.set_sensitivity.assert_called_once_with(15.0) + mock_method.assert_called_once_with(15.0) async def test_number_light_duration( @@ -190,18 +194,19 @@ async def test_number_light_duration( description = LIGHT_NUMBERS[1] - light.__pydantic_fields__["set_duration"] = Mock(final=False, frozen=False) - light.set_duration = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.NUMBER, light, description ) - await hass.services.async_call( - "number", "set_value", {ATTR_ENTITY_ID: entity_id, "value": 15.0}, blocking=True - ) + with patch_ufp_method(light, "set_duration", new_callable=AsyncMock) as mock_method: + await hass.services.async_call( + "number", + "set_value", + {ATTR_ENTITY_ID: entity_id, "value": 15.0}, + blocking=True, + ) - light.set_duration.assert_called_once_with(timedelta(seconds=15.0)) + mock_method.assert_called_once_with(timedelta(seconds=15.0)) @pytest.mark.parametrize("description", CAMERA_NUMBERS) @@ -221,11 +226,9 @@ async def test_number_camera_simple( hass, Platform.NUMBER, camera_all_features, description ) - camera_all_features.__pydantic_fields__[description.ufp_set_method] = Mock( - final=False, frozen=False - ) - mock_method = AsyncMock() - with 
patch.object(camera_all_features, description.ufp_set_method, mock_method): + with patch_ufp_method( + camera_all_features, description.ufp_set_method, new_callable=AsyncMock + ) as mock_method: await hass.services.async_call( "number", "set_value", @@ -246,17 +249,18 @@ async def test_number_lock_auto_close( description = DOORLOCK_NUMBERS[0] - doorlock.__pydantic_fields__["set_auto_close_time"] = Mock( - final=False, frozen=False - ) - doorlock.set_auto_close_time = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.NUMBER, doorlock, description ) - await hass.services.async_call( - "number", "set_value", {ATTR_ENTITY_ID: entity_id, "value": 15.0}, blocking=True - ) + with patch_ufp_method( + doorlock, "set_auto_close_time", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "number", + "set_value", + {ATTR_ENTITY_ID: entity_id, "value": 15.0}, + blocking=True, + ) - doorlock.set_auto_close_time.assert_called_once_with(timedelta(seconds=15.0)) + mock_method.assert_called_once_with(timedelta(seconds=15.0)) diff --git a/tests/components/unifiprotect/test_select.py b/tests/components/unifiprotect/test_select.py index 699144a65fc9db..fdf3b7bb70af95 100644 --- a/tests/components/unifiprotect/test_select.py +++ b/tests/components/unifiprotect/test_select.py @@ -31,6 +31,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . 
import patch_ufp_method from .utils import ( MockUFPFixture, adopt_devices, @@ -262,8 +263,6 @@ async def test_select_update_doorbell_settings( expected_length += 1 new_nvr = copy(ufp.api.bootstrap.nvr) - new_nvr.__pydantic_fields__["update_all_messages"] = Mock(final=False, frozen=False) - new_nvr.update_all_messages = Mock() new_nvr.doorbell_settings.all_messages = [ *new_nvr.doorbell_settings.all_messages, @@ -277,11 +276,12 @@ async def test_select_update_doorbell_settings( mock_msg.changed_data = {"doorbell_settings": {}} mock_msg.new_obj = new_nvr - ufp.api.bootstrap.nvr = new_nvr - ufp.ws_msg(mock_msg) - await hass.async_block_till_done() + with patch_ufp_method(new_nvr, "update_all_messages") as mock_method: + ufp.api.bootstrap.nvr = new_nvr + ufp.ws_msg(mock_msg) + await hass.async_block_till_done() - new_nvr.update_all_messages.assert_called_once() + mock_method.assert_called_once() state = hass.states.get(entity_id) assert state @@ -334,19 +334,17 @@ async def test_select_set_option_light_motion( hass, Platform.SELECT, light, LIGHT_SELECTS[0] ) - light.__pydantic_fields__["set_light_settings"] = Mock(final=False, frozen=False) - light.set_light_settings = AsyncMock() - - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: LIGHT_MODE_OFF}, - blocking=True, - ) + with patch_ufp_method( + light, "set_light_settings", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: LIGHT_MODE_OFF}, + blocking=True, + ) - light.set_light_settings.assert_called_once_with( - LightModeType.MANUAL, enable_at=None - ) + mock_method.assert_called_once_with(LightModeType.MANUAL, enable_at=None) async def test_select_set_option_light_camera( @@ -361,28 +359,28 @@ async def test_select_set_option_light_camera( hass, Platform.SELECT, light, LIGHT_SELECTS[1] ) - light.__pydantic_fields__["set_paired_camera"] = Mock(final=False, 
frozen=False) - light.set_paired_camera = AsyncMock() - camera = list(light.api.bootstrap.cameras.values())[0] - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: camera.name}, - blocking=True, - ) + with patch_ufp_method( + light, "set_paired_camera", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: camera.name}, + blocking=True, + ) - light.set_paired_camera.assert_called_once_with(camera) + mock_method.assert_called_once_with(camera) - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Not Paired"}, - blocking=True, - ) + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Not Paired"}, + blocking=True, + ) - light.set_paired_camera.assert_called_with(None) + mock_method.assert_called_with(None) async def test_select_set_option_camera_recording( @@ -397,17 +395,17 @@ async def test_select_set_option_camera_recording( hass, Platform.SELECT, doorbell, CAMERA_SELECTS[0] ) - doorbell.__pydantic_fields__["set_recording_mode"] = Mock(final=False, frozen=False) - doorbell.set_recording_mode = AsyncMock() - - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "never"}, - blocking=True, - ) + with patch_ufp_method( + doorbell, "set_recording_mode", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "never"}, + blocking=True, + ) - doorbell.set_recording_mode.assert_called_once_with(RecordingMode.NEVER) + mock_method.assert_called_once_with(RecordingMode.NEVER) async def test_select_set_option_camera_ir( @@ -422,17 +420,17 @@ async def test_select_set_option_camera_ir( hass, Platform.SELECT, doorbell, CAMERA_SELECTS[1] ) - 
doorbell.__pydantic_fields__["set_ir_led_model"] = Mock(final=False, frozen=False) - doorbell.set_ir_led_model = AsyncMock() - - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "on"}, - blocking=True, - ) + with patch_ufp_method( + doorbell, "set_ir_led_model", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "on"}, + blocking=True, + ) - doorbell.set_ir_led_model.assert_called_once_with(IRLEDMode.ON) + mock_method.assert_called_once_with(IRLEDMode.ON) async def test_select_set_option_camera_doorbell_custom( @@ -447,19 +445,19 @@ async def test_select_set_option_camera_doorbell_custom( hass, Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) - doorbell.set_lcd_text = AsyncMock() - - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Test"}, - blocking=True, - ) + with patch_ufp_method( + doorbell, "set_lcd_text", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Test"}, + blocking=True, + ) - doorbell.set_lcd_text.assert_called_once_with( - DoorbellMessageType.CUSTOM_MESSAGE, text="Test" - ) + mock_method.assert_called_once_with( + DoorbellMessageType.CUSTOM_MESSAGE, text="Test" + ) async def test_select_set_option_camera_doorbell_unifi( @@ -474,34 +472,32 @@ async def test_select_set_option_camera_doorbell_unifi( hass, Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) - doorbell.set_lcd_text = AsyncMock() - - await hass.services.async_call( - "select", - "select_option", - { - ATTR_ENTITY_ID: entity_id, - ATTR_OPTION: "LEAVE PACKAGE AT DOOR", - }, - blocking=True, - ) + with patch_ufp_method( + 
doorbell, "set_lcd_text", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "LEAVE PACKAGE AT DOOR", + }, + blocking=True, + ) - doorbell.set_lcd_text.assert_called_once_with( - DoorbellMessageType.LEAVE_PACKAGE_AT_DOOR - ) + mock_method.assert_called_once_with(DoorbellMessageType.LEAVE_PACKAGE_AT_DOOR) - await hass.services.async_call( - "select", - "select_option", - { - ATTR_ENTITY_ID: entity_id, - ATTR_OPTION: "Default Message (Welcome)", - }, - blocking=True, - ) + await hass.services.async_call( + "select", + "select_option", + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "Default Message (Welcome)", + }, + blocking=True, + ) - doorbell.set_lcd_text.assert_called_with(None) + mock_method.assert_called_with(None) async def test_select_set_option_camera_doorbell_default( @@ -516,20 +512,20 @@ async def test_select_set_option_camera_doorbell_default( hass, Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) - doorbell.set_lcd_text = AsyncMock() - - await hass.services.async_call( - "select", - "select_option", - { - ATTR_ENTITY_ID: entity_id, - ATTR_OPTION: "Default Message (Welcome)", - }, - blocking=True, - ) + with patch_ufp_method( + doorbell, "set_lcd_text", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "Default Message (Welcome)", + }, + blocking=True, + ) - doorbell.set_lcd_text.assert_called_once_with(None) + mock_method.assert_called_once_with(None) async def test_select_set_option_viewer( @@ -545,16 +541,16 @@ async def test_select_set_option_viewer( hass, Platform.SELECT, viewer, VIEWER_SELECTS[0] ) - viewer.__pydantic_fields__["set_liveview"] = Mock(final=False, frozen=False) - viewer.set_liveview = AsyncMock() - liveview = 
list(viewer.api.bootstrap.liveviews.values())[0] - await hass.services.async_call( - "select", - "select_option", - {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: liveview.name}, - blocking=True, - ) + with patch_ufp_method( + viewer, "set_liveview", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "select", + "select_option", + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: liveview.name}, + blocking=True, + ) - viewer.set_liveview.assert_called_once_with(liveview) + mock_method.assert_called_once_with(liveview) diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index 8073715dcd1aec..f08e7157b83c88 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -32,6 +32,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er +from . import patch_ufp_method from .utils import MockUFPFixture, init_entry @@ -66,19 +67,18 @@ async def test_global_service_bad_device( """Test global service, invalid device ID.""" nvr = ufp.api.bootstrap.nvr - nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( - final=False, frozen=False - ) - nvr.add_custom_doorbell_message = AsyncMock() - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - DOMAIN, - SERVICE_ADD_DOORBELL_TEXT, - {ATTR_DEVICE_ID: "bad_device_id", ATTR_MESSAGE: "Test Message"}, - blocking=True, - ) - assert not nvr.add_custom_doorbell_message.called + with patch_ufp_method( + nvr, "add_custom_doorbell_message", new_callable=AsyncMock + ) as mock_method: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_ADD_DOORBELL_TEXT, + {ATTR_DEVICE_ID: "bad_device_id", ATTR_MESSAGE: "Test Message"}, + blocking=True, + ) + assert not mock_method.called async def test_global_service_exception( @@ -87,19 +87,21 @@ async def 
test_global_service_exception( """Test global service, unexpected error.""" nvr = ufp.api.bootstrap.nvr - nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( - final=False, frozen=False - ) - nvr.add_custom_doorbell_message = AsyncMock(side_effect=BadRequest) - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - DOMAIN, - SERVICE_ADD_DOORBELL_TEXT, - {ATTR_DEVICE_ID: device.id, ATTR_MESSAGE: "Test Message"}, - blocking=True, - ) - assert nvr.add_custom_doorbell_message.called + with patch_ufp_method( + nvr, + "add_custom_doorbell_message", + new_callable=AsyncMock, + side_effect=BadRequest, + ) as mock_method: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_ADD_DOORBELL_TEXT, + {ATTR_DEVICE_ID: device.id, ATTR_MESSAGE: "Test Message"}, + blocking=True, + ) + assert mock_method.called async def test_add_doorbell_text( @@ -108,18 +110,17 @@ async def test_add_doorbell_text( """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( - final=False, frozen=False - ) - nvr.add_custom_doorbell_message = AsyncMock() - await hass.services.async_call( - DOMAIN, - SERVICE_ADD_DOORBELL_TEXT, - {ATTR_DEVICE_ID: device.id, ATTR_MESSAGE: "Test Message"}, - blocking=True, - ) - nvr.add_custom_doorbell_message.assert_called_once_with("Test Message") + with patch_ufp_method( + nvr, "add_custom_doorbell_message", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + DOMAIN, + SERVICE_ADD_DOORBELL_TEXT, + {ATTR_DEVICE_ID: device.id, ATTR_MESSAGE: "Test Message"}, + blocking=True, + ) + mock_method.assert_called_once_with("Test Message") async def test_remove_doorbell_text( @@ -128,18 +129,17 @@ async def test_remove_doorbell_text( """Test remove_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__pydantic_fields__["remove_custom_doorbell_message"] = Mock( - final=False, frozen=False - ) - 
nvr.remove_custom_doorbell_message = AsyncMock() - await hass.services.async_call( - DOMAIN, - SERVICE_REMOVE_DOORBELL_TEXT, - {ATTR_DEVICE_ID: subdevice.id, ATTR_MESSAGE: "Test Message"}, - blocking=True, - ) - nvr.remove_custom_doorbell_message.assert_called_once_with("Test Message") + with patch_ufp_method( + nvr, "remove_custom_doorbell_message", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + DOMAIN, + SERVICE_REMOVE_DOORBELL_TEXT, + {ATTR_DEVICE_ID: subdevice.id, ATTR_MESSAGE: "Test Message"}, + blocking=True, + ) + mock_method.assert_called_once_with("Test Message") async def test_add_doorbell_text_disabled_config_entry( @@ -147,24 +147,23 @@ async def test_add_doorbell_text_disabled_config_entry( ) -> None: """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( - final=False, frozen=False - ) - nvr.add_custom_doorbell_message = AsyncMock() await hass.config_entries.async_set_disabled_by( ufp.entry.entry_id, ConfigEntryDisabler.USER ) await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - DOMAIN, - SERVICE_ADD_DOORBELL_TEXT, - {ATTR_DEVICE_ID: device.id, ATTR_MESSAGE: "Test Message"}, - blocking=True, - ) - assert not nvr.add_custom_doorbell_message.called + with patch_ufp_method( + nvr, "add_custom_doorbell_message", new_callable=AsyncMock + ) as mock_method: + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_ADD_DOORBELL_TEXT, + {ATTR_DEVICE_ID: device.id, ATTR_MESSAGE: "Test Message"}, + blocking=True, + ) + assert not mock_method.called async def test_set_chime_paired_doorbells( diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 0e5efd8a1826d0..852ecb62f7bfca 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -2,7 +2,7 @@ from 
__future__ import annotations -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock import pytest from uiprotect.data import Camera, Light, Permission, RecordingMode, VideoMode @@ -22,6 +22,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from . import patch_ufp_method from .utils import ( MockUFPFixture, adopt_devices, @@ -90,21 +91,20 @@ async def test_switch_nvr(hass: HomeAssistant, ufp: MockUFPFixture) -> None: assert_entity_counts(hass, Platform.SWITCH, 2, 2) nvr = ufp.api.bootstrap.nvr - nvr.__pydantic_fields__["set_insights"] = Mock(final=False, frozen=False) - nvr.set_insights = AsyncMock() entity_id = "switch.unifiprotect_insights_enabled" - await hass.services.async_call( - "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with patch_ufp_method(nvr, "set_insights", new_callable=AsyncMock) as mock_method: + await hass.services.async_call( + "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - nvr.set_insights.assert_called_once_with(True) + mock_method.assert_called_once_with(True) - await hass.services.async_call( - "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + await hass.services.async_call( + "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - nvr.set_insights.assert_called_with(False) + mock_method.assert_called_with(False) async def test_switch_setup_no_perm( @@ -267,24 +267,24 @@ async def test_switch_light_status( description = LIGHT_SWITCHES[1] - light.__pydantic_fields__["set_status_light"] = Mock(final=False, frozen=False) - light.set_status_light = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, light, description ) - await hass.services.async_call( - "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with patch_ufp_method( + light, "set_status_light", new_callable=AsyncMock + ) as mock_method: + await 
hass.services.async_call( + "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - light.set_status_light.assert_called_once_with(True) + mock_method.assert_called_once_with(True) - await hass.services.async_call( - "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + await hass.services.async_call( + "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - light.set_status_light.assert_called_with(False) + mock_method.assert_called_with(False) async def test_switch_camera_ssh( @@ -297,25 +297,23 @@ async def test_switch_camera_ssh( description = CAMERA_SWITCHES[0] - doorbell.__pydantic_fields__["set_ssh"] = Mock(final=False, frozen=False) - doorbell.set_ssh = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, doorbell, description ) await enable_entity(hass, ufp.entry.entry_id, entity_id) - await hass.services.async_call( - "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with patch_ufp_method(doorbell, "set_ssh", new_callable=AsyncMock) as mock_method: + await hass.services.async_call( + "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - doorbell.set_ssh.assert_called_once_with(True) + mock_method.assert_called_once_with(True) - await hass.services.async_call( - "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + await hass.services.async_call( + "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - doorbell.set_ssh.assert_called_with(False) + mock_method.assert_called_with(False) @pytest.mark.parametrize("description", CAMERA_SWITCHES_NO_EXTRA) @@ -332,11 +330,9 @@ async def test_switch_camera_simple( assert description.ufp_set_method is not None - doorbell.__pydantic_fields__[description.ufp_set_method] = Mock( - final=False, frozen=False - ) - mock_method = AsyncMock() - with patch.object(doorbell, description.ufp_set_method, mock_method): + with patch_ufp_method( + doorbell, description.ufp_set_method, 
new_callable=AsyncMock + ) as mock_method: _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, doorbell, description ) @@ -364,24 +360,24 @@ async def test_switch_camera_highfps( description = CAMERA_SWITCHES[3] - doorbell.__pydantic_fields__["set_video_mode"] = Mock(final=False, frozen=False) - doorbell.set_video_mode = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, doorbell, description ) - await hass.services.async_call( - "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with patch_ufp_method( + doorbell, "set_video_mode", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - doorbell.set_video_mode.assert_called_once_with(VideoMode.HIGH_FPS) + mock_method.assert_called_once_with(VideoMode.HIGH_FPS) - await hass.services.async_call( - "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + await hass.services.async_call( + "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - doorbell.set_video_mode.assert_called_with(VideoMode.DEFAULT) + mock_method.assert_called_with(VideoMode.DEFAULT) async def test_switch_camera_privacy( @@ -397,9 +393,6 @@ async def test_switch_camera_privacy( description = PRIVACY_MODE_SWITCH - doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) - doorbell.set_privacy = AsyncMock() - _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, doorbell, description ) @@ -409,35 +402,38 @@ async def test_switch_camera_privacy( assert ATTR_PREV_MIC not in state.attributes assert ATTR_PREV_RECORD not in state.attributes - await hass.services.async_call( - "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with patch_ufp_method( + doorbell, "set_privacy", new_callable=AsyncMock + ) as mock_set_privacy: + await hass.services.async_call( + "switch", "turn_on", {ATTR_ENTITY_ID: 
entity_id}, blocking=True + ) - doorbell.set_privacy.assert_called_with(True, 0, RecordingMode.NEVER) + mock_set_privacy.assert_called_with(True, 0, RecordingMode.NEVER) - new_doorbell = doorbell.model_copy() - new_doorbell.add_privacy_zone() - new_doorbell.mic_volume = 0 - new_doorbell.recording_settings.mode = RecordingMode.NEVER - ufp.api.bootstrap.cameras = {new_doorbell.id: new_doorbell} + new_doorbell = doorbell.model_copy() + new_doorbell.add_privacy_zone() + new_doorbell.mic_volume = 0 + new_doorbell.recording_settings.mode = RecordingMode.NEVER + ufp.api.bootstrap.cameras = {new_doorbell.id: new_doorbell} - mock_msg = Mock() - mock_msg.changed_data = {} - mock_msg.new_obj = new_doorbell - ufp.ws_msg(mock_msg) + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = new_doorbell + ufp.ws_msg(mock_msg) - state = hass.states.get(entity_id) - assert state and state.state == "on" - assert state.attributes[ATTR_PREV_MIC] == previous_mic - assert state.attributes[ATTR_PREV_RECORD] == previous_record.value + state = hass.states.get(entity_id) + assert state and state.state == "on" + assert state.attributes[ATTR_PREV_MIC] == previous_mic + assert state.attributes[ATTR_PREV_RECORD] == previous_record.value - doorbell.set_privacy.reset_mock() + mock_set_privacy.reset_mock() - await hass.services.async_call( - "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + await hass.services.async_call( + "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - doorbell.set_privacy.assert_called_with(False, previous_mic, previous_record) + mock_set_privacy.assert_called_with(False, previous_mic, previous_record) async def test_switch_camera_privacy_already_on( @@ -451,18 +447,18 @@ async def test_switch_camera_privacy_already_on( description = PRIVACY_MODE_SWITCH - doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) - doorbell.set_privacy = AsyncMock() - _, entity_id = await ids_from_device_description( 
hass, Platform.SWITCH, doorbell, description ) - await hass.services.async_call( - "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with patch_ufp_method( + doorbell, "set_privacy", new_callable=AsyncMock + ) as mock_set_privacy: + await hass.services.async_call( + "switch", "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) - doorbell.set_privacy.assert_called_once_with(False, 100, RecordingMode.ALWAYS) + mock_set_privacy.assert_called_once_with(False, 100, RecordingMode.ALWAYS) async def test_switch_turn_on_client_error( @@ -474,14 +470,19 @@ async def test_switch_turn_on_client_error( description = LIGHT_SWITCHES[1] - light.__pydantic_fields__["set_status_light"] = Mock(final=False, frozen=False) - light.set_status_light = AsyncMock(side_effect=ClientError("Test error")) - _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, light, description ) - with pytest.raises(HomeAssistantError): + with ( + patch_ufp_method( + light, + "set_status_light", + new_callable=AsyncMock, + side_effect=ClientError("Test error"), + ), + pytest.raises(HomeAssistantError), + ): await hass.services.async_call( "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) @@ -496,14 +497,19 @@ async def test_switch_turn_on_not_authorized( description = LIGHT_SWITCHES[1] - light.__pydantic_fields__["set_status_light"] = Mock(final=False, frozen=False) - light.set_status_light = AsyncMock(side_effect=NotAuthorized("Not authorized")) - _, entity_id = await ids_from_device_description( hass, Platform.SWITCH, light, description ) - with pytest.raises(HomeAssistantError): + with ( + patch_ufp_method( + light, + "set_status_light", + new_callable=AsyncMock, + side_effect=NotAuthorized("Not authorized"), + ), + pytest.raises(HomeAssistantError), + ): await hass.services.async_call( "switch", "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True ) diff --git a/tests/components/unifiprotect/test_text.py 
b/tests/components/unifiprotect/test_text.py index bf9f0502e35a29..37213c3c99df69 100644 --- a/tests/components/unifiprotect/test_text.py +++ b/tests/components/unifiprotect/test_text.py @@ -2,7 +2,7 @@ from __future__ import annotations -from unittest.mock import AsyncMock, Mock +from unittest.mock import AsyncMock from uiprotect.data import Camera, DoorbellMessageType, LCDMessage @@ -12,6 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . import patch_ufp_method from .utils import ( MockUFPFixture, adopt_devices, @@ -78,16 +79,16 @@ async def test_text_camera_set( hass, Platform.TEXT, doorbell, description ) - doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) - doorbell.set_lcd_text = AsyncMock() - - await hass.services.async_call( - "text", - "set_value", - {ATTR_ENTITY_ID: entity_id, "value": "Test test"}, - blocking=True, - ) - - doorbell.set_lcd_text.assert_called_once_with( - DoorbellMessageType.CUSTOM_MESSAGE, text="Test test" - ) + with patch_ufp_method( + doorbell, "set_lcd_text", new_callable=AsyncMock + ) as mock_method: + await hass.services.async_call( + "text", + "set_value", + {ATTR_ENTITY_ID: entity_id, "value": "Test test"}, + blocking=True, + ) + + mock_method.assert_called_once_with( + DoorbellMessageType.CUSTOM_MESSAGE, text="Test test" + ) diff --git a/tests/components/vesync/test_init.py b/tests/components/vesync/test_init.py index 758ae61858a38d..97d27fe221cd12 100644 --- a/tests/components/vesync/test_init.py +++ b/tests/components/vesync/test_init.py @@ -6,7 +6,6 @@ from pyvesync.utils.errors import VeSyncLoginError from homeassistant.components.vesync import ( - SERVICE_UPDATE_DEVS, async_remove_config_entry_device, async_setup_entry, ) @@ -91,34 +90,6 @@ async def test_async_setup_entry__loads_fans( assert list(hass.data[DOMAIN][VS_MANAGER].devices) == [fan] -async def test_async_new_device_discovery( - hass: HomeAssistant, config_entry: 
ConfigEntry, manager: VeSync, fan, humidifier -) -> None: - """Test new device discovery.""" - - assert await hass.config_entries.async_setup(config_entry.entry_id) - # Assert platforms loaded - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - assert not hass.data[DOMAIN][VS_MANAGER].devices - - # Mock discovery of new fan which would get added to VS_DEVICES. - manager._dev_list["fans"].append(fan) - await hass.services.async_call(DOMAIN, SERVICE_UPDATE_DEVS, {}, blocking=True) - - assert manager.get_devices.call_count == 1 - assert hass.data[DOMAIN][VS_MANAGER] == manager - assert list(hass.data[DOMAIN][VS_MANAGER].devices) == [fan] - - # Mock discovery of new humidifier which would invoke discovery in all platforms. - manager._dev_list["humidifiers"].append(humidifier) - await hass.services.async_call(DOMAIN, SERVICE_UPDATE_DEVS, {}, blocking=True) - - assert manager.get_devices.call_count == 2 - assert hass.data[DOMAIN][VS_MANAGER] == manager - assert list(hass.data[DOMAIN][VS_MANAGER].devices) == [fan, humidifier] - - async def test_migrate_config_entry( hass: HomeAssistant, switch_old_id_config_entry: MockConfigEntry, diff --git a/tests/components/vesync/test_services.py b/tests/components/vesync/test_services.py new file mode 100644 index 00000000000000..6508a49f311a6a --- /dev/null +++ b/tests/components/vesync/test_services.py @@ -0,0 +1,86 @@ +"""Tests for VeSync services.""" + +from unittest.mock import AsyncMock + +import pytest +from pyvesync import VeSync + +from homeassistant.components.vesync import async_setup +from homeassistant.components.vesync.const import ( + DOMAIN, + SERVICE_UPDATE_DEVS, + VS_MANAGER, +) +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + + +async def test_async_new_device_discovery_no_entry( + hass: 
HomeAssistant, +) -> None: + """Service should raise when no config entry exists.""" + + # Ensure the integration is set up so the service is registered + assert await async_setup(hass, {}) + + # No entries for the domain, service should raise + with pytest.raises(ServiceValidationError, match="Entry not found"): + await hass.services.async_call("vesync", SERVICE_UPDATE_DEVS, {}, blocking=True) + + +async def test_async_new_device_discovery_entry_not_loaded( + hass: HomeAssistant, config_entry: ConfigEntry +) -> None: + """Service should raise when entry exists but is not loaded.""" + + # Add a config entry but do not set it up (state is not LOADED) + assert config_entry.state is ConfigEntryState.NOT_LOADED + # Ensure the integration is set up so the service is registered + assert await async_setup(hass, {}) + + with pytest.raises(ServiceValidationError, match="Entry not loaded"): + await hass.services.async_call("vesync", SERVICE_UPDATE_DEVS, {}, blocking=True) + + +async def test_async_new_device_discovery( + hass: HomeAssistant, + config_entry: ConfigEntry, + manager: VeSync, + fan, + entity_registry: er.EntityRegistry, +) -> None: + """Test new device discovery.""" + + # Entry should not be set up yet; we'll install a fan before setup + assert config_entry.state is ConfigEntryState.NOT_LOADED + + # Set up the config entry (no devices initially) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + assert not hass.data[DOMAIN][VS_MANAGER].devices + + # Simulate the manager discovering a new fan when get_devices is called + manager.get_devices = AsyncMock( + side_effect=lambda: manager._dev_list["fans"].append(fan) + ) + + # Call the service that should trigger discovery and platform setup + await hass.services.async_call(DOMAIN, SERVICE_UPDATE_DEVS, {}, blocking=True) + await hass.async_block_till_done() + + assert manager.get_devices.call_count == 1 + 
+ # Verify an entity for the new fan was created in Home Assistant + fan_entry = next( + ( + e + for e in entity_registry.entities.values() + if e.unique_id == fan.cid and e.domain == "fan" + ), + None, + ) + assert fan_entry is not None diff --git a/tests/components/webdav/test_init.py b/tests/components/webdav/test_init.py index 89f0e703b22c17..feaf8ac778fa08 100644 --- a/tests/components/webdav/test_init.py +++ b/tests/components/webdav/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiowebdav2.exceptions import AccessDeniedError, UnauthorizedError, WebDavError +from aiowebdav2.exceptions import AccessDeniedError, UnauthorizedError import pytest from homeassistant.components.webdav.const import CONF_BACKUP_PATH, DOMAIN @@ -15,103 +15,6 @@ from tests.common import MockConfigEntry -async def test_migrate_wrong_path( - hass: HomeAssistant, webdav_client: AsyncMock -) -> None: - """Test migration of wrong encoded folder path.""" - webdav_client.list_with_properties.return_value = [ - {"/wrong%20path": []}, - ] - - config_entry = MockConfigEntry( - title="user@webdav.demo", - domain=DOMAIN, - data={ - CONF_URL: "https://webdav.demo", - CONF_USERNAME: "user", - CONF_PASSWORD: "supersecretpassword", - CONF_BACKUP_PATH: "/wrong path", - }, - entry_id="01JKXV07ASC62D620DGYNG2R8H", - ) - await setup_integration(hass, config_entry) - - webdav_client.move.assert_called_once_with("/wrong%20path", "/wrong path") - - -@pytest.mark.parametrize( - ("expected_path", "remote_path_check"), - [ - ( - "/correct path", - False, - ), # remote_path_check is False as /correct%20path is not there - ("/", True), - ("/folder_with_underscores", True), - ], -) -async def test_migrate_non_wrong_path( - hass: HomeAssistant, - webdav_client: AsyncMock, - expected_path: str, - remote_path_check: bool, -) -> None: - """Test no migration of correct folder path.""" - webdav_client.list_with_properties.return_value = [ - {expected_path: []}, - ] - # first return is used to 
check the connectivity - # second is used in the migration to determine if wrong quoted path is there - webdav_client.check.side_effect = [True, remote_path_check] - - config_entry = MockConfigEntry( - title="user@webdav.demo", - domain=DOMAIN, - data={ - CONF_URL: "https://webdav.demo", - CONF_USERNAME: "user", - CONF_PASSWORD: "supersecretpassword", - CONF_BACKUP_PATH: expected_path, - }, - entry_id="01JKXV07ASC62D620DGYNG2R8H", - ) - - await setup_integration(hass, config_entry) - - webdav_client.move.assert_not_called() - - -async def test_migrate_error( - hass: HomeAssistant, - webdav_client: AsyncMock, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test migration of wrong encoded folder path with error.""" - webdav_client.list_with_properties.return_value = [ - {"/wrong%20path": []}, - ] - webdav_client.move.side_effect = WebDavError("Failed to move") - - config_entry = MockConfigEntry( - title="user@webdav.demo", - domain=DOMAIN, - data={ - CONF_URL: "https://webdav.demo", - CONF_USERNAME: "user", - CONF_PASSWORD: "supersecretpassword", - CONF_BACKUP_PATH: "/wrong path", - }, - entry_id="01JKXV07ASC62D620DGYNG2R8H", - ) - await setup_integration(hass, config_entry) - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - assert ( - 'Failed to migrate wrong encoded folder "/wrong%20path" to "/wrong path"' - in caplog.text - ) - - @pytest.mark.parametrize( ("error", "expected_message", "expected_state"), [ diff --git a/tests/components/wled/conftest.py b/tests/components/wled/conftest.py index 301729843a2245..32acd40c853420 100644 --- a/tests/components/wled/conftest.py +++ b/tests/components/wled/conftest.py @@ -22,6 +22,7 @@ def mock_config_entry() -> MockConfigEntry: domain=DOMAIN, data={CONF_HOST: "192.168.1.123"}, unique_id="aabbccddeeff", + minor_version=2, ) diff --git a/tests/components/wled/test_config_flow.py b/tests/components/wled/test_config_flow.py index fe435135774eb6..40758771bb14f4 100644 --- 
a/tests/components/wled/test_config_flow.py +++ b/tests/components/wled/test_config_flow.py @@ -292,12 +292,15 @@ async def test_zeroconf_unsupported_version_error( @pytest.mark.usefixtures("mock_wled") +@pytest.mark.parametrize("device_mac", ["aabbccddeeff", "AABBCCDDEEFF"]) async def test_user_device_exists_abort( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_wled: MagicMock, + device_mac: str, ) -> None: """Test we abort zeroconf flow if WLED device already configured.""" + mock_wled.update.return_value.info.mac_address = device_mac mock_config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, @@ -334,10 +337,12 @@ async def test_zeroconf_without_mac_device_exists_abort( assert result.get("reason") == "already_configured" +@pytest.mark.parametrize("device_mac", ["aabbccddeeff", "AABBCCDDEEFF"]) async def test_zeroconf_with_mac_device_exists_abort( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_wled: MagicMock, + device_mac: str, ) -> None: """Test we abort zeroconf flow if WLED device already configured.""" mock_config_entry.add_to_hass(hass) @@ -350,7 +355,7 @@ async def test_zeroconf_with_mac_device_exists_abort( hostname="example.local.", name="mock_name", port=None, - properties={CONF_MAC: "aabbccddeeff"}, + properties={CONF_MAC: device_mac}, type="mock_type", ), ) diff --git a/tests/components/wled/test_init.py b/tests/components/wled/test_init.py index 9dfcabd55e3f8f..25d2ee3530d2c8 100644 --- a/tests/components/wled/test_init.py +++ b/tests/components/wled/test_init.py @@ -7,7 +7,9 @@ import pytest from wled import WLEDConnectionError -from homeassistant.config_entries import ConfigEntryState +from homeassistant.components.wled.const import DOMAIN +from homeassistant.config_entries import SOURCE_IGNORE, ConfigEntryState +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -61,9 +63,151 @@ async def 
test_config_entry_not_ready( assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY -async def test_setting_unique_id( - hass: HomeAssistant, init_integration: MockConfigEntry +@pytest.fixture +def config_entry_v1() -> MockConfigEntry: + """Return a WLED config entry at version 1.0 with a specific MAC.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "192.168.1.123"}, + unique_id="AABBCCDDEEFF", + minor_version=1, + ) + + +@pytest.mark.usefixtures("mock_setup_entry", "mock_wled") +async def test_migrate_entry_future_version_is_downgrade( + hass: HomeAssistant, +) -> None: + """Return False when user downgraded from a future version.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="WLED Future", + unique_id="AABBCCDDEEFF", + version=2, + minor_version=0, + data={CONF_HOST: "wled.local"}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert result is False + assert entry.state == ConfigEntryState.MIGRATION_ERROR + assert entry.version == 2 + assert entry.minor_version == 0 + assert entry.unique_id == "AABBCCDDEEFF" + + +@pytest.mark.usefixtures("mock_setup_entry", "mock_wled") +async def test_migrate_entry_v1_to_1_2_no_duplicates( + hass: HomeAssistant, config_entry_v1: MockConfigEntry +) -> None: + """Migrate from 1.x to 1.2 when there are no other entries with same MAC.""" + config_entry_v1.add_to_hass(hass) + + result = await hass.config_entries.async_setup(config_entry_v1.entry_id) + await hass.async_block_till_done() + + assert result is True + assert config_entry_v1.state == ConfigEntryState.LOADED + assert config_entry_v1.version == 1 + assert config_entry_v1.minor_version == 2 + assert config_entry_v1.unique_id == "aabbccddeeff" + + +@pytest.mark.usefixtures("mock_setup_entry", "mock_wled") +async def test_migrate_entry_v1_with_ignored_duplicates( + hass: HomeAssistant, config_entry_v1: MockConfigEntry ) -> None: - """Test we set unique 
ID if not set yet.""" - assert init_integration.runtime_data - assert init_integration.unique_id == "aabbccddeeff" + """Remove ignored entries with the same MAC and then migrate.""" + config_entry_v1.add_to_hass(hass) + + ignored_1 = MockConfigEntry( + domain=DOMAIN, + title="Ignored 1", + unique_id="aabbccddeeff", + source=SOURCE_IGNORE, + version=1, + minor_version=0, + data={"host": "wled-ignored-1.local"}, + ) + ignored_2 = MockConfigEntry( + domain=DOMAIN, + title="Ignored 2", + unique_id="aabbccddeeff", + source=SOURCE_IGNORE, + version=1, + minor_version=0, + data={"host": "wled-ignored-2.local"}, + ) + + ignored_1.add_to_hass(hass) + ignored_2.add_to_hass(hass) + + result = await hass.config_entries.async_setup(config_entry_v1.entry_id) + await hass.async_block_till_done() + + assert result is True + assert config_entry_v1.state == ConfigEntryState.LOADED + assert config_entry_v1.version == 1 + assert config_entry_v1.minor_version == 2 + assert config_entry_v1.unique_id == "aabbccddeeff" + + assert ignored_1.state is ConfigEntryState.NOT_LOADED + assert ignored_2.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("mock_setup_entry", "mock_wled") +async def test_migrate_entry_v1_with_non_ignored_duplicate_aborts( + hass: HomeAssistant, + config_entry_v1: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Abort migration when there is another non-ignored entry with the same MAC.""" + config_entry_v1.add_to_hass(hass) + + duplicate_active = MockConfigEntry( + domain=DOMAIN, + title="Active duplicate", + unique_id="aabbccddeeff", + version=1, + minor_version=0, + data={"host": "wled-duplicate.local"}, + ) + duplicate_active.add_to_hass(hass) + + result = await hass.config_entries.async_setup(config_entry_v1.entry_id) + await hass.async_block_till_done() + + assert result is False + assert config_entry_v1.state == ConfigEntryState.MIGRATION_ERROR + assert config_entry_v1.version == 1 + assert config_entry_v1.minor_version == 1 
+ assert config_entry_v1.unique_id == "AABBCCDDEEFF" + assert "multiple WLED config entries with the same MAC address" in caplog.text + + +@pytest.mark.usefixtures("mock_setup_entry", "mock_wled") +async def test_migrate_entry_already_at_1_2_is_noop( + hass: HomeAssistant, +) -> None: + """Do nothing when entry is already at version 1.2.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="WLED Already 1.2", + unique_id="aabbccddeeff", + version=1, + minor_version=2, + data={"host": "wled.local"}, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert result is True + assert entry.state == ConfigEntryState.LOADED + assert entry.version == 1 + assert entry.minor_version == 2 + assert entry.unique_id == "aabbccddeeff" diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 9fcb84beec61bc..bb44f9df41a37a 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -12,6 +12,7 @@ from homeassistant.loader import async_get_integration from homeassistant.requirements import ( CONSTRAINT_FILE, + DEPRECATED_PACKAGES, RequirementsNotFound, _async_get_manager, async_clear_install_history, @@ -657,3 +658,62 @@ async def test_discovery_requirements_dhcp(hass: HomeAssistant) -> None: assert len(mock_process.mock_calls) == 2 # dhcp does not depend on http assert mock_process.mock_calls[0][1][1] == dhcp.requirements + + +@pytest.mark.parametrize( + ("requirement", "is_built_in", "deprecation_info"), + [ + ( + "hello", + True, + "which is deprecated for testing. This will stop working in Home Assistant" + " 2020.12, please create a bug report at https://github.com/home-assistant/" + "core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test_component%22", + ), + ( + "hello>=1.0.0", + False, + "which is deprecated for testing. 
This will stop working in Home Assistant" + " 2020.12, please create a bug report at https://github.com/home-assistant/" + "core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test_component%22", + ), + ( + "pyserial-asyncio", + False, + "which should be replaced by pyserial-asyncio-fast. This will stop" + " working in Home Assistant 2026.7, please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+" + "label%3A%22integration%3A+test_component%22", + ), + ( + "pyserial-asyncio>=0.6", + True, + "which should be replaced by pyserial-asyncio-fast. This will stop" + " working in Home Assistant 2026.7, please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+" + "label%3A%22integration%3A+test_component%22", + ), + ], +) +async def test_install_deprecated_package( + hass: HomeAssistant, + requirement: str, + is_built_in: bool, + deprecation_info: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test installation of a deprecated package.""" + with ( + patch.dict( + DEPRECATED_PACKAGES, {"hello": ("is deprecated for testing", "2020.12")} + ), + patch("homeassistant.util.package.install_package", return_value=True), + ): + await async_process_requirements( + hass, "test_component", [requirement], is_built_in + ) + + assert ( + f"Detected that {'' if is_built_in else 'custom '}integration " + f"'test_component' has requirement '{requirement}' {deprecation_info}" + ) in caplog.text diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 8a3ce0939358d8..7351ef60d3ac0b 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -1264,6 +1264,58 @@ def test_unit_conversion_factory_allow_none_with_none() -> None: )(None) is None ) + assert ( + EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.MILES_PER_KILO_WATT_HOUR, + UnitOfEnergyDistance.KILO_WATT_HOUR_PER_100_KM, + )(0) + 
is None + ) + assert ( + EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.KILO_WATT_HOUR_PER_100_KM, + UnitOfEnergyDistance.WATT_HOUR_PER_KM, + )(0) + == 0 + ) + assert ( + EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.KM_PER_KILO_WATT_HOUR, + UnitOfEnergyDistance.MILES_PER_KILO_WATT_HOUR, + )(0.0) + == 0.0 + ) + assert ( + EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.MILES_PER_KILO_WATT_HOUR, + UnitOfEnergyDistance.KM_PER_KILO_WATT_HOUR, + )(0) + == 0.0 + ) + + +def test_unit_conversion_factory_allow_none_with_zero_for_inverse_units() -> None: + """Test converter_factory_allow_none returns None for zero with inverse units.""" + # Test EnergyDistanceConverter with inverse units (kWh/100km <-> km/kWh) + assert ( + EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.KILO_WATT_HOUR_PER_100_KM, + UnitOfEnergyDistance.KM_PER_KILO_WATT_HOUR, + )(0) + is None + ) + assert ( + EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.KM_PER_KILO_WATT_HOUR, + UnitOfEnergyDistance.KILO_WATT_HOUR_PER_100_KM, + )(0) + is None + ) + # Test with non-zero value to ensure normal conversion still works + assert EnergyDistanceConverter.converter_factory_allow_none( + UnitOfEnergyDistance.KILO_WATT_HOUR_PER_100_KM, + UnitOfEnergyDistance.KM_PER_KILO_WATT_HOUR, + )(25) == pytest.approx(4) @pytest.mark.parametrize(