Switch formatting from black to ruff-format (#102893)

Co-authored-by: Franck Nijhof <git@frenck.dev>
Aarni Koskela 2023-11-27 15:38:59 +02:00 committed by GitHub
parent cf9b0e804f
commit 706add4a57
161 changed files with 530 additions and 607 deletions

View File

@ -10,7 +10,7 @@
"customizations": {
"vscode": {
"extensions": [
"ms-python.black-formatter",
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
@ -39,7 +39,10 @@
"!include_dir_list scalar",
"!include_dir_merge_list scalar",
"!include_dir_merge_named scalar"
]
],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
}
}
}

View File

@ -60,7 +60,7 @@
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] I have followed the [perfect PR recommendations][perfect-pr]
- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
- [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
- [ ] Tests have been added to verify that the new code works.
If user exposed functionality or configuration variables are added/changed:

View File

@ -36,7 +36,6 @@ env:
CACHE_VERSION: 5
PIP_CACHE_VERSION: 4
MYPY_CACHE_VERSION: 6
BLACK_CACHE_VERSION: 1
HA_SHORT_VERSION: "2023.12"
DEFAULT_PYTHON: "3.11"
ALL_PYTHON_VERSIONS: "['3.11', '3.12']"
@ -58,7 +57,6 @@ env:
POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
PIP_CACHE: /tmp/pip-cache
BLACK_CACHE: /tmp/black-cache
SQLALCHEMY_WARN_20: 1
PYTHONASYNCIODEBUG: 1
HASS_CI: 1
@ -261,8 +259,8 @@ jobs:
. venv/bin/activate
pre-commit install-hooks
lint-black:
name: Check black
lint-ruff-format:
name: Check ruff-format
runs-on: ubuntu-22.04
needs:
- info
@ -276,13 +274,6 @@ jobs:
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Generate partial black restore key
id: generate-black-key
run: |
black_version=$(cat requirements_test_pre_commit.txt | grep black | cut -d '=' -f 3)
echo "version=$black_version" >> $GITHUB_OUTPUT
echo "key=black-${{ env.BLACK_CACHE_VERSION }}-$black_version-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v3.3.2
@ -301,33 +292,17 @@ jobs:
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore black cache
uses: actions/cache@v3.3.2
with:
path: ${{ env.BLACK_CACHE }}
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
steps.generate-black-key.outputs.key }}
restore-keys: |
${{ runner.os }}-${{ steps.python.outputs.python-version }}-black-${{
env.BLACK_CACHE_VERSION }}-${{ steps.generate-black-key.outputs.version }}-${{
env.HA_SHORT_VERSION }}-
- name: Run black (fully)
if: needs.info.outputs.test_full_suite == 'true'
env:
BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
- name: Run ruff-format (fully)
run: |
. venv/bin/activate
pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
- name: Run black (partially)
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
- name: Run ruff-format (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
env:
BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
run: |
. venv/bin/activate
shopt -s globstar
pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
pre-commit run --hook-stage manual ruff-format --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
lint-ruff:
name: Check ruff
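For the partial run above, the workflow hands pre-commit an explicit file list built from a brace-expansion plus globstar pattern. Roughly, and with a made-up `integrations_glob` value standing in for the CI output, that selection amounts to:

```python
from pathlib import Path

# Hypothetical value; in CI, needs.info.outputs.integrations_glob carries the
# integrations touched by the pull request.
integrations_glob = "{mqtt,esphome}"

selected: list[Path] = []
for root in ("homeassistant", "tests"):
    for integration in integrations_glob.strip("{}").split(","):
        base = Path(root) / "components" / integration
        # Approximates the shell's {*,**/*} with globstar enabled:
        # every file at any depth below the integration's directory.
        selected.extend(path for path in base.rglob("*") if path.is_file())

print(f"ruff-format would receive {len(selected)} files")
```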

View File

@ -1,16 +1,11 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.1
rev: v0.1.6
hooks:
- id: ruff
args:
- --fix
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.11.0
hooks:
- id: black
args:
- --quiet
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
- repo: https://github.com/codespell-project/codespell
rev: v2.2.2
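The new ruff-format hook is scoped with a `files:` regex rather than an exclude list. To make that pattern concrete, a small sketch (plain `re`, hypothetical paths) of what it does and does not cover:

```python
import re

# Pattern copied from the ruff-format hook added above.
HOOK_FILES = re.compile(r"^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$")

for path in (
    "homeassistant/components/light/__init__.py",  # covered: inside homeassistant/
    "tests/components/mqtt/test_init.py",  # covered: inside tests/
    "setup.py",  # covered: a top-level .py file
    "docs/source/conf.py",  # skipped: outside the listed trees
):
    verdict = "format" if HOOK_FILES.match(path) else "skip"
    print(f"{verdict:>6}  {path}")
```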

View File

@ -1,3 +1,7 @@
{
"recommendations": ["esbenp.prettier-vscode", "ms-python.python"]
"recommendations": [
"charliermarsh.ruff",
"esbenp.prettier-vscode",
"ms-python.python"
]
}

View File

@ -5,8 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Uninstall pre-installed formatting and linting tools
# They would conflict with our pinned versions
RUN \
pipx uninstall black \
&& pipx uninstall pydocstyle \
pipx uninstall pydocstyle \
&& pipx uninstall pycodestyle \
&& pipx uninstall mypy \
&& pipx uninstall pylint

View File

@ -5,9 +5,7 @@ from collections.abc import Mapping
ValueType = (
# Example: entities.all = { read: true, control: true }
Mapping[str, bool]
| bool
| None
Mapping[str, bool] | bool | None
)
# Example: entities.domains = { light: … }

View File

@ -1315,9 +1315,9 @@ class PipelineInput:
if stt_audio_buffer:
# Send audio in the buffer first to speech-to-text, then move on to stt_stream.
# This is basically an async itertools.chain.
async def buffer_then_audio_stream() -> AsyncGenerator[
ProcessedAudioChunk, None
]:
async def buffer_then_audio_stream() -> (
AsyncGenerator[ProcessedAudioChunk, None]
):
# Buffered audio
for chunk in stt_audio_buffer:
yield chunk

View File

@ -417,8 +417,7 @@ async def websocket_device_capture(
# single sample (16 bits) per queue item.
max_queue_items = (
# +1 for None to signal end
int(math.ceil(timeout_seconds * CAPTURE_RATE))
+ 1
int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1
)
audio_queue = DeviceAudioQueue(queue=asyncio.Queue(maxsize=max_queue_items))

View File

@ -44,7 +44,8 @@ SELECT_TYPES: dict[str, BMWSelectEntityDescription] = {
translation_key="ac_limit",
is_available=lambda v: v.is_remote_set_ac_limit_enabled,
dynamic_options=lambda v: [
str(lim) for lim in v.charging_profile.ac_available_limits # type: ignore[union-attr]
str(lim)
for lim in v.charging_profile.ac_available_limits # type: ignore[union-attr]
],
current_option=lambda v: str(v.charging_profile.ac_current_limit), # type: ignore[union-attr]
remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(

View File

@ -140,7 +140,7 @@ def _ws_handle_cloud_errors(
handler: Callable[
[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
Coroutine[None, None, None],
]
],
) -> Callable[
[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
Coroutine[None, None, None],
@ -362,8 +362,11 @@ def _require_cloud_login(
handler: Callable[
[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
None,
]
) -> Callable[[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], None,]:
],
) -> Callable[
[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
None,
]:
"""Websocket decorator that requires cloud to be logged in."""
@wraps(handler)

View File

@ -129,9 +129,8 @@ class DeconzDevice(DeconzBase[_DeviceT], Entity):
if self.gateway.ignore_state_updates:
return
if (
self._update_keys is not None
and not self._device.changed_keys.intersection(self._update_keys)
if self._update_keys is not None and not self._device.changed_keys.intersection(
self._update_keys
):
return

View File

@ -63,7 +63,8 @@ async def async_setup_entry( # noqa: C901
)
await device.async_connect(session_instance=async_client)
device.password = entry.data.get(
CONF_PASSWORD, "" # This key was added in HA Core 2022.6
CONF_PASSWORD,
"", # This key was added in HA Core 2022.6
)
except DeviceNotFound as err:
raise ConfigEntryNotReady(

View File

@ -453,10 +453,9 @@ class DlnaDmrEntity(MediaPlayerEntity):
for state_variable in state_variables:
# Force a state refresh when player begins or pauses playback
# to update the position info.
if (
state_variable.name == "TransportState"
and state_variable.value
in (TransportState.PLAYING, TransportState.PAUSED_PLAYBACK)
if state_variable.name == "TransportState" and state_variable.value in (
TransportState.PLAYING,
TransportState.PAUSED_PLAYBACK,
):
force_refresh = True

View File

@ -441,9 +441,7 @@ async def async_setup_entry(
description,
entry,
telegram,
*device_class_and_uom(
telegram, description
), # type: ignore[arg-type]
*device_class_and_uom(telegram, description), # type: ignore[arg-type]
)
for description in all_sensors
if (

View File

@ -18,13 +18,11 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
_COMMAND_BY_MOTION_STATUS = (
{ # Maps the stop command to use for every cover motion status
CoverStatus.DOWN: CoverCommand.DOWN,
CoverStatus.UP: CoverCommand.UP,
CoverStatus.IDLE: None,
}
)
_COMMAND_BY_MOTION_STATUS = { # Maps the stop command to use for every cover motion status
CoverStatus.DOWN: CoverCommand.DOWN,
CoverStatus.UP: CoverCommand.UP,
CoverStatus.IDLE: None,
}
async def async_setup_entry(

View File

@ -14,9 +14,7 @@ class EsphomeEnumMapper(Generic[_EnumT, _ValT]):
def __init__(self, mapping: dict[_EnumT, _ValT]) -> None:
"""Construct a EsphomeEnumMapper."""
# Add none mapping
augmented_mapping: dict[
_EnumT | None, _ValT | None
] = mapping # type: ignore[assignment]
augmented_mapping: dict[_EnumT | None, _ValT | None] = mapping # type: ignore[assignment]
augmented_mapping[None] = None
self._mapping = augmented_mapping

View File

@ -117,7 +117,8 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity):
"""Return the current speed percentage."""
if not self._supports_speed_levels:
return ordered_list_item_to_percentage(
ORDERED_NAMED_FAN_SPEEDS, self._state.speed # type: ignore[misc]
ORDERED_NAMED_FAN_SPEEDS,
self._state.speed, # type: ignore[misc]
)
return ranged_value_to_percentage(

View File

@ -124,10 +124,13 @@ def convert_dict(dictionary: dict[str, Any]) -> dict[str, Any]:
def convert_key(key: str) -> str:
"""Convert a string to snake_case."""
string = re.sub(r"[\-\.\s]", "_", str(key))
return (string[0]).lower() + re.sub(
r"[A-Z]",
lambda matched: f"_{matched.group(0).lower()}", # type:ignore[str-bytes-safe]
string[1:],
return (
(string[0]).lower()
+ re.sub(
r"[A-Z]",
lambda matched: f"_{matched.group(0).lower()}", # type:ignore[str-bytes-safe]
string[1:],
)
)
return {

View File

@ -79,12 +79,12 @@ _ICONS: dict[SensorKind, str] = {
class GoodweSensorEntityDescription(SensorEntityDescription):
"""Class describing Goodwe sensor entities."""
value: Callable[
[GoodweUpdateCoordinator, str], Any
] = lambda coordinator, sensor: coordinator.sensor_value(sensor)
available: Callable[
[GoodweUpdateCoordinator], bool
] = lambda coordinator: coordinator.last_update_success
value: Callable[[GoodweUpdateCoordinator, str], Any] = (
lambda coordinator, sensor: coordinator.sensor_value(sensor)
)
available: Callable[[GoodweUpdateCoordinator], bool] = (
lambda coordinator: coordinator.last_update_success
)
_DESCRIPTIONS: dict[str, GoodweSensorEntityDescription] = {

View File

@ -59,7 +59,11 @@ LOCAL_SDK_MIN_VERSION = AwesomeVersion("2.1.5")
@callback
def _get_registry_entries(
hass: HomeAssistant, entity_id: str
) -> tuple[er.RegistryEntry | None, dr.DeviceEntry | None, ar.AreaEntry | None,]:
) -> tuple[
er.RegistryEntry | None,
dr.DeviceEntry | None,
ar.AreaEntry | None,
]:
"""Get registry entries."""
ent_reg = er.async_get(hass)
dev_reg = dr.async_get(hass)

View File

@ -93,7 +93,8 @@ class GoogleTaskTodoListEntity(
summary=item["title"],
uid=item["id"],
status=TODO_STATUS_MAP.get(
item.get("status"), TodoItemStatus.NEEDS_ACTION # type: ignore[arg-type]
item.get("status"), # type: ignore[arg-type]
TodoItemStatus.NEEDS_ACTION,
),
)
for item in _order_tasks(self.coordinator.data)

View File

@ -195,9 +195,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901
loop = (
# Create own thread if more than 1 CPU
hass.loop
if multiprocessing.cpu_count() < 2
else None
hass.loop if multiprocessing.cpu_count() < 2 else None
)
host = base_config[DOMAIN].get(CONF_HOST)
display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)

View File

@ -124,12 +124,15 @@ class Fan(HomeAccessory):
),
)
setter_callback = (
lambda value, preset_mode=preset_mode: self.set_preset_mode(
value, preset_mode
)
)
self.preset_mode_chars[preset_mode] = preset_serv.configure_char(
CHAR_ON,
value=False,
setter_callback=lambda value, preset_mode=preset_mode: self.set_preset_mode(
value, preset_mode
),
setter_callback=setter_callback,
)
if CHAR_SWING_MODE in self.chars:

View File

@ -116,5 +116,6 @@ class PowerViewSelect(ShadeEntity, SelectEntity):
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
await self.entity_description.select_fn(self._shade, option)
await self._shade.refresh() # force update data to ensure new info is in coordinator
# force update data to ensure new info is in coordinator
await self._shade.refresh()
self.async_write_ha_state()

View File

@ -66,8 +66,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
coordinator: ImapPushDataUpdateCoordinator | ImapPollingDataUpdateCoordinator = hass.data[
DOMAIN
].pop(
entry.entry_id
)
].pop(entry.entry_id)
await coordinator.shutdown()
return unload_ok

View File

@ -259,7 +259,8 @@ class KrakenSensor(
return
try:
self._attr_native_value = self.entity_description.value_fn(
self.coordinator, self.tracked_asset_pair_wsname # type: ignore[arg-type]
self.coordinator, # type: ignore[arg-type]
self.tracked_asset_pair_wsname,
)
self._received_data_at_least_once = True
except KeyError:

View File

@ -316,7 +316,9 @@ class HeatMeterSensor(
"""Set up the sensor with the initial values."""
super().__init__(coordinator)
self.key = description.key
self._attr_unique_id = f"{coordinator.config_entry.data['device_number']}_{description.key}" # type: ignore[union-attr]
self._attr_unique_id = (
f"{coordinator.config_entry.data['device_number']}_{description.key}" # type: ignore[union-attr]
)
self._attr_name = f"Heat Meter {description.name}"
self.entity_description = description
self._attr_device_info = device

View File

@ -118,7 +118,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
push_coordinator = LookinPushCoordinator(entry.title)
if lookin_device.model >= 2:
meteo_coordinator = LookinDataUpdateCoordinator[MeteoSensor](
coordinator_class = LookinDataUpdateCoordinator[MeteoSensor]
meteo_coordinator = coordinator_class(
hass,
push_coordinator,
name=entry.title,

View File

@ -348,7 +348,10 @@ class MatrixBot:
self._access_tokens[self._mx_id] = token
await self.hass.async_add_executor_job(
save_json, self._session_filepath, self._access_tokens, True # private=True
save_json,
self._session_filepath,
self._access_tokens,
True, # private=True
)
async def _login(self) -> None:

View File

@ -104,9 +104,11 @@ class MatterEventEntity(MatterEntity, EventEntity):
"""Call when Node attribute(s) changed."""
@callback
def _on_matter_node_event(
self, event: EventType, data: MatterNodeEvent
) -> None: # noqa: F821
def _on_matter_node_event( # noqa: F821
self,
event: EventType,
data: MatterNodeEvent,
) -> None:
"""Call on NodeEvent."""
if data.endpoint_id != self._endpoint.endpoint_id:
return

View File

@ -1137,8 +1137,7 @@ class MediaPlayerImageView(HomeAssistantView):
extra_urls = [
# Need to modify the default regex for media_content_id as it may
# include arbitrary characters including '/','{', or '}'
url
+ "/browse_media/{media_content_type}/{media_content_id:.+}",
url + "/browse_media/{media_content_type}/{media_content_id:.+}",
]
def __init__(self, component: EntityComponent[MediaPlayerEntity]) -> None:

View File

@ -470,9 +470,10 @@ class MqttTemperatureControlEntity(MqttEntity, ABC):
except ValueError:
_LOGGER.error("Could not parse %s from %s", template_name, payload)
def prepare_subscribe_topics(
self, topics: dict[str, dict[str, Any]]
) -> None: # noqa: C901
def prepare_subscribe_topics( # noqa: C901
self,
topics: dict[str, dict[str, Any]],
) -> None:
"""(Re)Subscribe to topics."""
@callback

View File

@ -63,9 +63,8 @@ async def async_wait_for_mqtt_client(hass: HomeAssistant) -> bool:
state_reached_future: asyncio.Future[bool]
if DATA_MQTT_AVAILABLE not in hass.data:
hass.data[
DATA_MQTT_AVAILABLE
] = state_reached_future = hass.loop.create_future()
state_reached_future = hass.loop.create_future()
hass.data[DATA_MQTT_AVAILABLE] = state_reached_future
else:
state_reached_future = hass.data[DATA_MQTT_AVAILABLE]
if state_reached_future.done():

View File

@ -34,9 +34,9 @@ UNIT_OF_LOAD: Final[str] = "load"
class NextcloudSensorEntityDescription(SensorEntityDescription):
"""Describes Nextcloud sensor entity."""
value_fn: Callable[
[str | int | float], str | int | float | datetime
] = lambda value: value
value_fn: Callable[[str | int | float], str | int | float | datetime] = (
lambda value: value
)
SENSORS: Final[list[NextcloudSensorEntityDescription]] = [

View File

@ -32,8 +32,7 @@ class ONVIFBaseEntity(Entity):
See: https://github.com/home-assistant/core/issues/35883
"""
return (
self.device.info.mac
or self.device.info.serial_number # type:ignore[return-value]
self.device.info.mac or self.device.info.serial_number # type:ignore[return-value]
)
@property

View File

@ -245,12 +245,13 @@ class HitachiAirToAirHeatPumpHLRRWIFI(OverkizEntity, ClimateEntity):
MODE_CHANGE_STATE,
OverkizCommandParam.AUTO,
).lower() # Overkiz can return states that have uppercase characters which are not accepted back as commands
if hvac_mode.replace(
" ", ""
) in [ # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto'
OverkizCommandParam.AUTOCOOLING,
OverkizCommandParam.AUTOHEATING,
]:
if (
hvac_mode.replace(" ", "")
in [ # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto'
OverkizCommandParam.AUTOCOOLING,
OverkizCommandParam.AUTOHEATING,
]
):
hvac_mode = OverkizCommandParam.AUTO
swing_mode = self._control_backfill(

View File

@ -83,13 +83,17 @@ SENSOR_DESCRIPTIONS = (
native_unit_of_measurement=UnitOfTime.SECONDS,
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda hass, service_info: bluetooth.async_get_learned_advertising_interval(
hass, service_info.address
)
or bluetooth.async_get_fallback_availability_interval(
hass, service_info.address
)
or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS,
value_fn=(
lambda hass, service_info: (
bluetooth.async_get_learned_advertising_interval(
hass, service_info.address
)
or bluetooth.async_get_fallback_availability_interval(
hass, service_info.address
)
or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS
)
),
suggested_display_precision=1,
),
)

View File

@ -101,9 +101,8 @@ def _validate_table_schema_has_correct_collation(
collate = (
dialect_kwargs.get("mysql_collate")
or dialect_kwargs.get(
"mariadb_collate"
) # pylint: disable-next=protected-access
or dialect_kwargs.get("mariadb_collate")
# pylint: disable-next=protected-access
or connection.dialect._fetch_setting(connection, "collation_server") # type: ignore[attr-defined]
)
if collate and collate != "utf8mb4_unicode_ci":

View File

@ -176,13 +176,17 @@ class NativeLargeBinary(LargeBinary):
# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
# for sqlite and postgresql we use a bigint
UINT_32_TYPE = BigInteger().with_variant(
mysql.INTEGER(unsigned=True), "mysql", "mariadb" # type: ignore[no-untyped-call]
mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call]
"mysql",
"mariadb",
)
JSON_VARIANT_CAST = Text().with_variant(
postgresql.JSON(none_as_null=True), "postgresql" # type: ignore[no-untyped-call]
postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call]
"postgresql",
)
JSONB_VARIANT_CAST = Text().with_variant(
postgresql.JSONB(none_as_null=True), "postgresql" # type: ignore[no-untyped-call]
postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call]
"postgresql",
)
DATETIME_TYPE = (
DateTime(timezone=True)

View File

@ -244,7 +244,8 @@ class Filters:
),
# Needs https://github.com/bdraco/home-assistant/commit/bba91945006a46f3a01870008eb048e4f9cbb1ef
self._generate_filter_for_columns(
(ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder # type: ignore[arg-type]
(ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), # type: ignore[arg-type]
_encoder,
).self_group(),
)
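A pattern that recurs throughout this diff (the BMW select, ESPHome fan, Google Tasks, recorder, and YAML hunks): once ruff-format splits a call across lines, a trailing `# type: ignore[...]` or `# pylint: disable-next` pragma moves to the line of the expression it suppresses, because those pragmas are line-scoped. A minimal before/after sketch with a hypothetical helper, assuming a mypy version that reports argument errors on the argument's own line (which the hunks above rely on):

```python
def takes_int(value: int, label: str) -> str:
    """Hypothetical helper, present only to show pragma placement."""
    return f"{label}: {value}"


count: str = "3"

# Single-line call: one trailing pragma covers everything on the line.
before = takes_int(count, "items")  # type: ignore[arg-type]

# Split call: the pragma follows the argument that triggers the report.
after = takes_int(
    count,  # type: ignore[arg-type]
    "items",
)
```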

View File

@ -532,7 +532,9 @@ def _update_states_table_with_foreign_key_options(
states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints
old_states_table = Table( # noqa: F841
TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters) # type: ignore[arg-type]
TABLE_STATES,
MetaData(),
*(alter["old_fk"] for alter in alters), # type: ignore[arg-type]
)
for alter in alters:

View File

@ -89,9 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
await host.renew()
async def async_check_firmware_update() -> str | Literal[
False
] | NewSoftwareVersion:
async def async_check_firmware_update() -> (
str | Literal[False] | NewSoftwareVersion
):
"""Check for firmware updates."""
if not host.api.supported(None, "update"):
return False

View File

@ -566,10 +566,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports)
list_of_ports = {}
for port in ports:
list_of_ports[
port.device
] = f"{port}, s/n: {port.serial_number or 'n/a'}" + (
f" - {port.manufacturer}" if port.manufacturer else ""
list_of_ports[port.device] = (
f"{port}, s/n: {port.serial_number or 'n/a'}"
+ (f" - {port.manufacturer}" if port.manufacturer else "")
)
list_of_ports[CONF_MANUAL_PATH] = CONF_MANUAL_PATH

View File

@ -280,9 +280,9 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
async def _async_fallback_poll(self) -> None:
"""Retrieve latest state by polling."""
await self.hass.data[DATA_SONOS].favorites[
self.speaker.household_id
].async_poll()
await (
self.hass.data[DATA_SONOS].favorites[self.speaker.household_id].async_poll()
)
await self.hass.async_add_executor_job(self._update)
def _update(self) -> None:

View File

@ -78,7 +78,9 @@ class RecorderOutput(StreamOutput):
def write_segment(segment: Segment) -> None:
"""Write a segment to output."""
# fmt: off
nonlocal output, output_v, output_a, last_stream_id, running_duration, last_sequence
# fmt: on
# Because the stream_worker is in a different thread from the record service,
# the lookback segments may still have some overlap with the recorder segments
if segment.sequence <= last_sequence:

View File

@ -153,7 +153,9 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C
if not self.available:
return None
try:
return await self._api.surveillance_station.get_camera_image(self.entity_description.key, self.snapshot_quality) # type: ignore[no-any-return]
return await self._api.surveillance_station.get_camera_image( # type: ignore[no-any-return]
self.entity_description.key, self.snapshot_quality
)
except (
SynologyDSMAPIErrorException,
SynologyDSMRequestException,

View File

@ -57,7 +57,8 @@ from .template_entity import TemplateEntity, rewrite_common_legacy_to_modern_con
from .trigger_entity import TriggerEntity
CHECK_FORECAST_KEYS = (
set().union(Forecast.__annotations__.keys())
set()
.union(Forecast.__annotations__.keys())
# Manually add the forecast resulting attributes that only exists
# as native_* in the Forecast definition
.union(("apparent_temperature", "wind_gust_speed", "dew_point"))

View File

@ -119,9 +119,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Handle Memo Text service call."""
memo_text = call.data[CONF_MEMO_TEXT]
memo_text.hass = hass
await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].get_module(
call.data[CONF_ADDRESS]
).set_memo_text(memo_text.async_render())
await (
hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"]
.get_module(call.data[CONF_ADDRESS])
.set_memo_text(memo_text.async_render())
)
hass.services.async_register(
DOMAIN,

View File

@ -48,12 +48,12 @@ class VeSyncSensorEntityDescription(
):
"""Describe VeSync sensor entity."""
exists_fn: Callable[
[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool
] = lambda _: True
update_fn: Callable[
[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None
] = lambda _: None
exists_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool] = (
lambda _: True
)
update_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None] = (
lambda _: None
)
def update_energy(device):

View File

@ -28,9 +28,9 @@ NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"]
class VodafoneStationBaseEntityDescription:
"""Vodafone Station entity base description."""
value: Callable[
[Any, Any], Any
] = lambda coordinator, key: coordinator.data.sensors[key]
value: Callable[[Any, Any], Any] = (
lambda coordinator, key: coordinator.data.sensors[key]
)
is_suitable: Callable[[dict], bool] = lambda val: True

View File

@ -111,11 +111,13 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol):
valid_protocol_factory=lambda call_info, rtcp_state: make_protocol(
hass, devices, call_info, rtcp_state
),
invalid_protocol_factory=lambda call_info, rtcp_state: PreRecordMessageProtocol(
hass,
"not_configured.pcm",
opus_payload_type=call_info.opus_payload_type,
rtcp_state=rtcp_state,
invalid_protocol_factory=(
lambda call_info, rtcp_state: PreRecordMessageProtocol(
hass,
"not_configured.pcm",
opus_payload_type=call_info.opus_payload_type,
rtcp_state=rtcp_state,
)
),
)
self.hass = hass

View File

@ -95,9 +95,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN):
self.upnp_description = discovery_info.ssdp_location
# ssdp_location and hostname have been checked in check_yamaha_ssdp so it is safe to ignore type assignment
self.host = urlparse(
discovery_info.ssdp_location
).hostname # type: ignore[assignment]
self.host = urlparse(discovery_info.ssdp_location).hostname # type: ignore[assignment]
await self.async_set_unique_id(self.serial_number)
self._abort_if_unique_id_configured(

View File

@ -276,9 +276,7 @@ async def async_setup_entry(
if state_key == "0":
continue
notification_description: NotificationZWaveJSEntityDescription | None = (
None
)
notification_description: NotificationZWaveJSEntityDescription | None = None
for description in NOTIFICATION_SENSOR_MAPPINGS:
if (

View File

@ -344,7 +344,8 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity):
is not None
and (extra_data := await self.async_get_last_extra_data())
and (
latest_version_firmware := ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
latest_version_firmware
:= ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
extra_data.as_dict()
).latest_version_firmware
)

View File

@ -251,7 +251,9 @@ def async_track_state_change(
return async_track_state_change_event(hass, entity_ids, state_change_listener)
return hass.bus.async_listen(
EVENT_STATE_CHANGED, state_change_dispatcher, event_filter=state_change_filter # type: ignore[arg-type]
EVENT_STATE_CHANGED,
state_change_dispatcher, # type: ignore[arg-type]
event_filter=state_change_filter, # type: ignore[arg-type]
)
@ -761,7 +763,8 @@ class _TrackStateChangeFiltered:
@callback
def _setup_all_listener(self) -> None:
self._listeners[_ALL_LISTENER] = self.hass.bus.async_listen(
EVENT_STATE_CHANGED, self._action # type: ignore[arg-type]
EVENT_STATE_CHANGED,
self._action, # type: ignore[arg-type]
)
@ -1335,7 +1338,8 @@ def async_track_same_state(
if entity_ids == MATCH_ALL:
async_remove_state_for_cancel = hass.bus.async_listen(
EVENT_STATE_CHANGED, state_for_cancel_listener # type: ignore[arg-type]
EVENT_STATE_CHANGED,
state_for_cancel_listener, # type: ignore[arg-type]
)
else:
async_remove_state_for_cancel = async_track_state_change_event(

View File

@ -190,7 +190,8 @@ class RestoreStateData:
state, self.entities[state.entity_id].extra_restore_state_data, now
)
for state in all_states
if state.entity_id in self.entities and
if state.entity_id in self.entities
and
# Ignore all states that are entity registry placeholders
not state.attributes.get(ATTR_RESTORED)
]

View File

@ -99,8 +99,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
# Pick a random microsecond in range 0.05..0.50 to stagger the refreshes
# and avoid a thundering herd.
self._microsecond = (
randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX)
/ 10**6
randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) / 10**6
)
self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}

View File

@ -403,9 +403,7 @@ async def async_get_zeroconf(
hass: HomeAssistant,
) -> dict[str, list[dict[str, str | dict[str, str]]]]:
"""Return cached list of zeroconf types."""
zeroconf: dict[
str, list[dict[str, str | dict[str, str]]]
] = ZEROCONF.copy() # type: ignore[assignment]
zeroconf: dict[str, list[dict[str, str | dict[str, str]]]] = ZEROCONF.copy() # type: ignore[assignment]
integrations = await async_get_custom_components(hass)
for integration in integrations.values():
@ -1013,9 +1011,7 @@ def _load_file(
Async friendly.
"""
with suppress(KeyError):
return hass.data[DATA_COMPONENTS][ # type: ignore[no-any-return]
comp_or_platform
]
return hass.data[DATA_COMPONENTS][comp_or_platform] # type: ignore[no-any-return]
cache = hass.data[DATA_COMPONENTS]

View File

@ -57,7 +57,8 @@ def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObject
def load_json(
filename: str | PathLike, default: JsonValueType = _SENTINEL # type: ignore[assignment]
filename: str | PathLike,
default: JsonValueType = _SENTINEL, # type: ignore[assignment]
) -> JsonValueType:
"""Load JSON data from a file.
@ -79,7 +80,8 @@ def load_json(
def load_json_array(
filename: str | PathLike, default: JsonArrayType = _SENTINEL # type: ignore[assignment]
filename: str | PathLike,
default: JsonArrayType = _SENTINEL, # type: ignore[assignment]
) -> JsonArrayType:
"""Load JSON data from a file and return as list.
@ -98,7 +100,8 @@ def load_json_array(
def load_json_object(
filename: str | PathLike, default: JsonObjectType = _SENTINEL # type: ignore[assignment]
filename: str | PathLike,
default: JsonObjectType = _SENTINEL, # type: ignore[assignment]
) -> JsonObjectType:
"""Load JSON data from a file and return as dict.

View File

@ -129,6 +129,7 @@ def vincenty(
uSq = cosSqAlpha * (AXIS_A**2 - AXIS_B**2) / (AXIS_B**2)
A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)))
B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)))
# fmt: off
deltaSigma = (
B
* sinSigma
@ -141,11 +142,12 @@ def vincenty(
- B
/ 6
* cos2SigmaM
* (-3 + 4 * sinSigma**2)
* (-3 + 4 * cos2SigmaM**2)
* (-3 + 4 * sinSigma ** 2)
* (-3 + 4 * cos2SigmaM ** 2)
)
)
)
# fmt: on
s = AXIS_B * A * (sigma - deltaSigma)
s /= 1000 # Conversion of meters to kilometers
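This hunk, like the stream-recorder one earlier, leans on formatter suppression comments: ruff-format honors the same `# fmt: off` / `# fmt: on` pairs (and single-statement `# fmt: skip`) that black did, so hand-aligned code such as the vincenty() expression keeps its layout. A tiny self-contained illustration:

```python
# Between the markers the formatter leaves the layout untouched.
# fmt: off
IDENTITY = [
    [1, 0, 0],
    [0, 1, 0],
    [0, 0, 1],
]
# fmt: on

# A single statement can opt out with a trailing marker instead:
total = 1+2+3  # fmt: skip

print(IDENTITY, total)
```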

View File

@ -340,7 +340,12 @@ def _handle_mapping_tag(
raise yaml.MarkedYAMLError(
context=f'invalid key: "{key}"',
context_mark=yaml.Mark(
fname, 0, line, -1, None, None # type: ignore[arg-type]
fname,
0,
line,
-1,
None,
None, # type: ignore[arg-type]
),
) from exc

View File

@ -79,9 +79,6 @@ include-package-data = true
[tool.setuptools.packages.find]
include = ["homeassistant*"]
[tool.black]
extend-exclude = "/generated/"
[tool.pylint.MAIN]
py-version = "3.11"
ignore = [

View File

@ -1,6 +1,5 @@
# Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
black==23.11.0
codespell==2.2.2
ruff==0.1.1
ruff==0.1.6
yamllint==1.32.0

View File

@ -1,10 +1,10 @@
#!/bin/sh
# Format code with black.
# Format code with ruff-format.
cd "$(dirname "$0")/.."
black \
ruff \
format \
--check \
--fast \
--quiet \
homeassistant tests script *.py

View File

@ -192,6 +192,7 @@ IGNORE_PRE_COMMIT_HOOK_ID = (
"no-commit-to-branch",
"prettier",
"python-typing-update",
"ruff-format", # it's just ruff
)
PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
@ -394,7 +395,8 @@ def requirements_test_all_output(reqs: dict[str, list[str]]) -> str:
for requirement, modules in reqs.items()
if any(
# Always install requirements that are not part of integrations
not mdl.startswith("homeassistant.components.") or
not mdl.startswith("homeassistant.components.")
or
# Install tests for integrations that have tests
has_tests(mdl)
for mdl in modules

View File

@ -2,11 +2,10 @@
from __future__ import annotations
from collections.abc import Collection, Iterable, Mapping
import shutil
import subprocess
from typing import Any
import black
from black.mode import Mode
DEFAULT_GENERATOR = "script.hassfest"
@ -72,7 +71,14 @@ To update, run python3 -m {generator}
{content}
"""
return black.format_str(content.strip(), mode=Mode())
ruff = shutil.which("ruff")
if not ruff:
raise RuntimeError("ruff not found")
return subprocess.check_output(
[ruff, "format", "-"],
input=content.strip(),
encoding="utf-8",
)
def format_python_namespace(
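The hassfest change above drops the black import and instead pipes generated source through ruff's stdin mode. Pulled out of context, the new approach amounts to the sketch below; the wrapper name `ruff_format_str` is illustrative, not part of the codebase, and it assumes `ruff` is on PATH:

```python
import shutil
import subprocess


def ruff_format_str(content: str) -> str:
    """Format a Python source string by piping it through `ruff format -`."""
    ruff = shutil.which("ruff")
    if not ruff:
        raise RuntimeError("ruff not found")
    return subprocess.check_output(
        [ruff, "format", "-"],  # "-" reads from stdin and writes to stdout
        input=content.strip(),
        encoding="utf-8",
    )


if __name__ == "__main__":
    print(ruff_format_str("x = {  'a':1 }"), end="")
```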

View File

@ -267,7 +267,7 @@ async def async_test_home_assistant(event_loop, load_registries=True):
"homeassistant.helpers.restore_state.RestoreStateData.async_setup_dump",
return_value=None,
), patch(
"homeassistant.helpers.restore_state.start.async_at_start"
"homeassistant.helpers.restore_state.start.async_at_start",
):
await asyncio.gather(
ar.async_load(hass),

View File

@ -78,9 +78,7 @@ async def setup_airvisual_pro_fixture(hass, config, pro):
"homeassistant.components.airvisual_pro.config_flow.NodeSamba", return_value=pro
), patch(
"homeassistant.components.airvisual_pro.NodeSamba", return_value=pro
), patch(
"homeassistant.components.airvisual.PLATFORMS", []
):
), patch("homeassistant.components.airvisual.PLATFORMS", []):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
yield

View File

@ -180,9 +180,11 @@ async def test_send_base_with_supervisor(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
), patch(
"uuid.UUID.hex", new_callable=PropertyMock
"uuid.UUID.hex",
new_callable=PropertyMock,
) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
"homeassistant.components.analytics.analytics.HA_VERSION",
MOCK_VERSION,
):
hex.return_value = MOCK_UUID
await analytics.load()
@ -289,7 +291,8 @@ async def test_send_usage_with_supervisor(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
), patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
"homeassistant.components.analytics.analytics.HA_VERSION",
MOCK_VERSION,
):
await analytics.send_analytics()
assert (
@ -492,7 +495,8 @@ async def test_send_statistics_with_supervisor(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
), patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
"homeassistant.components.analytics.analytics.HA_VERSION",
MOCK_VERSION,
):
await analytics.send_analytics()
assert "'addon_count': 1" in caplog.text

View File

@ -51,7 +51,7 @@ async def async_init_integration(
) as update_patch, patch(
"homeassistant.components.anova.AnovaApi.authenticate"
), patch(
"homeassistant.components.anova.AnovaApi.get_devices"
"homeassistant.components.anova.AnovaApi.get_devices",
) as device_patch:
update_patch.return_value = ONLINE_UPDATE
device_patch.return_value = [

View File

@ -92,7 +92,8 @@ async def test_load_backups(hass: HomeAssistant) -> None:
"date": TEST_BACKUP.date,
},
), patch(
"pathlib.Path.stat", return_value=MagicMock(st_size=TEST_BACKUP.size)
"pathlib.Path.stat",
return_value=MagicMock(st_size=TEST_BACKUP.size),
):
await manager.load_backups()
backups = await manager.get_backups()

View File

@ -120,7 +120,8 @@ async def test_form_2fa_connect_error(hass: HomeAssistant) -> None:
"homeassistant.components.blink.config_flow.Blink.setup_urls",
side_effect=BlinkSetupError,
), patch(
"homeassistant.components.blink.async_setup_entry", return_value=True
"homeassistant.components.blink.async_setup_entry",
return_value=True,
):
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"], {"pin": "1234"}
@ -161,7 +162,8 @@ async def test_form_2fa_invalid_key(hass: HomeAssistant) -> None:
"homeassistant.components.blink.config_flow.Blink.setup_urls",
return_value=True,
), patch(
"homeassistant.components.blink.async_setup_entry", return_value=True
"homeassistant.components.blink.async_setup_entry",
return_value=True,
):
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"], {"pin": "1234"}
@ -200,7 +202,8 @@ async def test_form_2fa_unknown_error(hass: HomeAssistant) -> None:
"homeassistant.components.blink.config_flow.Blink.setup_urls",
side_effect=KeyError,
), patch(
"homeassistant.components.blink.async_setup_entry", return_value=True
"homeassistant.components.blink.async_setup_entry",
return_value=True,
):
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"], {"pin": "1234"}

View File

@ -47,12 +47,14 @@ def mock_operating_system_90():
def macos_adapter():
"""Fixture that mocks the macos adapter."""
with patch("bleak.get_platform_scanner_backend_type"), patch(
"homeassistant.components.bluetooth.platform.system", return_value="Darwin"
"homeassistant.components.bluetooth.platform.system",
return_value="Darwin",
), patch(
"homeassistant.components.bluetooth.scanner.platform.system",
return_value="Darwin",
), patch(
"bluetooth_adapters.systems.platform.system", return_value="Darwin"
"bluetooth_adapters.systems.platform.system",
return_value="Darwin",
):
yield
@ -71,14 +73,16 @@ def windows_adapter():
def no_adapter_fixture():
"""Fixture that mocks no adapters on Linux."""
with patch(
"homeassistant.components.bluetooth.platform.system", return_value="Linux"
"homeassistant.components.bluetooth.platform.system",
return_value="Linux",
), patch(
"homeassistant.components.bluetooth.scanner.platform.system",
return_value="Linux",
), patch(
"bluetooth_adapters.systems.platform.system", return_value="Linux"
"bluetooth_adapters.systems.platform.system",
return_value="Linux",
), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
"bluetooth_adapters.systems.linux.LinuxAdapters.refresh",
), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
{},
@ -90,14 +94,16 @@ def no_adapter_fixture():
def one_adapter_fixture():
"""Fixture that mocks one adapter on Linux."""
with patch(
"homeassistant.components.bluetooth.platform.system", return_value="Linux"
"homeassistant.components.bluetooth.platform.system",
return_value="Linux",
), patch(
"homeassistant.components.bluetooth.scanner.platform.system",
return_value="Linux",
), patch(
"bluetooth_adapters.systems.platform.system", return_value="Linux"
"bluetooth_adapters.systems.platform.system",
return_value="Linux",
), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
"bluetooth_adapters.systems.linux.LinuxAdapters.refresh",
), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
{
@ -124,9 +130,7 @@ def two_adapters_fixture():
), patch(
"homeassistant.components.bluetooth.scanner.platform.system",
return_value="Linux",
), patch(
"bluetooth_adapters.systems.platform.system", return_value="Linux"
), patch(
), patch("bluetooth_adapters.systems.platform.system", return_value="Linux"), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
@ -166,9 +170,7 @@ def one_adapter_old_bluez():
), patch(
"homeassistant.components.bluetooth.scanner.platform.system",
return_value="Linux",
), patch(
"bluetooth_adapters.systems.platform.system", return_value="Linux"
), patch(
), patch("bluetooth_adapters.systems.platform.system", return_value="Linux"), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
), patch(
"bluetooth_adapters.systems.linux.LinuxAdapters.adapters",

View File

@ -67,13 +67,9 @@ async def setup_bond_entity(
enabled=patch_token
), patch_bond_version(enabled=patch_version), patch_bond_device_ids(
enabled=patch_device_ids
), patch_setup_entry(
"cover", enabled=patch_platforms
), patch_setup_entry(
), patch_setup_entry("cover", enabled=patch_platforms), patch_setup_entry(
"fan", enabled=patch_platforms
), patch_setup_entry(
"light", enabled=patch_platforms
), patch_setup_entry(
), patch_setup_entry("light", enabled=patch_platforms), patch_setup_entry(
"switch", enabled=patch_platforms
):
return await hass.config_entries.async_setup(config_entry.entry_id)
@ -102,15 +98,11 @@ async def setup_platform(
"homeassistant.components.bond.PLATFORMS", [platform]
), patch_bond_version(return_value=bond_version), patch_bond_bridge(
return_value=bridge
), patch_bond_token(
return_value=token
), patch_bond_device_ids(
), patch_bond_token(return_value=token), patch_bond_device_ids(
return_value=[bond_device_id]
), patch_start_bpup(), patch_bond_device(
return_value=discovered_device
), patch_bond_device_properties(
return_value=props
), patch_bond_device_state(
), patch_bond_device_properties(return_value=props), patch_bond_device_state(
return_value=state
):
assert await async_setup_component(hass, BOND_DOMAIN, {})

View File

@ -184,9 +184,7 @@ async def test_old_identifiers_are_removed(
"name": "test1",
"type": DeviceType.GENERIC_DEVICE,
}
), patch_bond_device_properties(
return_value={}
), patch_bond_device_state(
), patch_bond_device_properties(return_value={}), patch_bond_device_state(
return_value={}
):
assert await hass.config_entries.async_setup(config_entry.entry_id) is True
@ -228,9 +226,7 @@ async def test_smart_by_bond_device_suggested_area(
"type": DeviceType.GENERIC_DEVICE,
"location": "Den",
}
), patch_bond_device_properties(
return_value={}
), patch_bond_device_state(
), patch_bond_device_properties(return_value={}), patch_bond_device_state(
return_value={}
):
assert await hass.config_entries.async_setup(config_entry.entry_id) is True
@ -275,9 +271,7 @@ async def test_bridge_device_suggested_area(
"type": DeviceType.GENERIC_DEVICE,
"location": "Bathroom",
}
), patch_bond_device_properties(
return_value={}
), patch_bond_device_state(
), patch_bond_device_properties(return_value={}), patch_bond_device_state(
return_value={}
):
assert await hass.config_entries.async_setup(config_entry.entry_id) is True

View File

@ -19,7 +19,7 @@ async def test_creating_entry_sets_up_media_player(hass: HomeAssistant) -> None:
) as mock_setup, patch(
"pychromecast.discovery.discover_chromecasts", return_value=(True, None)
), patch(
"pychromecast.discovery.stop_discovery"
"pychromecast.discovery.stop_discovery",
):
result = await hass.config_entries.flow.async_init(
cast.DOMAIN, context={"source": config_entries.SOURCE_USER}

View File

@ -24,7 +24,7 @@ async def test_user(hass: HomeAssistant) -> None:
), patch(
"homeassistant.components.comelit.async_setup_entry"
) as mock_setup_entry, patch(
"requests.get"
"requests.get",
) as mock_request_get:
mock_request_get.return_value.status_code = 200
@ -70,7 +70,7 @@ async def test_exception_connection(hass: HomeAssistant, side_effect, error) ->
), patch(
"aiocomelit.api.ComeliteSerialBridgeApi.logout",
), patch(
"homeassistant.components.comelit.async_setup_entry"
"homeassistant.components.comelit.async_setup_entry",
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_USER_DATA
@ -135,9 +135,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) ->
"aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect
), patch(
"aiocomelit.api.ComeliteSerialBridgeApi.logout",
), patch(
"homeassistant.components.comelit.async_setup_entry"
):
), patch("homeassistant.components.comelit.async_setup_entry"):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id},

View File

@ -23,7 +23,9 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
@pytest.fixture
async def setup_automation(
hass, automation_config, stub_blueprint_populate # noqa: F811
hass,
automation_config,
stub_blueprint_populate, # noqa: F811
):
"""Set up automation integration."""
assert await async_setup_component(

View File

@ -65,7 +65,8 @@ def denonavr_connect_fixture():
"homeassistant.components.denonavr.receiver.DenonAVR.receiver_type",
TEST_RECEIVER_TYPE,
), patch(
"homeassistant.components.denonavr.async_setup_entry", return_value=True
"homeassistant.components.denonavr.async_setup_entry",
return_value=True,
):
yield

View File

@ -151,8 +151,11 @@ async def _async_get_handle_dhcp_packet(hass, integration_matchers):
with patch(
"homeassistant.components.dhcp._verify_l2socket_setup",
), patch(
"scapy.arch.common.compile_filter"
), patch("scapy.sendrecv.AsyncSniffer", _mock_sniffer):
"scapy.arch.common.compile_filter",
), patch(
"scapy.sendrecv.AsyncSniffer",
_mock_sniffer,
):
await dhcp_watcher.async_start()
return async_handle_dhcp_packet

View File

@ -198,9 +198,7 @@ async def test_import_flow_triggered_with_ecobee_conf_and_valid_data_and_stale_t
return_value=MOCK_ECOBEE_CONF,
), patch(
"homeassistant.components.ecobee.config_flow.Ecobee"
) as mock_ecobee, patch.object(
flow, "async_step_user"
) as mock_async_step_user:
) as mock_ecobee, patch.object(flow, "async_step_user") as mock_async_step_user:
mock_ecobee = mock_ecobee.return_value
mock_ecobee.refresh_tokens.return_value = False

View File

@ -55,7 +55,8 @@ async def test_one_time_password(hass: HomeAssistant):
"electrasmart.api.ElectraAPI.validate_one_time_password",
return_value=mock_otp_response,
), patch(
"electrasmart.api.ElectraAPI.fetch_devices", return_value=[]
"electrasmart.api.ElectraAPI.fetch_devices",
return_value=[],
):
result = await hass.config_entries.flow.async_init(
DOMAIN,

View File

@ -229,9 +229,7 @@ async def test_form_user_with_insecure_elk_times_out(hass: HomeAssistant) -> Non
0,
), patch(
"homeassistant.components.elkm1.config_flow.LOGIN_TIMEOUT", 0
), _patch_discovery(), _patch_elk(
elk=mocked_elk
):
), _patch_discovery(), _patch_elk(elk=mocked_elk):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{

View File

@ -89,7 +89,8 @@ async def setup_enphase_envoy_fixture(hass, config, mock_envoy):
"homeassistant.components.enphase_envoy.Envoy",
return_value=mock_envoy,
), patch(
"homeassistant.components.enphase_envoy.PLATFORMS", []
"homeassistant.components.enphase_envoy.PLATFORMS",
[],
):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()

View File

@ -38,7 +38,7 @@ async def test_set_unique_id(
), patch(
"homeassistant.components.epson.Projector.get_serial_number", return_value="123"
), patch(
"homeassistant.components.epson.Projector.get_property"
"homeassistant.components.epson.Projector.get_property",
):
freezer.tick(timedelta(seconds=30))
async_fire_time_changed(hass)

View File

@ -100,7 +100,8 @@ async def test_update_entity(
) as mock_compile, patch(
"esphome_dashboard_api.ESPHomeDashboardAPI.upload", return_value=True
) as mock_upload, pytest.raises(
HomeAssistantError, match="compiling"
HomeAssistantError,
match="compiling",
):
await hass.services.async_call(
"update",
@ -120,7 +121,8 @@ async def test_update_entity(
) as mock_compile, patch(
"esphome_dashboard_api.ESPHomeDashboardAPI.upload", return_value=False
) as mock_upload, pytest.raises(
HomeAssistantError, match="OTA"
HomeAssistantError,
match="OTA",
):
await hass.services.async_call(
"update",

View File

@ -51,7 +51,8 @@ async def setup_evil_genius_labs(
"pyevilgenius.EvilGeniusDevice.get_product",
return_value=product_fixture,
), patch(
"homeassistant.components.evil_genius_labs.PLATFORMS", platforms
"homeassistant.components.evil_genius_labs.PLATFORMS",
platforms,
):
assert await async_setup_component(hass, "evil_genius_labs", {})
await hass.async_block_till_done()

View File

@ -48,9 +48,9 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N
), patch(
"homeassistant.components.fritz.async_setup_entry"
) as mock_setup_entry, patch(
"requests.get"
"requests.get",
) as mock_request_get, patch(
"requests.post"
"requests.post",
) as mock_request_post, patch(
"homeassistant.components.fritz.config_flow.socket.gethostbyname",
return_value=MOCK_IPS["fritz.box"],
@ -98,9 +98,9 @@ async def test_user_already_configured(
"homeassistant.components.fritz.common.FritzBoxTools._update_device_info",
return_value=MOCK_FIRMWARE_INFO,
), patch(
"requests.get"
"requests.get",
) as mock_request_get, patch(
"requests.post"
"requests.post",
) as mock_request_post, patch(
"homeassistant.components.fritz.config_flow.socket.gethostbyname",
return_value=MOCK_IPS["fritz.box"],
@ -211,11 +211,11 @@ async def test_reauth_successful(
"homeassistant.components.fritz.common.FritzBoxTools._update_device_info",
return_value=MOCK_FIRMWARE_INFO,
), patch(
"homeassistant.components.fritz.async_setup_entry"
"homeassistant.components.fritz.async_setup_entry",
) as mock_setup_entry, patch(
"requests.get"
"requests.get",
) as mock_request_get, patch(
"requests.post"
"requests.post",
) as mock_request_post:
mock_request_get.return_value.status_code = 200
mock_request_get.return_value.content = MOCK_REQUEST
@ -399,9 +399,7 @@ async def test_ssdp(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N
return_value=MOCK_FIRMWARE_INFO,
), patch(
"homeassistant.components.fritz.async_setup_entry"
) as mock_setup_entry, patch(
"requests.get"
) as mock_request_get, patch(
) as mock_setup_entry, patch("requests.get") as mock_request_get, patch(
"requests.post"
) as mock_request_post:
mock_request_get.return_value.status_code = 200

View File

@ -43,7 +43,8 @@ async def init_integration(
"homeassistant.components.gios.Gios._get_all_sensors",
return_value=sensors,
), patch(
"homeassistant.components.gios.Gios._get_indexes", return_value=indexes
"homeassistant.components.gios.Gios._get_indexes",
return_value=indexes,
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)

View File

@ -55,7 +55,8 @@ async def test_invalid_sensor_data(hass: HomeAssistant) -> None:
"homeassistant.components.gios.Gios._get_station",
return_value=json.loads(load_fixture("gios/station.json")),
), patch(
"homeassistant.components.gios.Gios._get_sensor", return_value={}
"homeassistant.components.gios.Gios._get_sensor",
return_value={},
):
flow = config_flow.GiosFlowHandler()
flow.hass = hass
@ -83,7 +84,8 @@ async def test_cannot_connect(hass: HomeAssistant) -> None:
async def test_create_entry(hass: HomeAssistant) -> None:
"""Test that the user step works."""
with patch(
"homeassistant.components.gios.Gios._get_stations", return_value=STATIONS
"homeassistant.components.gios.Gios._get_stations",
return_value=STATIONS,
), patch(
"homeassistant.components.gios.Gios._get_station",
return_value=json.loads(load_fixture("gios/station.json")),

View File

@ -82,9 +82,7 @@ async def test_migrate_device_and_config_entry(
), patch(
"homeassistant.components.gios.Gios._get_all_sensors",
return_value=sensors,
), patch(
"homeassistant.components.gios.Gios._get_indexes", return_value=indexes
):
), patch("homeassistant.components.gios.Gios._get_indexes", return_value=indexes):
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(

View File

@ -92,7 +92,7 @@ async def test_update_access_token(hass: HomeAssistant) -> None:
) as mock_get_token, patch(
"homeassistant.components.google_assistant.http._get_homegraph_jwt"
) as mock_get_jwt, patch(
"homeassistant.core.dt_util.utcnow"
"homeassistant.core.dt_util.utcnow",
) as mock_utcnow:
mock_utcnow.return_value = base_time
mock_get_jwt.return_value = jwt

View File

@ -66,7 +66,12 @@ async def test_broadcast_no_targets(
"Anuncia en el salón Es hora de hacer los deberes",
),
("ko-KR", "숙제할 시간이야", "거실", "숙제할 시간이야 라고 거실에 방송해 줘"),
("ja-JP", "宿題の時間だよ", "リビング", "宿題の時間だよとリビングにブロードキャストして"),
(
"ja-JP",
"宿題の時間だよ",
"リビング",
"宿題の時間だよとリビングにブロードキャストして",
),
],
ids=["english", "spanish", "korean", "japanese"],
)

View File

@ -131,9 +131,10 @@ async def setup_guardian_fixture(
"aioguardian.commands.wifi.WiFiCommands.status",
return_value=data_wifi_status,
), patch(
"aioguardian.client.Client.disconnect"
"aioguardian.client.Client.disconnect",
), patch(
"homeassistant.components.guardian.PLATFORMS", []
"homeassistant.components.guardian.PLATFORMS",
[],
):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()

View File

@ -54,9 +54,9 @@ def hassio_stubs(hassio_env, hass, hass_client, aioclient_mock):
"homeassistant.components.hassio.HassIO.get_ingress_panels",
return_value={"panels": []},
), patch(
"homeassistant.components.hassio.issues.SupervisorIssues.setup"
"homeassistant.components.hassio.issues.SupervisorIssues.setup",
), patch(
"homeassistant.components.hassio.HassIO.refresh_updates"
"homeassistant.components.hassio.HassIO.refresh_updates",
):
hass.state = CoreState.starting
hass.loop.run_until_complete(async_setup_component(hass, "hassio", {}))

View File

@ -31,7 +31,7 @@ def run_driver(hass, event_loop, iid_storage):
), patch("pyhap.accessory_driver.HAPServer"), patch(
"pyhap.accessory_driver.AccessoryDriver.publish"
), patch(
"pyhap.accessory_driver.AccessoryDriver.persist"
"pyhap.accessory_driver.AccessoryDriver.persist",
):
yield HomeDriver(
hass,
@ -53,9 +53,9 @@ def hk_driver(hass, event_loop, iid_storage):
), patch("pyhap.accessory_driver.HAPServer.async_stop"), patch(
"pyhap.accessory_driver.HAPServer.async_start"
), patch(
"pyhap.accessory_driver.AccessoryDriver.publish"
"pyhap.accessory_driver.AccessoryDriver.publish",
), patch(
"pyhap.accessory_driver.AccessoryDriver.persist"
"pyhap.accessory_driver.AccessoryDriver.persist",
):
yield HomeDriver(
hass,
@ -77,13 +77,13 @@ def mock_hap(hass, event_loop, iid_storage, mock_zeroconf):
), patch("pyhap.accessory_driver.HAPServer.async_stop"), patch(
"pyhap.accessory_driver.HAPServer.async_start"
), patch(
"pyhap.accessory_driver.AccessoryDriver.publish"
"pyhap.accessory_driver.AccessoryDriver.publish",
), patch(
"pyhap.accessory_driver.AccessoryDriver.async_start"
"pyhap.accessory_driver.AccessoryDriver.async_start",
), patch(
"pyhap.accessory_driver.AccessoryDriver.async_stop"
"pyhap.accessory_driver.AccessoryDriver.async_stop",
), patch(
"pyhap.accessory_driver.AccessoryDriver.persist"
"pyhap.accessory_driver.AccessoryDriver.persist",
):
yield HomeDriver(
hass,

View File

@ -1202,9 +1202,7 @@ async def test_homekit_reset_accessories_not_supported(
"pyhap.accessory_driver.AccessoryDriver.async_update_advertisement"
) as hk_driver_async_update_advertisement, patch(
"pyhap.accessory_driver.AccessoryDriver.async_start"
), patch.object(
homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0
):
), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0):
await async_init_entry(hass, entry)
acc_mock = MagicMock()
@ -1247,9 +1245,7 @@ async def test_homekit_reset_accessories_state_missing(
"pyhap.accessory_driver.AccessoryDriver.config_changed"
) as hk_driver_config_changed, patch(
"pyhap.accessory_driver.AccessoryDriver.async_start"
), patch.object(
homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0
):
), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0):
await async_init_entry(hass, entry)
acc_mock = MagicMock()
@ -1291,9 +1287,7 @@ async def test_homekit_reset_accessories_not_bridged(
"pyhap.accessory_driver.AccessoryDriver.async_update_advertisement"
) as hk_driver_async_update_advertisement, patch(
"pyhap.accessory_driver.AccessoryDriver.async_start"
), patch.object(
homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0
):
), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0):
await async_init_entry(hass, entry)
assert hk_driver_async_update_advertisement.call_count == 0
@ -1338,7 +1332,7 @@ async def test_homekit_reset_single_accessory(
) as hk_driver_async_update_advertisement, patch(
"pyhap.accessory_driver.AccessoryDriver.async_start"
), patch(
f"{PATH_HOMEKIT}.accessories.HomeAccessory.run"
f"{PATH_HOMEKIT}.accessories.HomeAccessory.run",
) as mock_run:
await async_init_entry(hass, entry)
homekit.status = STATUS_RUNNING
@ -2071,9 +2065,9 @@ async def test_reload(hass: HomeAssistant, mock_async_zeroconf: None) -> None:
) as mock_homekit2, patch.object(homekit.bridge, "add_accessory"), patch(
f"{PATH_HOMEKIT}.async_show_setup_message"
), patch(
f"{PATH_HOMEKIT}.get_accessory"
f"{PATH_HOMEKIT}.get_accessory",
), patch(
"pyhap.accessory_driver.AccessoryDriver.async_start"
"pyhap.accessory_driver.AccessoryDriver.async_start",
), patch(
"homeassistant.components.network.async_get_source_ip", return_value="1.2.3.4"
):

View File

@ -102,7 +102,7 @@ async def test_hmip_add_device(
), patch.object(reloaded_hap, "async_connect"), patch.object(
reloaded_hap, "get_hap", return_value=mock_hap.home
), patch(
"homeassistant.components.homematicip_cloud.hap.asyncio.sleep"
"homeassistant.components.homematicip_cloud.hap.asyncio.sleep",
):
mock_hap.home.fire_create_event(event_type=EventType.DEVICE_ADDED)
await hass.async_block_till_done()

View File

@ -53,7 +53,8 @@ async def test_auth_auth_check_and_register(hass: HomeAssistant) -> None:
), patch.object(
hmip_auth.auth, "requestAuthToken", return_value="ABC"
), patch.object(
hmip_auth.auth, "confirmAuthToken"
hmip_auth.auth,
"confirmAuthToken",
):
assert await hmip_auth.async_checkbutton()
assert await hmip_auth.async_register() == "ABC"

Some files were not shown because too many files have changed in this diff.