pytest-homeassistant-custom-component 0.13.289__py3-none-any.whl → 0.13.291__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytest_homeassistant_custom_component/components/recorder/common.py +115 -0
- pytest_homeassistant_custom_component/const.py +2 -2
- pytest_homeassistant_custom_component/patch_time.py +25 -2
- pytest_homeassistant_custom_component/plugins.py +54 -35
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/METADATA +6 -5
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/RECORD +11 -11
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/WHEEL +0 -0
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/entry_points.txt +0 -0
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/licenses/LICENSE +0 -0
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/licenses/LICENSE_HA_CORE.md +0 -0
- {pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/top_level.txt +0 -0
pytest_homeassistant_custom_component/components/recorder/common.py

@@ -15,10 +15,12 @@ from functools import partial
 import importlib
 import sys
 import time
+from types import ModuleType
 from typing import Any, Literal, cast
 from unittest.mock import MagicMock, patch, sentinel

 from freezegun import freeze_time
+import pytest
 from sqlalchemy import create_engine, event as sqlalchemy_event
 from sqlalchemy.orm.session import Session

@@ -32,18 +34,25 @@ from homeassistant.components.recorder import (
     statistics,
 )
 from homeassistant.components.recorder.db_schema import (
+    EventData,
     Events,
     EventTypes,
     RecorderRuns,
+    StateAttributes,
     States,
     StatesMeta,
 )
+from homeassistant.components.recorder.models import (
+    bytes_to_ulid_or_none,
+    bytes_to_uuid_hex_or_none,
+)
 from homeassistant.components.recorder.tasks import RecorderTask, StatisticsTask
 from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
 from homeassistant.const import DEGREE, UnitOfTemperature
 from homeassistant.core import Event, HomeAssistant, State
 from homeassistant.helpers import recorder as recorder_helper
 from homeassistant.util import dt as dt_util
+from homeassistant.util.json import json_loads, json_loads_object

 from . import db_schema_0

@@ -456,6 +465,13 @@ def get_schema_module_path(schema_version_postfix: str) -> str:
     return f"...components.recorder.db_schema_{schema_version_postfix}"


+def get_patched_live_version(old_db_schema: ModuleType) -> int:
+    """Return the patched live migration version."""
+    return min(
+        migration.LIVE_MIGRATION_MIN_SCHEMA_VERSION, old_db_schema.SCHEMA_VERSION
+    )
+
+
 @contextmanager
 def old_db_schema(hass: HomeAssistant, schema_version_postfix: str) -> Iterator[None]:
     """Fixture to initialize the db with the old schema."""
@@ -466,6 +482,11 @@ def old_db_schema(hass: HomeAssistant, schema_version_postfix: str) -> Iterator[
     with (
         patch.object(recorder, "db_schema", old_db_schema),
         patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION),
+        patch.object(
+            migration,
+            "LIVE_MIGRATION_MIN_SCHEMA_VERSION",
+            get_patched_live_version(old_db_schema),
+        ),
         patch.object(migration, "non_live_data_migration_needed", return_value=False),
         patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
         patch.object(core, "EventTypes", old_db_schema.EventTypes),
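With this change, old_db_schema also clamps migration.LIVE_MIGRATION_MIN_SCHEMA_VERSION to the old module's SCHEMA_VERSION via get_patched_live_version. For orientation only, a migration test might enter this context manager roughly as follows; the schema postfix "32" and the test body are assumptions for illustration, not part of this diff:

    # Hypothetical usage sketch (not part of this package's test suite).
    from pytest_homeassistant_custom_component.components.recorder.common import (
        old_db_schema,
    )


    def test_migrates_from_old_schema(hass) -> None:
        """Create the database with an old schema, then let the recorder migrate it."""
        with old_db_schema(hass, "32"):
            ...  # set up and start the recorder against db_schema_32 here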
@@ -496,3 +517,97 @@ async def async_attach_db_engine(hass: HomeAssistant) -> None:
     )

     await instance.async_add_executor_job(_mock_setup_recorder_connection)
+
+
+EVENT_ORIGIN_ORDER = [ha.EventOrigin.local, ha.EventOrigin.remote]
+
+
+def db_event_to_native(event: Events, validate_entity_id: bool = True) -> Event | None:
+    """Convert to a native HA Event."""
+    context = ha.Context(
+        id=bytes_to_ulid_or_none(event.context_id_bin),
+        user_id=bytes_to_uuid_hex_or_none(event.context_user_id_bin),
+        parent_id=bytes_to_ulid_or_none(event.context_parent_id_bin),
+    )
+    return Event(
+        event.event_type or "",
+        json_loads_object(event.event_data) if event.event_data else {},
+        ha.EventOrigin(event.origin)
+        if event.origin
+        else EVENT_ORIGIN_ORDER[event.origin_idx or 0],
+        event.time_fired_ts or 0,
+        context=context,
+    )
+
+
+def db_event_data_to_native(event_data: EventData) -> dict[str, Any]:
+    """Convert to an event data dictionary."""
+    shared_data = event_data.shared_data
+    if shared_data is None:
+        return {}
+    return cast(dict[str, Any], json_loads(shared_data))
+
+
+def db_state_to_native(state: States, validate_entity_id: bool = True) -> State | None:
+    """Convert to an HA state object."""
+    context = ha.Context(
+        id=bytes_to_ulid_or_none(state.context_id_bin),
+        user_id=bytes_to_uuid_hex_or_none(state.context_user_id_bin),
+        parent_id=bytes_to_ulid_or_none(state.context_parent_id_bin),
+    )
+    attrs = json_loads_object(state.attributes) if state.attributes else {}
+    last_updated = dt_util.utc_from_timestamp(state.last_updated_ts or 0)
+    if state.last_changed_ts is None or state.last_changed_ts == state.last_updated_ts:
+        last_changed = dt_util.utc_from_timestamp(state.last_updated_ts or 0)
+    else:
+        last_changed = dt_util.utc_from_timestamp(state.last_changed_ts or 0)
+    if (
+        state.last_reported_ts is None
+        or state.last_reported_ts == state.last_updated_ts
+    ):
+        last_reported = dt_util.utc_from_timestamp(state.last_updated_ts or 0)
+    else:
+        last_reported = dt_util.utc_from_timestamp(state.last_reported_ts or 0)
+    return State(
+        state.entity_id or "",
+        state.state,  # type: ignore[arg-type]
+        # Join the state_attributes table on attributes_id to get the attributes
+        # for newer states
+        attrs,
+        last_changed=last_changed,
+        last_reported=last_reported,
+        last_updated=last_updated,
+        context=context,
+        validate_entity_id=validate_entity_id,
+    )
+
+
+def db_state_attributes_to_native(state_attrs: StateAttributes) -> dict[str, Any]:
+    """Convert to a state attributes dictionary."""
+    shared_attrs = state_attrs.shared_attrs
+    if shared_attrs is None:
+        return {}
+    return cast(dict[str, Any], json_loads(shared_attrs))
+
+
+async def async_drop_index(
+    recorder: Recorder, table: str, index: str, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Drop an index from the database.
+
+    migration._drop_index does not return or raise, so we verify the result
+    by checking the log for success or failure messages.
+    """
+
+    finish_msg = f"Finished dropping index `{index}` from table `{table}`"
+    fail_msg = f"Failed to drop index `{index}` from table `{table}`"
+
+    count_finish = caplog.text.count(finish_msg)
+    count_fail = caplog.text.count(fail_msg)
+
+    await recorder.async_add_executor_job(
+        migration._drop_index, recorder.get_session, table, index
+    )
+
+    assert caplog.text.count(finish_msg) == count_finish + 1
+    assert caplog.text.count(fail_msg) == count_fail
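These new helpers convert raw recorder rows (Events, EventData, States, StateAttributes) back into native Home Assistant objects. As a rough illustration only, a custom-component test could read stored states back like this; the session handling and the helper name below are assumptions for the sketch, not something this diff ships:

    # Hypothetical usage sketch (not part of this package's test suite).
    from homeassistant.components.recorder.db_schema import States
    from homeassistant.components.recorder.util import session_scope

    from pytest_homeassistant_custom_component.components.recorder.common import (
        db_state_to_native,
    )


    def fetch_native_states(hass):
        """Return the rows of the states table as native State objects."""
        with session_scope(hass=hass, read_only=True) as session:
            return [db_state_to_native(row) for row in session.query(States).all()]

Note that db_state_to_native reads attributes from the States row itself; per the comment in the diff, newer schema versions keep attributes in the joined state_attributes table, which is what db_state_attributes_to_native converts.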
pytest_homeassistant_custom_component/const.py

@@ -5,8 +5,8 @@ This file is originally from homeassistant/core and modified by pytest-homeassis
 """
 from typing import TYPE_CHECKING, Final
 MAJOR_VERSION: Final = 2025
-MINOR_VERSION: Final =
-PATCH_VERSION: Final = "
+MINOR_VERSION: Final = 11
+PATCH_VERSION: Final = "0b1"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 CONF_API_VERSION: Final = "api_version"
pytest_homeassistant_custom_component/patch_time.py

@@ -30,8 +30,31 @@ def ha_datetime_to_fakedatetime(datetime) -> freezegun.api.FakeDatetime:  # type
     )


-class
-    """Modified to
+class HAFakeDateMeta(freezegun.api.FakeDateMeta):
+    """Modified to override the string representation."""
+
+    def __str__(cls) -> str:  # noqa: N805 (ruff doesn't know this is a metaclass)
+        """Return the string representation of the class."""
+        return "<class 'datetime.date'>"
+
+
+class HAFakeDate(freezegun.api.FakeDate, metaclass=HAFakeDateMeta):  # type: ignore[name-defined]
+    """Modified to improve class str."""
+
+
+class HAFakeDatetimeMeta(freezegun.api.FakeDatetimeMeta):
+    """Modified to override the string representation."""
+
+    def __str__(cls) -> str:  # noqa: N805 (ruff doesn't know this is a metaclass)
+        """Return the string representation of the class."""
+        return "<class 'datetime.datetime'>"
+
+
+class HAFakeDatetime(freezegun.api.FakeDatetime, metaclass=HAFakeDatetimeMeta):  # type: ignore[name-defined]
+    """Modified to include basic fold support and improve class str.
+
+    Fold support submitted to upstream in https://github.com/spulec/freezegun/pull/424.
+    """

     @classmethod
     def now(cls, tz=None):
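These metaclasses exist so the classes freezegun substitutes for datetime.date and datetime.datetime still print like the real classes. A rough illustration of the intended effect, assuming this package's pytest plugin has already installed HAFakeDate and HAFakeDatetime into freezegun.api (as plugins.py does below):

    # Hypothetical illustration (not part of this diff).
    import datetime

    from freezegun import freeze_time

    with freeze_time("2025-11-01"):
        # freezegun swaps in its fake classes while time is frozen; the
        # patched metaclasses keep str() of those classes looking like
        # the originals.
        assert str(datetime.datetime) == "<class 'datetime.datetime'>"
        assert str(datetime.date) == "<class 'datetime.date'>"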
pytest_homeassistant_custom_component/plugins.py

@@ -163,6 +163,7 @@ asyncio.set_event_loop_policy = lambda policy: None
 def pytest_addoption(parser: pytest.Parser) -> None:
     """Register custom pytest options."""
     parser.addoption("--dburl", action="store", default="sqlite://")
+    parser.addoption("--drop-existing-db", action="store_const", const=True)


 def pytest_configure(config: pytest.Config) -> None:
@@ -190,11 +191,13 @@ def pytest_runtest_setup() -> None:
     destinations will be allowed.

     freezegun:
-    Modified to include https://github.com/spulec/freezegun/pull/424
+    Modified to include https://github.com/spulec/freezegun/pull/424 and improve class str.
     """
     pytest_socket.socket_allow_hosts(["127.0.0.1"])
     pytest_socket.disable_socket(allow_unix_socket=True)

+    freezegun.api.FakeDate = patch_time.HAFakeDate  # type: ignore[attr-defined]
+
     freezegun.api.datetime_to_fakedatetime = patch_time.ha_datetime_to_fakedatetime  # type: ignore[attr-defined]
     freezegun.api.FakeDatetime = patch_time.HAFakeDatetime  # type: ignore[attr-defined]

@@ -1494,44 +1497,58 @@ def recorder_db_url(
     assert not hass_fixture_setup

     db_url = cast(str, pytestconfig.getoption("dburl"))
+    drop_existing_db = pytestconfig.getoption("drop_existing_db")
+
+    def drop_db() -> None:
+        import sqlalchemy as sa  # noqa: PLC0415
+        import sqlalchemy_utils  # noqa: PLC0415
+
+        if db_url.startswith("mysql://"):
+            made_url = sa.make_url(db_url)
+            db = made_url.database
+            engine = sa.create_engine(db_url)
+            # Check for any open connections to the database before dropping it
+            # to ensure that InnoDB does not deadlock.
+            with engine.begin() as connection:
+                query = sa.text(
+                    "select id FROM information_schema.processlist WHERE db=:db and id != CONNECTION_ID()"
+                )
+                rows = connection.execute(query, parameters={"db": db}).fetchall()
+                if rows:
+                    raise RuntimeError(
+                        f"Unable to drop database {db} because it is in use by {rows}"
+                    )
+            engine.dispose()
+            sqlalchemy_utils.drop_database(db_url)
+        elif db_url.startswith("postgresql://"):
+            sqlalchemy_utils.drop_database(db_url)
+
     if db_url == "sqlite://" and persistent_database:
         tmp_path = tmp_path_factory.mktemp("recorder")
         db_url = "sqlite:///" + str(tmp_path / "pytest.db")
-    elif db_url.startswith("mysql://"):
+    elif db_url.startswith(("mysql://", "postgresql://")):
         import sqlalchemy_utils  # noqa: PLC0415

-
-
-        sqlalchemy_utils.create_database(db_url, encoding=charset)
-    elif db_url.startswith("postgresql://"):
-        import sqlalchemy_utils  # noqa: PLC0415
+        if drop_existing_db and sqlalchemy_utils.database_exists(db_url):
+            drop_db()

-
-
+        if sqlalchemy_utils.database_exists(db_url):
+            raise RuntimeError(
+                f"Database {db_url} already exists. Use --drop-existing-db "
+                "to automatically drop existing database before start of test."
+            )
+
+        sqlalchemy_utils.create_database(
+            db_url,
+            encoding="utf8mb4' COLLATE = 'utf8mb4_unicode_ci"
+            if db_url.startswith("mysql://")
+            else "utf8",
+        )
     yield db_url
     if db_url == "sqlite://" and persistent_database:
         rmtree(tmp_path, ignore_errors=True)
-    elif db_url.startswith("mysql://"):
-
-
-        made_url = sa.make_url(db_url)
-        db = made_url.database
-        engine = sa.create_engine(db_url)
-        # Check for any open connections to the database before dropping it
-        # to ensure that InnoDB does not deadlock.
-        with engine.begin() as connection:
-            query = sa.text(
-                "select id FROM information_schema.processlist WHERE db=:db and id != CONNECTION_ID()"
-            )
-            rows = connection.execute(query, parameters={"db": db}).fetchall()
-            if rows:
-                raise RuntimeError(
-                    f"Unable to drop database {db} because it is in use by {rows}"
-                )
-        engine.dispose()
-        sqlalchemy_utils.drop_database(db_url)
-    elif db_url.startswith("postgresql://"):
-        sqlalchemy_utils.drop_database(db_url)
+    elif db_url.startswith(("mysql://", "postgresql://")):
+        drop_db()


 async def _async_init_recorder_component(
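The new --drop-existing-db option only matters when --dburl points at a MySQL or PostgreSQL server: the fixture now refuses to start if the target database already exists, unless the flag is passed, in which case drop_db() removes it first (after checking for open connections on MySQL). A hypothetical invocation, with a made-up DSN:

    pytest --dburl mysql://root:secret@127.0.0.1/ha_test --drop-existing-db tests/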
@@ -1661,10 +1678,12 @@ async def async_test_recorder(
     migrate_entity_ids = (
         migration.EntityIDMigration.migrate_data if enable_migrate_entity_ids else None
     )
-
-        migration.EventIDPostMigration.
+    post_migrate_event_ids = (
+        migration.EventIDPostMigration.needs_migrate_impl
         if enable_migrate_event_ids
-        else lambda
+        else lambda _1, _2, _3: migration.DataMigrationStatus(
+            needs_migrate=False, migration_done=True
+        )
     )
     with (
         patch(
@@ -1703,8 +1722,8 @@ async def async_test_recorder(
             autospec=True,
         ),
         patch(
-            "homeassistant.components.recorder.migration.EventIDPostMigration.
-            side_effect=
+            "homeassistant.components.recorder.migration.EventIDPostMigration.needs_migrate_impl",
+            side_effect=post_migrate_event_ids,
             autospec=True,
         ),
         patch(
{pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytest-homeassistant-custom-component
-Version: 0.13.
+Version: 0.13.291
 Summary: Experimental package to automatically extract test plugins for Home Assistant custom components
 Home-page: https://github.com/MatthewFlamm/pytest-homeassistant-custom-component
 Author: Matthew Flamm
@@ -21,9 +21,10 @@ Requires-Dist: sqlalchemy
 Requires-Dist: coverage==7.10.6
 Requires-Dist: freezegun==1.5.2
 Requires-Dist: go2rtc-client==0.2.1
+Requires-Dist: librt==0.2.1
 Requires-Dist: license-expression==30.4.3
 Requires-Dist: mock-open==1.4.0
-Requires-Dist: pydantic==2.
+Requires-Dist: pydantic==2.12.2
 Requires-Dist: pylint-per-file-ignores==1.4.0
 Requires-Dist: pipdeptree==2.26.1
 Requires-Dist: pytest-asyncio==1.2.0
@@ -40,9 +41,9 @@ Requires-Dist: pytest-xdist==3.8.0
 Requires-Dist: pytest==8.4.2
 Requires-Dist: requests-mock==1.12.1
 Requires-Dist: respx==0.22.0
-Requires-Dist: syrupy==
+Requires-Dist: syrupy==5.0.0
 Requires-Dist: tqdm==4.67.1
-Requires-Dist: homeassistant==2025.
+Requires-Dist: homeassistant==2025.11.0b1
 Requires-Dist: SQLAlchemy==2.0.41
 Requires-Dist: paho-mqtt==2.1.0
 Requires-Dist: numpy==2.3.2
@@ -60,7 +61,7 @@ Dynamic: summary

 # pytest-homeassistant-custom-component

-
+

 [](https://gitpod.io/#https://github.com/MatthewFlamm/pytest-homeassistant-custom-component)

{pytest_homeassistant_custom_component-0.13.289.dist-info → pytest_homeassistant_custom_component-0.13.291.dist-info}/RECORD

@@ -1,28 +1,28 @@
 pytest_homeassistant_custom_component/__init__.py,sha256=pUI8j-H-57ncCLnvZSDWZPCtJpvi3ACZqPtH5SbedZA,138
 pytest_homeassistant_custom_component/asyncio_legacy.py,sha256=UdkV2mKqeS21QX9LSdBYsBRbm2h4JCVVZeesaOLKOAE,3886
 pytest_homeassistant_custom_component/common.py,sha256=51NiAKz0WppfuJzZq4d2ljFrXMpWBhvZSwCglw86ahI,65438
-pytest_homeassistant_custom_component/const.py,sha256=
+pytest_homeassistant_custom_component/const.py,sha256=26IVwahF1Vn0xRHF3twU5nbROhOHL0BRlpjQOS3SDYY,442
 pytest_homeassistant_custom_component/ignore_uncaught_exceptions.py,sha256=rilak_dQGMNhDqST1ZzhjZl_qmytFjkcez0vYmLMQ4Q,1601
 pytest_homeassistant_custom_component/patch_json.py,sha256=hNUeb1yxAr7ONfvX-o_WkI6zhQDCdKl7GglPjkVUiHo,1063
 pytest_homeassistant_custom_component/patch_recorder.py,sha256=lW8N_3ZIKQ5lsVjRc-ROo7d0egUZcpjquWKqe7iEF94,819
-pytest_homeassistant_custom_component/patch_time.py,sha256=
-pytest_homeassistant_custom_component/plugins.py,sha256=
+pytest_homeassistant_custom_component/patch_time.py,sha256=jdnOAXDxUA0AKqvyeSrRC18rHDGfcpWYuLhmUglebCE,3374
+pytest_homeassistant_custom_component/plugins.py,sha256=B5EXdcXOTZLyfL-Wm5yrK7fgJNmwCP5mopYfqNgSEy0,69609
 pytest_homeassistant_custom_component/syrupy.py,sha256=N_g_90dWqruzUogQi0rJsuN0XRbA6ffJen62r8P9cdo,15588
 pytest_homeassistant_custom_component/typing.py,sha256=zGhdf6U6aRq5cPwIfRUdtZeApLOyPD2EArjznKoIRZM,1734
 pytest_homeassistant_custom_component/components/__init__.py,sha256=0BHCdArl5gPjDJWaZrqvApHvzL_29FbE1RMg_mg__Qs,138
 pytest_homeassistant_custom_component/components/diagnostics/__init__.py,sha256=O_ys8t0iHvRorFr4TrR9k3sa3Xh5qBb4HsylY775UFA,2431
 pytest_homeassistant_custom_component/components/recorder/__init__.py,sha256=ugrLzvjSQFSmYRjy88ZZSiyA-NLgKlLkFp0OKguy6a4,225
-pytest_homeassistant_custom_component/components/recorder/common.py,sha256=
+pytest_homeassistant_custom_component/components/recorder/common.py,sha256=8c_oqbQtg7dI-JoOaZrLYKFAjEJvjvSIA1atfui-WpQ,22091
 pytest_homeassistant_custom_component/components/recorder/db_schema_0.py,sha256=0mez9slhL-I286dDAxq06UDvWRU6RzCA2GKOwtj9JOI,5547
 pytest_homeassistant_custom_component/test_util/__init__.py,sha256=ljLmNeblq1vEgP0vhf2P1-SuyGSHvLKVA0APSYA0Xl8,1034
 pytest_homeassistant_custom_component/test_util/aiohttp.py,sha256=oPQaFRgXcAfHj9dE2Rjl1UJCBfhQp80CnQV02rXMYLo,11520
 pytest_homeassistant_custom_component/testing_config/__init__.py,sha256=SRp6h9HJi2I_vA6cPNkMiR0BTYib5XVmL03H-l3BPL0,158
 pytest_homeassistant_custom_component/testing_config/custom_components/__init__.py,sha256=-l6KCBLhwEDkCztlY6S-j53CjmKY6-A_3eX5JVS02NY,173
 pytest_homeassistant_custom_component/testing_config/custom_components/test_constant_deprecation/__init__.py,sha256=2vF_C-VP9tDjZMX7h6iJRAugtH2Bf3b4fE3i9j4vGeY,383
-pytest_homeassistant_custom_component-0.13.
-pytest_homeassistant_custom_component-0.13.
-pytest_homeassistant_custom_component-0.13.
-pytest_homeassistant_custom_component-0.13.
-pytest_homeassistant_custom_component-0.13.
-pytest_homeassistant_custom_component-0.13.
-pytest_homeassistant_custom_component-0.13.
+pytest_homeassistant_custom_component-0.13.291.dist-info/licenses/LICENSE,sha256=7h-vqUxyeQNXiQgRJ8350CSHOy55M07DZuv4KG70AS8,1070
+pytest_homeassistant_custom_component-0.13.291.dist-info/licenses/LICENSE_HA_CORE.md,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+pytest_homeassistant_custom_component-0.13.291.dist-info/METADATA,sha256=NitW4dKADWyWswh6h_SsO4DMtBWUDD4f91L8y9V8Hj0,5962
+pytest_homeassistant_custom_component-0.13.291.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pytest_homeassistant_custom_component-0.13.291.dist-info/entry_points.txt,sha256=bOCTSuP8RSPg0QfwdfurUShvMGWg4MI2F8rxbWx-VtQ,73
+pytest_homeassistant_custom_component-0.13.291.dist-info/top_level.txt,sha256=PR2cize2la22eOO7dQChJWK8dkJnuMmDC-fhafmdOWw,38
+pytest_homeassistant_custom_component-0.13.291.dist-info/RECORD,,
The remaining dist-info files listed above with +0 -0 (WHEEL, entry_points.txt, licenses/LICENSE, licenses/LICENSE_HA_CORE.md, top_level.txt) are unchanged between 0.13.289 and 0.13.291.