ha-mcp-dev 7.2.0.dev347__tar.gz → 7.2.0.dev348__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101) hide show
  1. {ha_mcp_dev-7.2.0.dev347/src/ha_mcp_dev.egg-info → ha_mcp_dev-7.2.0.dev348}/PKG-INFO +1 -1
  2. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/pyproject.toml +1 -1
  3. ha_mcp_dev-7.2.0.dev348/src/ha_mcp/tools/tools_history.py +563 -0
  4. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348/src/ha_mcp_dev.egg-info}/PKG-INFO +1 -1
  5. ha_mcp_dev-7.2.0.dev347/src/ha_mcp/tools/tools_history.py +0 -725
  6. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/LICENSE +0 -0
  7. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/MANIFEST.in +0 -0
  8. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/README.md +0 -0
  9. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/setup.cfg +0 -0
  10. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/__init__.py +0 -0
  11. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/__main__.py +0 -0
  12. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/_pypi_marker +0 -0
  13. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/auth/__init__.py +0 -0
  14. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/auth/consent_form.py +0 -0
  15. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/auth/provider.py +0 -0
  16. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/client/__init__.py +0 -0
  17. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/client/rest_client.py +0 -0
  18. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/client/websocket_client.py +0 -0
  19. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/client/websocket_listener.py +0 -0
  20. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/config.py +0 -0
  21. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/errors.py +0 -0
  22. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/py.typed +0 -0
  23. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/.claude/settings.json +0 -0
  24. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/.claude-plugin/marketplace.json +0 -0
  25. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/.claude-plugin/plugin.json +0 -0
  26. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/.github/ISSUE_TEMPLATE/skill-rca.md +0 -0
  27. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/AGENTS.md +0 -0
  28. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/CLAUDE.md +0 -0
  29. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/CONTRIBUTING.md +0 -0
  30. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/LICENSE +0 -0
  31. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/README.md +0 -0
  32. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/SKILL.md +0 -0
  33. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/evals/evals.json +0 -0
  34. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/automation-patterns.md +0 -0
  35. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/dashboard-cards.md +0 -0
  36. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/dashboard-guide.md +0 -0
  37. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/device-control.md +0 -0
  38. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/domain-docs.md +0 -0
  39. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/examples.yaml +0 -0
  40. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/helper-selection.md +0 -0
  41. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/safe-refactoring.md +0 -0
  42. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/resources/skills-vendor/skills/home-assistant-best-practices/references/template-guidelines.md +0 -0
  43. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/server.py +0 -0
  44. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/smoke_test.py +0 -0
  45. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/__init__.py +0 -0
  46. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/backup.py +0 -0
  47. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/best_practice_checker.py +0 -0
  48. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/device_control.py +0 -0
  49. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/enhanced.py +0 -0
  50. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/helpers.py +0 -0
  51. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/registry.py +0 -0
  52. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/smart_search.py +0 -0
  53. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_addons.py +0 -0
  54. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_areas.py +0 -0
  55. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_blueprints.py +0 -0
  56. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_bug_report.py +0 -0
  57. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_calendar.py +0 -0
  58. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_camera.py +0 -0
  59. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_categories.py +0 -0
  60. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_config_automations.py +0 -0
  61. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_config_dashboards.py +0 -0
  62. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_config_entry_flow.py +0 -0
  63. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_config_helpers.py +0 -0
  64. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_config_scripts.py +0 -0
  65. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_entities.py +0 -0
  66. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_filesystem.py +0 -0
  67. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_groups.py +0 -0
  68. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_hacs.py +0 -0
  69. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_integrations.py +0 -0
  70. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_labels.py +0 -0
  71. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_mcp_component.py +0 -0
  72. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_registry.py +0 -0
  73. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_resources.py +0 -0
  74. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_search.py +0 -0
  75. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_service.py +0 -0
  76. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_services.py +0 -0
  77. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_system.py +0 -0
  78. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_todo.py +0 -0
  79. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_traces.py +0 -0
  80. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_updates.py +0 -0
  81. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_utility.py +0 -0
  82. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_voice_assistant.py +0 -0
  83. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_yaml_config.py +0 -0
  84. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/tools_zones.py +0 -0
  85. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/tools/util_helpers.py +0 -0
  86. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/transforms/__init__.py +0 -0
  87. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/transforms/categorized_search.py +0 -0
  88. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/utils/__init__.py +0 -0
  89. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/utils/domain_handlers.py +0 -0
  90. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/utils/fuzzy_search.py +0 -0
  91. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/utils/operation_manager.py +0 -0
  92. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/utils/python_sandbox.py +0 -0
  93. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp/utils/usage_logger.py +0 -0
  94. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp_dev.egg-info/SOURCES.txt +0 -0
  95. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp_dev.egg-info/dependency_links.txt +0 -0
  96. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp_dev.egg-info/entry_points.txt +0 -0
  97. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp_dev.egg-info/requires.txt +0 -0
  98. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/src/ha_mcp_dev.egg-info/top_level.txt +0 -0
  99. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/tests/__init__.py +0 -0
  100. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/tests/test_constants.py +0 -0
  101. {ha_mcp_dev-7.2.0.dev347 → ha_mcp_dev-7.2.0.dev348}/tests/test_env_manager.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ha-mcp-dev
3
- Version: 7.2.0.dev347
3
+ Version: 7.2.0.dev348
4
4
  Summary: Home Assistant MCP Server - Complete control of Home Assistant through MCP
5
5
  Author-email: Julien <github@qc-h.net>
6
6
  License: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "ha-mcp-dev"
7
- version = "7.2.0.dev347"
7
+ version = "7.2.0.dev348"
8
8
  description = "Home Assistant MCP Server - Complete control of Home Assistant through MCP"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.13,<3.14"
@@ -0,0 +1,563 @@
1
+ """
2
+ Historical data access tools for Home Assistant MCP server.
3
+
4
+ This module provides tools for accessing historical data from Home Assistant's
5
+ recorder component via a single consolidated tool:
6
+
7
+ ha_get_history -- Retrieve historical data with source-selectable mode:
8
+ - source="history" (default): Raw state changes, ~10 day retention
9
+ - source="statistics": Pre-aggregated long-term statistics, permanent retention
10
+ """
11
+
12
+ import logging
13
+ import re
14
+ from datetime import UTC, datetime, timedelta
15
+ from typing import Annotated, Any, Literal
16
+
17
+ from fastmcp.exceptions import ToolError
18
+ from pydantic import Field
19
+
20
+ from ..errors import ErrorCode, create_error_response
21
+ from .helpers import (
22
+ exception_to_structured_error,
23
+ get_connected_ws_client,
24
+ log_tool_usage,
25
+ raise_tool_error,
26
+ )
27
+ from .util_helpers import (
28
+ add_timezone_metadata,
29
+ coerce_int_param,
30
+ parse_string_list_param,
31
+ )
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
+ def _convert_timestamp(value: Any) -> str | None:
37
+ """Convert a timestamp value to ISO format string.
38
+
39
+ Handles both Unix epoch floats (from WebSocket short-form responses)
40
+ and string timestamps (from long-form responses).
41
+
42
+ Args:
43
+ value: Timestamp as Unix epoch float, ISO string, or None
44
+
45
+ Returns:
46
+ ISO format string or None if value is None/invalid
47
+ """
48
+ if value is None:
49
+ return None
50
+ if isinstance(value, (int, float)):
51
+ return datetime.fromtimestamp(value, tz=UTC).isoformat()
52
+ if isinstance(value, str):
53
+ return value
54
+ return None
55
+
56
+
57
+ def parse_relative_time(time_str: str | None, default_hours: int = 24) -> datetime:
58
+ """
59
+ Parse a time string that can be either ISO format or relative (e.g., '24h', '7d').
60
+
61
+ Args:
62
+ time_str: Time string in ISO format or relative format (e.g., "24h", "7d", "2w", "1m" where 1m = 30 days)
63
+ default_hours: Default hours to go back if time_str is None
64
+
65
+ Returns:
66
+ datetime object in UTC
67
+ """
68
+ if time_str is None:
69
+ return datetime.now(UTC) - timedelta(hours=default_hours)
70
+
71
+ # Check for relative time format
72
+ relative_pattern = r"^(\d+)([hdwm])$"
73
+ match = re.match(relative_pattern, time_str.lower().strip())
74
+
75
+ if match:
76
+ value = int(match.group(1))
77
+ unit = match.group(2)
78
+
79
+ if unit == "h":
80
+ return datetime.now(UTC) - timedelta(hours=value)
81
+ elif unit == "d":
82
+ return datetime.now(UTC) - timedelta(days=value)
83
+ elif unit == "w":
84
+ return datetime.now(UTC) - timedelta(weeks=value)
85
+ elif unit == "m":
86
+ # Approximate month as 30 days
87
+ return datetime.now(UTC) - timedelta(days=value * 30)
88
+
89
+ # Try parsing as ISO format
90
+ try:
91
+ # Handle various ISO formats
92
+ if time_str.endswith("Z"):
93
+ time_str = time_str[:-1] + "+00:00"
94
+ dt = datetime.fromisoformat(time_str)
95
+ # Ensure timezone awareness
96
+ if dt.tzinfo is None:
97
+ dt = dt.replace(tzinfo=UTC)
98
+ return dt
99
+ except ValueError as e:
100
+ raise ValueError(
101
+ f"Invalid time format: {time_str}. Use ISO format or relative (e.g., '24h', '7d', '2w', '1m')"
102
+ ) from e
103
+
104
+
105
# Source-dependent default look-back periods (in hours): raw history defaults
# to the last 24 hours; long-term statistics default to the last 30 days.
# Keys match the ha_get_history `source` parameter values.
_DEFAULT_START_HOURS_BY_SOURCE: dict[str, int] = {"history": 24, "statistics": 30 * 24}
107
+
108
+
109
def register_history_tools(mcp: Any, client: Any, **kwargs: Any) -> None:
    """Register historical data access tools with the MCP server."""

    # Default and maximum limits for history entries
    DEFAULT_HISTORY_LIMIT = 100
    MAX_HISTORY_LIMIT = 1000

    # NOTE: the Annotated/Field schema and the docstring below are surfaced to
    # MCP clients as the tool's parameter schema and description — treat them
    # as part of the tool's public contract.
    @mcp.tool(
        tags={"History & Statistics"},
        annotations={
            "idempotentHint": True,
            "readOnlyHint": True,
            "title": "Get Entity History or Statistics",
        },
    )
    @log_tool_usage
    async def ha_get_history(
        entity_ids: Annotated[
            str | list[str],
            Field(
                description="Entity ID(s) to query. Can be a single ID, comma-separated string, or JSON array."
            ),
        ],
        source: Annotated[
            Literal["history", "statistics"],
            Field(
                description=(
                    'Data source: "history" (default) for raw state changes (~10 day retention), '
                    'or "statistics" for pre-aggregated long-term data (permanent, requires state_class).'
                ),
                default="history",
            ),
        ] = "history",
        start_time: Annotated[
            str | None,
            Field(
                description="Start time: ISO datetime or relative (e.g., '24h', '7d', '30d'). Default: 24h ago for history, 30d ago for statistics",
                default=None,
            ),
        ] = None,
        end_time: Annotated[
            str | None,
            Field(
                description="End time: ISO datetime. Default: now",
                default=None,
            ),
        ] = None,
        # History-specific (ignored when source="statistics")
        minimal_response: Annotated[
            bool,
            Field(
                description='Return only states/timestamps without attributes. Default: true. Ignored when source="statistics"',
                default=True,
            ),
        ] = True,
        significant_changes_only: Annotated[
            bool,
            Field(
                description='Filter to significant state changes only. Default: true. Ignored when source="statistics"',
                default=True,
            ),
        ] = True,
        limit: Annotated[
            int | str | None,
            Field(
                description='Max state changes per entity. Default: 100, Max: 1000. Ignored when source="statistics"',
                default=None,
            ),
        ] = None,
        # Statistics-specific (ignored when source="history")
        period: Annotated[
            str,
            Field(
                description='Aggregation period: "5minute", "hour", "day", "week", "month". Default: "day". Ignored when source="history"',
                default="day",
            ),
        ] = "day",
        statistic_types: Annotated[
            str | list[str] | None,
            Field(
                description='Statistics types: "mean", "min", "max", "sum", "state", "change". Default: all. Ignored when source="history"',
                default=None,
            ),
        ] = None,
    ) -> dict[str, Any]:
        """
        Retrieve historical data from Home Assistant's recorder.

        **Sources:**
        - "history" (default): Raw state changes, ~10 day retention, full resolution
        - "statistics": Pre-aggregated data, permanent retention, requires state_class

        **Shared params:** entity_ids, start_time, end_time
        **History params:** minimal_response, significant_changes_only, limit (ignored for statistics)
        **Statistics params:** period, statistic_types (ignored for history)

        **Default time range:** 24h for history, 30 days for statistics

        **Use ha_get_history (default) when:**
        - Troubleshooting why a value changed ("Why was my bedroom cold last night?")
        - Checking event sequences ("Did my garage door open while I was away?")
        - Analyzing recent patterns ("What time does motion usually trigger?")

        **Use ha_get_history(source="statistics") when:**
        - Tracking long-term trends beyond 10 days ("Energy use this month vs last month?")
        - Computing period averages ("Average living room temperature over 6 months?")
        - Entities must have state_class (measurement, total, total_increasing)

        **Example — history (default):**
        ```python
        ha_get_history(entity_ids="sensor.bedroom_temperature", start_time="24h")
        ha_get_history(entity_ids=["sensor.temperature", "sensor.humidity"], start_time="7d", limit=500)
        ```

        **Example — statistics:**
        ```python
        ha_get_history(source="statistics", entity_ids="sensor.total_energy_kwh", start_time="30d", period="day")
        ha_get_history(source="statistics", entity_ids="sensor.living_room_temperature",
                       start_time="6m", period="month", statistic_types=["mean", "min", "max"])
        ```
        """
        try:
            # Parse entity_ids - handle string, list, or comma-separated
            if isinstance(entity_ids, str):
                if entity_ids.startswith("["):
                    # JSON array string
                    parsed_ids = parse_string_list_param(entity_ids, "entity_ids")
                    # NOTE(review): raise_tool_error is expected to raise a
                    # ToolError so flow never continues past it — confirm in
                    # .helpers.
                    if parsed_ids is None:
                        raise_tool_error(create_error_response(
                            ErrorCode.VALIDATION_MISSING_PARAMETER,
                            "entity_ids is required",
                            suggestions=["Provide at least one entity ID"],
                        ))
                    entity_id_list = parsed_ids
                elif "," in entity_ids:
                    # Comma-separated string
                    entity_id_list = [e.strip() for e in entity_ids.split(",") if e.strip()]
                else:
                    # Single entity ID
                    entity_id_list = [entity_ids.strip()]
            else:
                entity_id_list = entity_ids

            # Covers both an empty list argument and a parsed-empty string.
            if not entity_id_list:
                raise_tool_error(create_error_response(
                    ErrorCode.VALIDATION_MISSING_PARAMETER,
                    "entity_ids is required",
                    suggestions=["Provide at least one entity ID"],
                ))

            # Source-dependent default hours (24h for history, 30d for statistics)
            default_hours = _DEFAULT_START_HOURS_BY_SOURCE[source]

            # Parse time parameters
            try:
                start_dt = parse_relative_time(start_time, default_hours=default_hours)
            except ValueError as e:
                raise_tool_error(create_error_response(
                    ErrorCode.VALIDATION_INVALID_PARAMETER,
                    str(e),
                    context={"parameter": "start_time"},
                    suggestions=[
                        "Use ISO format: '2025-01-25T00:00:00Z'",
                        "Use relative format: '24h', '7d', '2w', '1m'",
                    ],
                ))

            if end_time:
                try:
                    # default_hours=0 is irrelevant here: end_time is non-None,
                    # so the default branch of parse_relative_time never runs.
                    end_dt = parse_relative_time(end_time, default_hours=0)
                except ValueError as e:
                    raise_tool_error(create_error_response(
                        ErrorCode.VALIDATION_INVALID_PARAMETER,
                        str(e),
                        context={"parameter": "end_time"},
                        suggestions=["Use ISO format: '2025-01-26T00:00:00Z'"],
                    ))
            else:
                end_dt = datetime.now(UTC)

            # Connect to WebSocket (shared by both sources)
            ws_client, error = await get_connected_ws_client(
                client.base_url, client.token
            )
            if error or ws_client is None:
                raise_tool_error(error or create_error_response(
                    ErrorCode.CONNECTION_FAILED,
                    "Failed to connect to Home Assistant WebSocket",
                ))

            try:
                # Dispatch to the source-specific fetcher; history-only and
                # statistics-only parameters are simply not forwarded to the
                # other branch.
                if source == "statistics":
                    return await _fetch_statistics(
                        ws_client, client, entity_id_list,
                        start_dt, end_dt, period, statistic_types,
                    )
                else:
                    return await _fetch_history(
                        ws_client, client, entity_id_list,
                        start_dt, end_dt, minimal_response,
                        significant_changes_only, limit,
                        DEFAULT_HISTORY_LIMIT, MAX_HISTORY_LIMIT,
                    )
            finally:
                # Always release the WebSocket connection, even when the
                # fetcher raises.
                if ws_client:
                    await ws_client.disconnect()

        except ToolError:
            # Structured errors raised above pass through unchanged.
            raise
        except Exception as e:
            # Unexpected failures are converted into a structured error with
            # source-appropriate troubleshooting hints.
            if source == "statistics":
                suggestions = [
                    "Check Home Assistant connection",
                    "Verify entities have state_class attribute",
                    "Ensure recorder component is enabled with statistics",
                ]
            else:
                suggestions = [
                    "Check Home Assistant connection",
                    "Verify entity IDs are correct",
                    "Ensure recorder component is enabled",
                ]
            exception_to_structured_error(e, suggestions=suggestions)
332
+
333
+
334
async def _fetch_history(
    ws_client: Any,
    client: Any,
    entity_id_list: list[str],
    start_dt: datetime,
    end_dt: datetime,
    minimal_response: bool,
    significant_changes_only: bool,
    limit: int | str | None,
    default_limit: int,
    max_limit: int,
) -> dict[str, Any]:
    """Fetch raw state history via the history/history_during_period command.

    Args:
        ws_client: Connected Home Assistant WebSocket client.
        client: REST client (used for timezone metadata enrichment).
        entity_id_list: Entity IDs to query.
        start_dt: Start of the query window (timezone-aware).
        end_dt: End of the query window (timezone-aware).
        minimal_response: Drop attributes from each returned state.
        significant_changes_only: Ask the recorder for significant changes only.
        limit: Per-entity cap on returned states (coerced to int).
        default_limit: Cap applied when limit is None.
        max_limit: Hard upper bound on the cap.

    Returns:
        Structured history payload with timezone metadata attached.
    """
    # Coerce and clamp the per-entity cap; invalid values become a
    # structured validation error.
    try:
        cap = coerce_int_param(
            limit,
            param_name="limit",
            default=default_limit,
            min_value=1,
            max_value=max_limit,
        )
        cap = default_limit if cap is None else cap
    except ValueError as e:
        raise_tool_error(create_error_response(
            ErrorCode.VALIDATION_INVALID_PARAMETER,
            str(e),
            context={"parameter": "limit"},
            suggestions=["Provide limit as an integer (e.g., 100)"],
        ))

    request = {
        "start_time": start_dt.isoformat(),
        "end_time": end_dt.isoformat(),
        "entity_ids": entity_id_list,
        "minimal_response": minimal_response,
        "significant_changes_only": significant_changes_only,
        "no_attributes": minimal_response,
    }
    response = await ws_client.send_command(
        "history/history_during_period", **request
    )

    if not response.get("success"):
        error_msg = response.get("error", "Unknown error")
        raise_tool_error(create_error_response(
            ErrorCode.SERVICE_CALL_FAILED,
            f"Failed to retrieve history: {error_msg}",
            context={"entity_ids": entity_id_list},
            suggestions=[
                "Verify entity IDs exist using ha_search_entities()",
                "Check that entities are recorded (not excluded from recorder)",
                "Ensure time range is within recorder retention period (~10 days)",
            ],
        ))

    def _shape(raw: dict[str, Any]) -> dict[str, Any]:
        # Short-form responses use compressed keys (s/lu/lc/a); long-form
        # responses use full names. Accept either.
        updated = raw.get("lu", raw.get("last_updated"))
        changed = raw.get("lc", raw.get("last_changed"))
        if changed is None and updated is not None:
            changed = updated
        shaped: dict[str, Any] = {
            "state": raw.get("s", raw.get("state")),
            "last_changed": _convert_timestamp(changed),
            "last_updated": _convert_timestamp(updated),
        }
        if not minimal_response:
            shaped["attributes"] = raw.get("a", raw.get("attributes", {}))
        return shaped

    raw_by_entity = response.get("result", {})
    entities_history = []

    for entity_id in entity_id_list:
        raw_states = raw_by_entity.get(entity_id, [])
        shaped_states = [_shape(s) for s in raw_states[:cap]]
        entities_history.append({
            "entity_id": entity_id,
            "period": {
                "start": start_dt.isoformat(),
                "end": end_dt.isoformat(),
            },
            "states": shaped_states,
            "count": len(shaped_states),
            "total_available": len(raw_states),
            "truncated": len(raw_states) > cap,
        })

    history_data = {
        "success": True,
        "source": "history",
        "entities": entities_history,
        "period": {
            "start": start_dt.isoformat(),
            "end": end_dt.isoformat(),
        },
        "query_params": {
            "minimal_response": minimal_response,
            "significant_changes_only": significant_changes_only,
            "limit": cap,
        },
    }

    return await add_timezone_metadata(client, history_data)
442
+
443
+
444
async def _fetch_statistics(
    ws_client: Any,
    client: Any,
    entity_id_list: list[str],
    start_dt: datetime,
    end_dt: datetime,
    period: str,
    statistic_types: str | list[str] | None,
) -> dict[str, Any]:
    """Execute the recorder/statistics_during_period WebSocket call.

    Args:
        ws_client: Connected Home Assistant WebSocket client.
        client: REST client (used for timezone metadata enrichment).
        entity_id_list: Entity/statistic IDs to query.
        start_dt: Start of the query window (timezone-aware).
        end_dt: End of the query window (timezone-aware).
        period: Aggregation bucket ("5minute", "hour", "day", "week", "month").
        statistic_types: Requested statistic columns; None/empty means all.

    Returns:
        Structured statistics payload with timezone metadata attached.
    """
    # Validate period before touching the wire.
    valid_periods = ["5minute", "hour", "day", "week", "month"]
    if period not in valid_periods:
        raise_tool_error(create_error_response(
            ErrorCode.VALIDATION_INVALID_PARAMETER,
            f"Invalid period: {period}",
            context={"period": period, "valid_periods": valid_periods},
            suggestions=[f"Use one of: {', '.join(valid_periods)}"],
        ))

    # Normalize statistic_types (JSON array string, comma-separated string,
    # bare string, or list) into a plain list; empty means "all types".
    stat_types_list: list[str] = []
    if statistic_types:
        if isinstance(statistic_types, str):
            if statistic_types.startswith("["):
                stat_types_list = (
                    parse_string_list_param(statistic_types, "statistic_types") or []
                )
            elif "," in statistic_types:
                stat_types_list = [
                    s.strip() for s in statistic_types.split(",") if s.strip()
                ]
            else:
                stat_types_list = [statistic_types.strip()]
        else:
            stat_types_list = list(statistic_types)

    valid_types = ["mean", "min", "max", "sum", "state", "change"]
    invalid_types = [t for t in stat_types_list if t not in valid_types]
    if invalid_types:
        raise_tool_error(create_error_response(
            ErrorCode.VALIDATION_INVALID_PARAMETER,
            f"Invalid statistic types: {invalid_types}",
            context={"invalid_types": invalid_types, "valid_types": valid_types},
            suggestions=[f"Use one or more of: {', '.join(valid_types)}"],
        ))

    command_params: dict[str, Any] = {
        "start_time": start_dt.isoformat(),
        "end_time": end_dt.isoformat(),
        "statistic_ids": entity_id_list,
        "period": period,
    }
    # Bug fix: the previous code normalized None to [] and then checked
    # `is not None`, so "types": [] was ALWAYS sent — asking Home Assistant
    # for zero statistic columns instead of the documented default of "all".
    # Only send the key when specific types were actually requested; omitting
    # it lets the server return every available type.
    if stat_types_list:
        command_params["types"] = stat_types_list

    response = await ws_client.send_command(
        "recorder/statistics_during_period", **command_params
    )

    if not response.get("success"):
        error_msg = response.get("error", "Unknown error")
        raise_tool_error(create_error_response(
            ErrorCode.SERVICE_CALL_FAILED,
            f"Failed to retrieve statistics: {error_msg}",
            context={"entity_ids": entity_id_list},
            suggestions=[
                "Verify entities have state_class attribute (measurement, total, total_increasing)",
                "Use ha_search_entities() to check entity attributes",
                "Statistics are only available for entities that track numeric values",
            ],
        ))

    result_data = response.get("result", {})
    entities_statistics = []
    # When no explicit types were requested, extract (and report) all of them.
    all_stat_types = stat_types_list or ["mean", "min", "max", "sum", "state", "change"]

    for entity_id in entity_id_list:
        entity_stats = result_data.get(entity_id, [])
        formatted_stats = []
        unit = None  # first unit_of_measurement seen wins

        for stat in entity_stats:
            stat_entry: dict[str, Any] = {"start": stat.get("start")}
            for stat_type in all_stat_types:
                if stat_type in stat:
                    stat_entry[stat_type] = stat[stat_type]
            if unit is None and "unit_of_measurement" in stat:
                unit = stat["unit_of_measurement"]
            formatted_stats.append(stat_entry)

        entities_statistics.append({
            "entity_id": entity_id,
            "period": period,
            "statistics": formatted_stats,
            "count": len(formatted_stats),
            "unit_of_measurement": unit,
        })

    # Surface entities that produced no rows so the caller can tell them apart
    # from entities that simply had flat data.
    empty_entities: list[str] = [
        str(e["entity_id"]) for e in entities_statistics if e["count"] == 0
    ]

    statistics_data: dict[str, Any] = {
        "success": True,
        "source": "statistics",
        "entities": entities_statistics,
        "period_type": period,
        "time_range": {
            "start": start_dt.isoformat(),
            "end": end_dt.isoformat(),
        },
        "statistic_types": all_stat_types,
    }

    if empty_entities:
        statistics_data["warnings"] = [
            f"No statistics found for: {', '.join(empty_entities)}. "
            "These entities may not have state_class attribute or may not have recorded data yet."
        ]

    return await add_timezone_metadata(client, statistics_data)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ha-mcp-dev
3
- Version: 7.2.0.dev347
3
+ Version: 7.2.0.dev348
4
4
  Summary: Home Assistant MCP Server - Complete control of Home Assistant through MCP
5
5
  Author-email: Julien <github@qc-h.net>
6
6
  License: MIT