pypetkitapi 1.9.3__py3-none-any.whl → 1.9.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pypetkitapi/__init__.py +5 -4
- pypetkitapi/client.py +61 -20
- pypetkitapi/command.py +14 -22
- pypetkitapi/const.py +35 -8
- pypetkitapi/exceptions.py +9 -0
- pypetkitapi/media.py +423 -0
- pypetkitapi/schedule_container.py +67 -0
- {pypetkitapi-1.9.3.dist-info → pypetkitapi-1.9.4.dist-info}/METADATA +6 -3
- pypetkitapi-1.9.4.dist-info/RECORD +18 -0
- {pypetkitapi-1.9.3.dist-info → pypetkitapi-1.9.4.dist-info}/WHEEL +1 -1
- pypetkitapi/medias.py +0 -199
- pypetkitapi-1.9.3.dist-info/RECORD +0 -17
- {pypetkitapi-1.9.3.dist-info → pypetkitapi-1.9.4.dist-info}/LICENSE +0 -0
pypetkitapi/__init__.py CHANGED
@@ -36,11 +36,11 @@ from .containers import Pet
 from .exceptions import PetkitAuthenticationError, PypetkitError
 from .feeder_container import Feeder, RecordsItems
 from .litter_container import Litter, LitterRecord, WorkState
-from .
+from .media import DownloadDecryptMedia, MediaFile, MediaManager
 from .purifier_container import Purifier
 from .water_fountain_container import WaterFountain
 
-__version__ = "1.9.3"
+__version__ = "1.9.4"
 
 __all__ = [
     "CTW3",
@@ -65,8 +65,9 @@ __all__ = [
     "Litter",
     "LitterCommand",
     "LitterRecord",
-    "
-    "
+    "MediaManager",
+    "DownloadDecryptMedia",
+    "MediaFile",
     "Pet",
     "PetCommand",
     "PetKitClient",
pypetkitapi/client.py CHANGED
@@ -11,6 +11,7 @@ import urllib.parse
 
 import aiohttp
 from aiohttp import ContentTypeError
+import m3u8
 
 from pypetkitapi.command import ACTIONS_MAP, FOUNTAIN_COMMAND, FountainAction
 from pypetkitapi.const import (
@@ -53,6 +54,7 @@ from pypetkitapi.exceptions import (
     PetkitInvalidHTTPResponseCodeError,
     PetkitInvalidResponseFormat,
     PetkitRegionalServerNotFoundError,
+    PetkitSessionError,
     PetkitSessionExpiredError,
     PetkitTimeoutError,
     PypetkitError,
@@ -148,6 +150,7 @@ class PetKitClient:
     async def login(self, valid_code: str | None = None) -> None:
         """Login to the PetKit service and retrieve the appropriate server."""
         # Retrieve the list of servers
+        self._session = None
         await self._get_base_url()
 
         _LOGGER.info("Logging in to PetKit server")
@@ -180,6 +183,8 @@ class PetKitClient:
         )
         session_data = response["session"]
         self._session = SessionInfo(**session_data)
+        expiration_date = datetime.now() + timedelta(seconds=self._session.expires_in)
+        _LOGGER.debug("Login successful (token expiration %s)", expiration_date)
 
     async def refresh_session(self) -> None:
         """Refresh the session."""
@@ -193,6 +198,7 @@ class PetKitClient:
         session_data = response["session"]
         self._session = SessionInfo(**session_data)
         self._session.refreshed_at = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
+        _LOGGER.debug("Session refreshed at %s", self._session.refreshed_at)
 
     async def validate_session(self) -> None:
         """Check if the session is still valid and refresh or re-login if necessary."""
@@ -201,31 +207,27 @@ class PetKitClient:
             await self.login()
             return
 
-
-
-
+        created = datetime.strptime(self._session.created_at, "%Y-%m-%dT%H:%M:%S.%f%z")
+        is_expired = datetime.now(tz=created.tzinfo) - created >= timedelta(
+            seconds=self._session.expires_in
         )
-        current_time = datetime.now(tz=created_at.tzinfo)
-        token_age = current_time - created_at
-        max_age = timedelta(seconds=self._session.expires_in)
-        half_max_age = max_age / 2
 
-        if
+        if is_expired:
             _LOGGER.debug("Token expired, re-logging in")
             await self.login()
-        elif
-
-
+        # elif (max_age / 2) < token_age < max_age:
+        #     _LOGGER.debug("Token still OK, but refreshing session")
+        #     await self.refresh_session()
 
     async def get_session_id(self) -> dict:
         """Return the session ID."""
+        await self.validate_session()
         if self._session is None:
-            raise
+            raise PetkitSessionError("No session ID available")
         return {"F-Session": self._session.id, "X-Session": self._session.id}
 
     async def _get_account_data(self) -> None:
         """Get the account data from the PetKit service."""
-        await self.validate_session()
         _LOGGER.debug("Fetching account data")
         response = await self.req.request(
             method=HTTPMethod.GET,
@@ -252,8 +254,6 @@ class PetKitClient:
 
     async def get_devices_data(self) -> None:
         """Get the devices data from the PetKit servers."""
-        await self.validate_session()
-
         start_time = datetime.now()
         if not self.account_data:
             await self._get_account_data()
@@ -661,6 +661,46 @@ class PetKitClient:
         _LOGGER.info("BLE command sent successfully.")
         return True
 
+    async def get_cloud_video(self, video_url: str) -> dict[str, str | int]:
+        """Get the video m3u8 link from the cloud."""
+        response = await self.req.request(
+            method=HTTPMethod.POST,
+            url=video_url,
+            headers=await self.get_session_id(),
+        )
+        return response[0]
+
+    async def extract_segments_m3u8(self, m3u8_url: str) -> tuple[str, str, list[str]]:
+        """Extract segments from the m3u8 file.
+        :param: m3u8_url: URL of the m3u8 file
+        :return: aes_key, key_iv, segment_lst
+        """
+        # Extract segments from m3u8 file
+        response = await self.req.request(
+            method=HTTPMethod.GET,
+            url=m3u8_url,
+            headers=await self.get_session_id(),
+        )
+        m3u8_obj = m3u8.loads(response[RES_KEY])
+
+        if not m3u8_obj.segments or not m3u8_obj.keys:
+            raise PetkitInvalidResponseFormat("No segments or key found in m3u8 file.")
+
+        # Extract segments from m3u8 file
+        segment_lst = [segment.uri for segment in m3u8_obj.segments]
+        # Extract key_uri and key_iv from m3u8 file
+        key_uri = m3u8_obj.keys[0].uri
+        key_iv = str(m3u8_obj.keys[0].iv)
+
+        # Extract aes_key from video segments
+        response = await self.req.request(
+            method=HTTPMethod.GET,
+            url=key_uri,
+            full_url=True,
+            headers=await self.get_session_id(),
+        )
+        return response[RES_KEY], key_iv, segment_lst
+
     async def send_api_request(
         self,
         device_id: int,
@@ -668,8 +708,6 @@ class PetKitClient:
         setting: dict | None = None,
     ) -> bool:
         """Control the device using the PetKit API."""
-        await self.validate_session()
-
         device = self.petkit_entities.get(device_id, None)
         if not device:
             raise PypetkitError(f"Device with ID {device_id} not found.")
@@ -767,12 +805,13 @@ class PrepReq:
         self,
         method: str,
         url: str,
+        full_url: bool = False,
         params=None,
         data=None,
         headers=None,
     ) -> dict:
         """Make a request to the PetKit API."""
-        _url = "/".join(s.strip("/") for s in [self.base_url, url])
+        _url = url if full_url else "/".join(s.strip("/") for s in [self.base_url, url])
         _headers = {**self.base_headers, **(headers or {})}
         _LOGGER.debug("Request: %s %s", method, _url)
         try:
@@ -798,12 +837,14 @@ class PrepReq:
         ) from e
 
         try:
-
+            if response.content_type == "application/json":
+                response_json = await response.json()
+            else:
+                return {RES_KEY: await response.text()}
         except ContentTypeError:
             raise PetkitInvalidResponseFormat(
                 "Response is not in JSON format"
             ) from None
-
         # Check for errors in the response
         if ERR_KEY in response_json:
             error_code = int(response_json[ERR_KEY].get("code", 0))
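The two new client helpers above work as a pair: `get_cloud_video` posts the cloud video URL and returns a record whose `mediaApi` field points at an m3u8 playlist, and `extract_segments_m3u8` parses that playlist into a decryption key, an IV, and the segment URLs. A minimal sketch of how they might be chained, assuming `client` is an already logged-in `PetKitClient` and `video_url` comes from a `MediaFile.video` field (this mirrors what `DownloadDecryptMedia._get_m3u8_segments` does internally):

```python
# Hedged usage sketch, not part of the package: chain the new 1.9.4 helpers
# to resolve a cloud video URL into its AES key, IV and segment URLs.
async def fetch_segment_urls(client, video_url: str) -> tuple[str, str, list[str]]:
    video_data = await client.get_cloud_video(video_url)  # POST to the cloud video endpoint
    media_api = video_data.get("mediaApi")  # m3u8 playlist URL returned by the cloud
    if not media_api:
        raise ValueError("Missing mediaApi in video data")
    return await client.extract_segments_m3u8(str(media_api))  # (aes_key, key_iv, segments)
```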
pypetkitapi/command.py CHANGED
@@ -9,7 +9,6 @@ import json
 from pypetkitapi.const import (
     ALL_DEVICES,
     D3,
-    D4,
     D4H,
     D4S,
     D4SH,
@@ -145,25 +144,28 @@ class CmdData:
 
 def get_endpoint_manual_feed(device):
     """Get the endpoint for the device"""
-    if device.device_nfo.device_type
-        return PetkitEndpoint.
-
-        return PetkitEndpoint.MANUAL_FEED_FRESH_ELEMENT
-    return PetkitEndpoint.MANUAL_FEED_DUAL
+    if device.device_nfo.device_type in [FEEDER_MINI, FEEDER]:
+        return PetkitEndpoint.MANUAL_FEED_OLD  # Old endpoint snakecase
+    return PetkitEndpoint.MANUAL_FEED_NEW  # New endpoint camelcase
 
 
 def get_endpoint_reset_desiccant(device):
+    """Get the endpoint for the device"""
+    if device.device_nfo.device_type in [FEEDER_MINI, FEEDER]:
+        return PetkitEndpoint.DESICCANT_RESET_OLD  # Old endpoint snakecase
+    return PetkitEndpoint.DESICCANT_RESET_NEW  # New endpoint camelcase
+
+
+def get_endpoint_update_setting(device):
     """Get the endpoint for the device"""
     if device.device_nfo.device_type == FEEDER_MINI:
-        return PetkitEndpoint.
-
-        return PetkitEndpoint.FRESH_ELEMENT_DESICCANT_RESET
-    return PetkitEndpoint.DESICCANT_RESET
+        return PetkitEndpoint.UPDATE_SETTING_FEEDER_MINI
+    return PetkitEndpoint.UPDATE_SETTING
 
 
 ACTIONS_MAP = {
     DeviceCommand.UPDATE_SETTING: CmdData(
-        endpoint=
+        endpoint=lambda device: get_endpoint_update_setting(device),
         params=lambda device, setting: {
             "id": device.id,
             "kv": json.dumps(setting),
@@ -199,16 +201,6 @@ ACTIONS_MAP = {
     ),
     FeederCommand.MANUAL_FEED: CmdData(
         endpoint=lambda device: get_endpoint_manual_feed(device),
-        params=lambda device, setting: {
-            "day": datetime.datetime.now().strftime("%Y%m%d"),
-            "deviceId": device.id,
-            "time": "-1",
-            **setting,
-        },
-        supported_device=[FEEDER, FEEDER_MINI, D3, D4, D4H],
-    ),
-    FeederCommand.MANUAL_FEED_DUAL: CmdData(
-        endpoint=PetkitEndpoint.MANUAL_FEED_DUAL,
         params=lambda device, setting: {
             "day": datetime.datetime.now().strftime("%Y%m%d"),
             "deviceId": device.id,
@@ -216,7 +208,7 @@ ACTIONS_MAP = {
             "time": "-1",
             **setting,
         },
-        supported_device=
+        supported_device=DEVICES_FEEDER,
     ),
     FeederCommand.CANCEL_MANUAL_FEED: CmdData(
         endpoint=lambda device: (
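The manual feed and desiccant reset commands now resolve their endpoint at call time instead of carrying one map entry per feeder generation: `get_endpoint_manual_feed` and `get_endpoint_reset_desiccant` return the legacy snake_case endpoint for the original Feeder/Feeder Mini and the camelCase endpoint for everything else, which is why `FeederCommand.MANUAL_FEED_DUAL` could be folded into `FeederCommand.MANUAL_FEED`. A small illustration of the selection logic, using a hypothetical stub object in place of a real device (the `FEEDER_MINI` constant is assumed to live in `pypetkitapi.const`, as the imports in `command.py` suggest):

```python
# Illustration only: a stub standing in for a real device object.
from types import SimpleNamespace

from pypetkitapi.command import get_endpoint_manual_feed
from pypetkitapi.const import FEEDER_MINI, PetkitEndpoint

legacy_feeder = SimpleNamespace(device_nfo=SimpleNamespace(device_type=FEEDER_MINI))
other_feeder = SimpleNamespace(device_nfo=SimpleNamespace(device_type="some-newer-feeder"))

assert get_endpoint_manual_feed(legacy_feeder) == PetkitEndpoint.MANUAL_FEED_OLD  # "save_dailyfeed"
assert get_endpoint_manual_feed(other_feeder) == PetkitEndpoint.MANUAL_FEED_NEW  # "saveDailyFeed"
```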
pypetkitapi/const.py CHANGED
@@ -93,6 +93,20 @@ LOGIN_DATA = {
 }
 
 
+class MediaType(StrEnum):
+    """Record Type constants"""
+
+    VIDEO = "avi"
+    IMAGE = "jpg"
+
+
+class VideoType(StrEnum):
+    """Record Type constants"""
+
+    HIGHLIGHT = "highlight"
+    PLAYBACK = "playback"
+
+
 class RecordType(StrEnum):
     """Record Type constants"""
 
@@ -100,9 +114,16 @@ class RecordType(StrEnum):
     FEED = "feed"
     MOVE = "move"
     PET = "pet"
+    TOILETING = "toileting"
 
 
-RecordTypeLST = [
+RecordTypeLST = [
+    RecordType.EAT,
+    RecordType.FEED,
+    RecordType.MOVE,
+    RecordType.PET,
+    RecordType.TOILETING,
+]
 
 
 class PetkitEndpoint(StrEnum):
@@ -121,6 +142,7 @@ class PetkitEndpoint(StrEnum):
     GET_DEVICE_RECORD = "getDeviceRecord"
     GET_DEVICE_RECORD_RELEASE = "getDeviceRecordRelease"
     UPDATE_SETTING = "updateSettings"
+    UPDATE_SETTING_FEEDER_MINI = "update"
 
     # Bluetooth
     BLE_AS_RELAY = "ble/ownSupportBleDevices"
@@ -140,25 +162,30 @@ class PetkitEndpoint(StrEnum):
     GET_PET_OUT_GRAPH = "getPetOutGraph"
 
     # Video features
+    GET_M3U8 = "getM3u8"
     CLOUD_VIDEO = "cloud/video"
     GET_DOWNLOAD_M3U8 = "getDownloadM3u8"
-    GET_M3U8 = "getM3u8"
 
     # Feeders
     REPLENISHED_FOOD = "added"
     FRESH_ELEMENT_CALIBRATION = "food_reset"
     FRESH_ELEMENT_CANCEL_FEED = "cancel_realtime_feed"
-
-
-    FRESH_ELEMENT_DESICCANT_RESET = "feeder/desiccant_reset"
+    DESICCANT_RESET_OLD = "desiccant_reset"
+    DESICCANT_RESET_NEW = "desiccantReset"
     CALL_PET = "callPet"
     CANCEL_FEED = "cancelRealtimeFeed"
-
-
-    MANUAL_FEED_DUAL = "saveDailyFeed"
+    MANUAL_FEED_OLD = "save_dailyfeed"  # For Feeder/FeederMini
+    MANUAL_FEED_NEW = "saveDailyFeed"  # For all other feeders
     DAILY_FEED_AND_EAT = "dailyFeedAndEat"  # D3
     FEED_STATISTIC = "feedStatistic"  # D4
     DAILY_FEED = "dailyFeeds"  # D4S
     REMOVE_DAILY_FEED = "removeDailyFeed"
     RESTORE_DAILY_FEED = "restoreDailyFeed"
     SAVE_FEED = "saveFeed"  # For Feeding plan
+
+    # Schedule
+    SCHEDULE = "schedule/schedules"
+    SCHEDULE_SAVE = "schedule/save"
+    SCHEDULE_REMOVE = "schedule/remove"
+    SCHEDULE_COMPLETE = "schedule/complete"
+    SCHEDULE_HISTORY = "schedule/userHistorySchedules"
pypetkitapi/exceptions.py CHANGED
@@ -11,6 +11,10 @@ class PetkitTimeoutError(PypetkitError):
     """Class for PyPetkit timeout exceptions."""
 
 
+class PetkitSessionError(PypetkitError):
+    """Class for PyPetkit connection exceptions."""
+
+
 class PetkitSessionExpiredError(PypetkitError):
     """Class for PyPetkit connection exceptions."""
 
@@ -18,6 +22,11 @@ class PetkitSessionExpiredError(PypetkitError):
 class PetkitAuthenticationUnregisteredEmailError(PypetkitError):
     """Exception raised when the email is not registered with Petkit."""
 
+    def __init__(self, region: str):
+        """Initialize the exception."""
+        self.message = "The email you provided is not registered on Petkit's servers. Please check your email, or you are using the correct region."
+        super().__init__(self.message)
+
 
 class PetkitRegionalServerNotFoundError(PypetkitError):
     """Exception raised when the specified region server is not found."""
pypetkitapi/media.py ADDED
@@ -0,0 +1,423 @@
+"""Module to manage media files from PetKit devices."""
+
+from __future__ import annotations
+
+import asyncio
+from dataclasses import dataclass
+from datetime import datetime
+import logging
+from pathlib import Path
+from typing import Any
+from urllib.parse import parse_qs, urlparse
+
+from aiofiles import open as aio_open
+import aiohttp
+from Crypto.Cipher import AES
+from Crypto.Util.Padding import unpad
+
+from pypetkitapi import Feeder, Litter, PetKitClient, RecordType
+from pypetkitapi.const import D4H, D4SH, T5, T6, RecordTypeLST
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@dataclass
+class MediaFile:
+    """Dataclass MediaFile.
+    Represents a media file from a PetKit device.
+    """
+
+    event_id: str
+    event_type: RecordType
+    device_id: int
+    user_id: str
+    image: str | None
+    video: str | None
+    filepath: str
+    aes_key: str
+    timestamp: int
+    is_available: bool = False
+
+
+class MediaManager:
+    """Class to manage media files from PetKit devices."""
+
+    async def get_all_media_files(
+        self, devices: list[Feeder | Litter]
+    ) -> list[MediaFile]:
+        """Get all media files from all devices and return a list of MediaFile."""
+        media_files: list[MediaFile] = []
+
+        for device in devices:
+            if isinstance(device, Feeder):
+                if device.device_nfo and device.device_nfo.device_type in [D4SH, D4H]:
+                    media_files.extend(self._process_feeder(device))
+                else:
+                    _LOGGER.debug(
+                        "Feeder %s does not support media file extraction",
+                        device.name,
+                    )
+            elif isinstance(device, Litter):
+                if device.device_nfo and device.device_nfo.device_type in [T5, T6]:
+                    media_files.extend(self._process_litter(device))
+                else:
+                    _LOGGER.debug(
+                        "Litter %s does not support media file extraction",
+                        device.name,
+                    )
+
+        return media_files
+
+    def _process_feeder(self, feeder: Feeder) -> list[MediaFile]:
+        """Process media files for a Feeder device."""
+        media_files: list[MediaFile] = []
+        records = feeder.device_records
+
+        device_id = (
+            feeder.device_nfo.device_id
+            if feeder.device_nfo and feeder.device_nfo.device_type
+            else None
+        )
+        if device_id is None:
+            raise ValueError("Missing device ID for feeder")
+
+        if not records:
+            return media_files
+
+        for record_type in RecordTypeLST:
+            record_list = getattr(records, record_type, [])
+            for record in record_list:
+                media_files.extend(
+                    self._process_feeder_record(
+                        record, RecordType(record_type), device_id
+                    )
+                )
+
+        return media_files
+
+    def _process_feeder_record(
+        self, record, record_type: RecordType, device_id: int
+    ) -> list[MediaFile]:
+        """Process individual feeder records."""
+        media_files: list[MediaFile] = []
+        user_id = record.user_id
+
+        if not record.items:
+            return media_files
+
+        for item in record.items:
+            timestamp = self._get_timestamp(item)
+            date_str = (
+                datetime.fromtimestamp(timestamp).strftime("%Y%m%d")
+                if timestamp
+                else "unknown"
+            )
+            if not item.event_id:
+                _LOGGER.error("Missing event_id for record item")
+                continue
+            if not user_id:
+                _LOGGER.error("Missing user_id for record item")
+                continue
+            if not item.aes_key:
+                _LOGGER.error("Missing aes_key for record item")
+                continue
+            if timestamp is None:
+                _LOGGER.error("Missing timestamp for record item")
+                continue
+
+            filepath = f"{device_id}/{date_str}/{record_type.name.lower()}"
+            media_files.append(
+                MediaFile(
+                    event_id=item.event_id,
+                    event_type=record_type,
+                    device_id=device_id,
+                    user_id=user_id,
+                    image=item.preview,
+                    video=self.construct_video_url(item.media_api, user_id),
+                    filepath=filepath,
+                    aes_key=item.aes_key,
+                    timestamp=self._get_timestamp(item),
+                )
+            )
+        return media_files
+
+    def _process_litter(self, litter: Litter) -> list[MediaFile]:
+        """Process media files for a Litter device."""
+        media_files: list[MediaFile] = []
+        records = litter.device_records
+
+        if not records:
+            return media_files
+
+        for record in records:
+            timestamp = record.timestamp or None
+            date_str = (
+                datetime.fromtimestamp(timestamp).strftime("%Y%m%d")
+                if timestamp
+                else "unknown"
+            )
+            if not record.event_id:
+                _LOGGER.error("Missing event_id for record item")
+                continue
+            if not record.device_id:
+                _LOGGER.error("Missing event_id for record item")
+                continue
+            if not record.user_id:
+                _LOGGER.error("Missing user_id for record item")
+                continue
+            if not record.aes_key:
+                _LOGGER.error("Missing aes_key for record item")
+                continue
+            if record.timestamp is None:
+                _LOGGER.error("Missing timestamp for record item")
+                continue
+
+            filepath = f"{record.device_id}/{date_str}/toileting"
+            media_files.append(
+                MediaFile(
+                    event_id=record.event_id,
+                    event_type=RecordType.TOILETING,
+                    device_id=record.device_id,
+                    user_id=record.user_id,
+                    image=record.preview,
+                    video=self.construct_video_url(record.media_api, record.user_id),
+                    filepath=filepath,
+                    aes_key=record.aes_key,
+                    timestamp=record.timestamp,
+                )
+            )
+        return media_files
+
+    @staticmethod
+    def construct_video_url(media_url: str | None, user_id: str | None) -> str | None:
+        """Construct the video URL."""
+        if not media_url or not user_id:
+            return None
+        params = parse_qs(urlparse(media_url).query)
+        param_dict = {k: v[0] for k, v in params.items()}
+        return f"/d4sh/cloud/video?startTime={param_dict.get("startTime")}&deviceId={param_dict.get("deviceId")}&userId={user_id}&mark={param_dict.get("mark")}"
+
+    @staticmethod
+    def _get_timestamp(item) -> int:
+        """Extract timestamp from a record item and raise an exception if it is None."""
+        timestamp = (
+            item.timestamp
+            or item.completed_at
+            or item.eat_start_time
+            or item.eat_end_time
+            or item.start_time
+            or item.end_time
+            or item.time
+            or None
+        )
+        if timestamp is None:
+            raise ValueError("Can't find timestamp in record item")
+        return timestamp
+
+
+class DownloadDecryptMedia:
+    """Class to download and decrypt media files from PetKit devices."""
+
+    file_data: MediaFile
+
+    def __init__(self, download_path: Path, client: PetKitClient):
+        """Initialize the class."""
+        self.download_path = download_path
+        self.client = client
+
+    async def get_fpath(self, file_name: str) -> Path:
+        """Return the full path of the file."""
+        subdir = ""
+        if file_name.endswith(".jpg"):
+            subdir = "snapshot"
+        elif file_name.endswith(".avi"):
+            subdir = "video"
+        return Path(self.download_path / self.file_data.filepath / subdir / file_name)
+
+    async def download_file(self, file_data: MediaFile) -> None:
+        """Get image and video file"""
+        self.file_data = file_data
+
+        if self.file_data.image:
+            # Download image file
+            await self._get_file(
+                self.file_data.image,
+                self.file_data.aes_key,
+                f"{self.file_data.event_id}.jpg",
+            )
+
+        if self.file_data.video:
+            # Download video file
+            await self._get_video_m3u8()
+
+    async def _get_video_m3u8(self) -> None:
+        """Iterate through m3u8 file and return all the ts file urls"""
+        aes_key, iv_key, segments_lst = await self._get_m3u8_segments()
+
+        segment_files = []
+        for index, segment in enumerate(segments_lst, start=1):
+            segment_file = await self._get_file(
+                segment, aes_key, f"{index}_{self.file_data.event_id}.avi"
+            )
+            if segment_file:
+                segment_files.append(
+                    await self.get_fpath(f"{index}_{self.file_data.event_id}.avi")
+                )
+
+        if len(segment_files) > 1:
+            _LOGGER.debug("Concatenating segments %s", len(segment_files))
+            await self._concat_segments(segment_files, f"{self.file_data.event_id}.avi")
+        elif len(segment_files) == 1:
+            _LOGGER.debug("Single file segment, no need to concatenate")
+
+    async def _get_m3u8_segments(self) -> tuple[str, str, list[str]]:
+        """Extract the segments from a m3u8 file.
+        :return: Tuple of AES key, IV key, and list of segment URLs
+        """
+        if not self.file_data.video:
+            raise ValueError("Missing video URL")
+        video_data = await self.client.get_cloud_video(self.file_data.video)
+
+        media_api = video_data.get("mediaApi", None)
+        if not media_api:
+            _LOGGER.error("Missing mediaApi in video data")
+            raise ValueError("Missing mediaApi in video data")
+        return await self.client.extract_segments_m3u8(str(media_api))
+
+    async def _get_file(self, url: str, aes_key: str, full_filename: str) -> bool:
+        """Download a file from a URL and decrypt it."""
+
+        full_file_path = await self.get_fpath(full_filename)
+        if full_file_path.exists():
+            _LOGGER.debug("File already exist : %s don't re-download it", full_filename)
+            return True
+
+        # Download the file
+        async with aiohttp.ClientSession() as session, session.get(url) as response:
+            if response.status != 200:
+                _LOGGER.error(
+                    "Failed to download %s, status code: %s", url, response.status
+                )
+                return False
+
+            content = await response.read()
+
+        encrypted_file_path = await self._save_file(content, f"{full_filename}.enc")
+        # Decrypt the image
+        decrypted_data = await self._decrypt_file(encrypted_file_path, aes_key)
+
+        if decrypted_data:
+            _LOGGER.debug("Decrypt was successful")
+            await self._save_file(decrypted_data, full_filename)
+            return True
+        return False
+
+    async def _save_file(self, content: bytes, filename: str) -> Path:
+        """Save content to a file asynchronously and return the file path."""
+        file_path = await self.get_fpath(filename)
+        try:
+            # Ensure the directory exists
+            file_path.parent.mkdir(parents=True, exist_ok=True)
+
+            async with aio_open(file_path, "wb") as file:
+                await file.write(content)
+            _LOGGER.debug("Save file OK : %s", file_path)
+        except PermissionError as e:
+            _LOGGER.error("Save file, permission denied %s: %s", file_path, e)
+        except FileNotFoundError as e:
+            _LOGGER.error("Save file, file/folder not found %s: %s", file_path, e)
+        except OSError as e:
+            _LOGGER.error("Save file, error saving file %s: %s", file_path, e)
+        except Exception as e:  # noqa: BLE001
+            _LOGGER.error(
+                "Save file, unexpected error saving file %s: %s", file_path, e
+            )
+        return file_path
+
+    @staticmethod
+    async def _decrypt_file(file_path: Path, aes_key: str) -> bytes | None:
+        """Decrypt a file using AES encryption.
+        :param file_path: Path to the encrypted file.
+        :param aes_key: AES key used for decryption.
+        :return: Decrypted bytes data.
+        """
+        aes_key = aes_key.removesuffix("\n")
+        key_bytes: bytes = aes_key.encode("utf-8")
+        iv: bytes = b"\x61" * 16
+        cipher: Any = AES.new(key_bytes, AES.MODE_CBC, iv)
+
+        async with aio_open(file_path, "rb") as encrypted_file:
+            encrypted_data: bytes = await encrypted_file.read()
+
+        decrypted_data: bytes = cipher.decrypt(encrypted_data)
+
+        try:
+            decrypted_data = unpad(decrypted_data, AES.block_size)
+        except ValueError as e:
+            _LOGGER.debug("Warning: Padding error occurred, ignoring error: %s", e)
+
+        if Path(file_path).exists():
+            Path(file_path).unlink()
+        return decrypted_data
+
+    async def _concat_segments(self, ts_files: list[Path], output_file):
+        """Concatenate a list of .ts segments into a single output file without using a temporary file.
+
+        :param ts_files: List of absolute paths of .ts files
+        :param output_file: Path of the output file (e.g., "output.mp4")
+        """
+        full_output_file = await self.get_fpath(output_file)
+        if full_output_file.exists():
+            _LOGGER.debug(
+                "Output file already exists: %s, skipping concatenation.", output_file
+            )
+            return
+
+        # Build the argument for `ffmpeg` with the files formatted for the command line
+        concat_input = "|".join(str(file) for file in ts_files)
+        command = [
+            "ffmpeg",
+            "-i",
+            f"concat:{concat_input}",
+            "-c",
+            "copy",
+            "-bsf:a",
+            "aac_adtstoasc",
+            str(full_output_file),
+        ]
+
+        try:
+            # Run the subprocess asynchronously
+            process = await asyncio.create_subprocess_exec(
+                *command,
+                stdout=asyncio.subprocess.PIPE,
+                stderr=asyncio.subprocess.PIPE,
+            )
+            stdout, stderr = await process.communicate()
+
+            if process.returncode == 0:
+                _LOGGER.debug("File successfully concatenated: %s", full_output_file)
+                await self._delete_segments(ts_files)
+            else:
+                _LOGGER.error(
+                    "Error during concatenation: %s\nStdout: %s\nStderr: %s",
+                    process.returncode,
+                    stdout.decode().strip(),
+                    stderr.decode().strip(),
+                )
+        except FileNotFoundError as e:
+            _LOGGER.error("Error during concatenation: %s", e)
+        except OSError as e:
+            _LOGGER.error("OS error during concatenation: %s", e)
+
+    async def _delete_segments(self, ts_files: list[Path]) -> None:
+        """Delete all segment files after concatenation."""
+        for file in ts_files:
+            if file.exists():
+                try:
+                    file.unlink()
+                    _LOGGER.debug("Deleted segment file: %s", file)
+                except OSError as e:
+                    _LOGGER.debug("Error deleting segment file %s: %s", file, e)
+            else:
+                _LOGGER.debug("Segment file not found: %s", file)
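Taken together, `MediaManager` flattens the per-device record containers into `MediaFile` entries, and `DownloadDecryptMedia` fetches each one, decrypts it (AES-CBC with the per-event key) and, for multi-segment videos, concatenates the pieces via `ffmpeg`. A hedged sketch of how the two classes might be combined, assuming `client.petkit_entities` holds the `Feeder`/`Litter` objects populated by `get_devices_data` (only calls shown in this diff are used):

```python
# Hedged usage sketch, not part of the package.
from pathlib import Path

from pypetkitapi import DownloadDecryptMedia, MediaManager, PetKitClient


async def download_all_media(client: PetKitClient, download_path: Path) -> None:
    manager = MediaManager()
    devices = list(client.petkit_entities.values())  # assumption: Feeder/Litter entities
    media_files = await manager.get_all_media_files(devices)

    downloader = DownloadDecryptMedia(download_path, client)
    for media_file in media_files:
        # Saves files under <device_id>/<date>/<event type>/snapshot|video/
        await downloader.download_file(media_file)
```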
pypetkitapi/schedule_container.py ADDED
@@ -0,0 +1,67 @@
+"""Dataclasses for Schedule."""
+
+from datetime import datetime
+from typing import Any, ClassVar
+
+from pydantic import BaseModel, Field
+
+from pypetkitapi.const import DEVICE_DATA, PetkitEndpoint
+from pypetkitapi.containers import Device
+
+
+class Owner(BaseModel):
+    """Dataclass for Owner Data."""
+
+    device_count: int | None = Field(0, alias="deviceCount")
+    id: str | None = None
+    pet_count: int | None = Field(0, alias="petCount")
+    user_count: int | None = Field(0, alias="userCount")
+
+
+class Type(BaseModel):
+    """Dataclass for Type Data."""
+
+    enable: int | None = None
+    id: str | None = None
+    img: str | None = None
+    is_custom: int | None = Field(0, alias="isCustom")
+    name: str | None = None
+    priority: int | None = None
+    repeat_option: str | None = Field(alias="repeatOption")
+    rpt: str | None = None
+    schedule_appoint: str | None = Field(alias="scheduleAppoint")
+    with_device_type: str | None = Field(alias="withDeviceType")
+    with_pet: int | None = Field(0, alias="withPet")
+
+
+class Schedule(BaseModel):
+    """Dataclass for Schedule Data."""
+
+    data_type: ClassVar[str] = DEVICE_DATA
+
+    alarm_before: int | None = Field(0, alias="alarmBefore")
+    created_at: datetime | None = Field(None, alias="createdAt")
+    device_id: str | None = Field(None, alias="deviceId")
+    device_type: str | None = Field(None, alias="deviceType")
+    id: str | None = None
+    name: str | None = None
+    owner: Owner | None = None
+    repeat: str | None = None
+    status: int | None = None
+    time: datetime | None = None
+    type: Type | None = None
+    user_custom_id: int | None = Field(0, alias="userCustomId")
+
+    @classmethod
+    def get_endpoint(cls, device_type: str) -> str:
+        """Get the endpoint URL for the given device type."""
+        return PetkitEndpoint.SCHEDULE
+
+    @classmethod
+    def query_param(
+        cls,
+        device: Device,
+        device_data: Any | None = None,
+    ) -> dict:
+        """Generate query parameters including request_date."""
+        return {"limit": 20}
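`Schedule` is a plain pydantic model, so a payload from the new `schedule/schedules` endpoint can be validated directly. A small, hypothetical example (the field values are invented for illustration; the camelCase keys follow the aliases declared above):

```python
# Hypothetical payload for illustration only.
from pypetkitapi.schedule_container import Schedule

payload = {
    "id": "12345",
    "deviceId": "987654",
    "deviceType": "t5",
    "name": "Morning cleaning",
    "repeat": "1,2,3,4,5",
    "alarmBefore": 0,
}
schedule = Schedule(**payload)
print(schedule.device_id, schedule.name)  # "987654" "Morning cleaning"
```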
{pypetkitapi-1.9.3.dist-info → pypetkitapi-1.9.4.dist-info}/METADATA CHANGED
@@ -1,8 +1,7 @@
 Metadata-Version: 2.3
 Name: pypetkitapi
-Version: 1.9.3
+Version: 1.9.4
 Summary: Python client for PetKit API
-Home-page: https://github.com/Jezza34000/pypetkit
 License: MIT
 Author: Jezza34000
 Author-email: info@mail.com
@@ -14,8 +13,10 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: aiofiles (>=24.1.0,<25.0.0)
 Requires-Dist: aiohttp (>=3.10.10,<4.0.0)
+Requires-Dist: m3u8 (>=6.0)
 Requires-Dist: pycryptodome (>=3.19.1,<4.0.0)
 Requires-Dist: pydantic (>=1.10.18,<3.0.0)
+Project-URL: Homepage, https://github.com/Jezza34000/pypetkit
 Description-Content-Type: text/markdown
 
 # Petkit API Client
@@ -110,7 +111,9 @@ async def main():
     # simple hopper :
     await client.send_api_request(123456789, FeederCommand.MANUAL_FEED, {"amount": 1})
     # dual hopper :
-    await client.send_api_request(123456789, FeederCommand.
+    await client.send_api_request(123456789, FeederCommand.MANUAL_FEED, {"amount1": 2})
+    # or
+    await client.send_api_request(123456789, FeederCommand.MANUAL_FEED, {"amount2": 2})
 
 ### Example 3 : Start the cleaning process
 ### Device_ID, Command, Payload
pypetkitapi-1.9.4.dist-info/RECORD ADDED
@@ -0,0 +1,18 @@
+pypetkitapi/__init__.py,sha256=kp58MpP6LwBcz2o1IpOjgN7c8bm4qtH361l4A0tWA4M,1607
+pypetkitapi/client.py,sha256=0vM-fsu_cGE2_XKd8kJFQdJTXyBEGvSt3zHjFU13dns,32169
+pypetkitapi/command.py,sha256=cMCUutZCQo9Ddvjl_FYR5UjU_CqFz1iyetMznYwjpzM,7500
+pypetkitapi/const.py,sha256=g_oz73Emiw7nMYi3ANaUkUVLNtdacST7weyui5FviYg,4516
+pypetkitapi/containers.py,sha256=oJR22ZruMr-0IRgiucdnj_nutOH59MKvmaFTwLJNiJI,4635
+pypetkitapi/exceptions.py,sha256=cBLj2kP70yd6rfWnOXTCXo1a2TXca8QtxiRMa1UrttU,1644
+pypetkitapi/feeder_container.py,sha256=ZGJhgqP-gjTFB2q91XoyZQ_G1S5cAY37JoqqHbzoanU,14640
+pypetkitapi/litter_container.py,sha256=-z2BtdtRg8RyLJzJYY3AIACs9GGZ0C64hVhW4do6yQo,19172
+pypetkitapi/media.py,sha256=a-ZhM7khu50Htn1jYNnsQNmwhpG0CnT_QW752Is-J4Q,15611
+pypetkitapi/purifier_container.py,sha256=ssyIxhNben5XJ4KlQTXTrtULg2ji6DqHqjzOq08d1-I,2491
+pypetkitapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pypetkitapi/schedule_container.py,sha256=OjLAY6FY-g14JNJJnYMNFV5ZtdkjUzNBit1VUiiZKnQ,2053
+pypetkitapi/utils.py,sha256=z7325kcJQUburnF28HSXrJMvY_gY9007K73Zwxp-4DQ,743
+pypetkitapi/water_fountain_container.py,sha256=5J0b-fDZYcFLNX2El7fifv8H6JMhBCt-ttxSow1ozRQ,6787
+pypetkitapi-1.9.4.dist-info/LICENSE,sha256=u5jNkZEn6YMrtN4Kr5rU3TcBJ5-eAt0qMx4JDsbsnzM,1074
+pypetkitapi-1.9.4.dist-info/METADATA,sha256=1OcbAB1WBj1uCCVYeqLSOPY7Rj_uVBeivBAPm7sTQNk,6255
+pypetkitapi-1.9.4.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+pypetkitapi-1.9.4.dist-info/RECORD,,
pypetkitapi/medias.py DELETED
@@ -1,199 +0,0 @@
-"""Module to handle media files from PetKit devices."""
-
-from dataclasses import dataclass
-import logging
-from pathlib import Path
-import re
-from typing import Any
-
-from aiofiles import open as aio_open
-import aiohttp
-from Crypto.Cipher import AES
-from Crypto.Util.Padding import unpad
-
-from pypetkitapi.feeder_container import Feeder, RecordsType
-
-_LOGGER = logging.getLogger(__name__)
-
-
-@dataclass
-class MediasFiles:
-    """Dataclass for media files.
-    Subclass of many other device dataclasses.
-    """
-
-    filename: str
-    record_type: str
-    url: str
-    aes_key: str
-    timestamp: str | None = None
-
-
-async def extract_filename_from_url(url: str) -> str:
-    """Extract the filename from the URL and format it as requested."""
-    match = re.search(r"https?://[^/]+(/[^?]+)", url)
-    if match:
-        path = match.group(1)
-        formatted_filename = path.replace("/", "_").lstrip("_").lower()
-        return f"{formatted_filename}.jpg"
-    raise ValueError(f"Failed to extract filename from URL: {url}")
-
-
-class MediaHandler:
-    """Class to find media files from PetKit devices."""
-
-    def __init__(self, file_path: Path):
-        """Initialize the class."""
-        self.media_download_decode = MediaDownloadDecode(file_path)
-        self.media_files: list[MediasFiles] = []
-
-    async def get_last_image(self, device: Feeder) -> list[MediasFiles]:
-        """Process device records and extract media info."""
-        record_types = ["eat", "feed", "move", "pet"]
-        self.media_files = []
-
-        if not isinstance(device, Feeder):
-            _LOGGER.error("Device is not a Feeder")
-            return []
-
-        if not device.device_records:
-            _LOGGER.error("No device records found for feeder")
-            return []
-
-        for record_type in record_types:
-            records = getattr(device.device_records, record_type, None)
-            if records:
-                self.media_files.extend(
-                    await self._process_records(records, record_type)
-                )
-        return self.media_files
-
-    async def _process_records(
-        self, records: RecordsType, record_type: str
-    ) -> list[MediasFiles]:
-        """Process individual records and return media info."""
-        media_files = []
-
-        async def process_item(record_items):
-            last_item = next(
-                (
-                    item
-                    for item in reversed(record_items)
-                    if item.preview and item.aes_key
-                ),
-                None,
-            )
-            if last_item:
-                filename = await extract_filename_from_url(last_item.preview)
-                await self.media_download_decode.get_file(
-                    last_item.preview, last_item.aes_key
-                )
-                timestamp = (
-                    last_item.eat_start_time
-                    or last_item.completed_at
-                    or last_item.timestamp
-                    or None
-                )
-                media_files.append(
-                    MediasFiles(
-                        record_type=record_type,
-                        filename=filename,
-                        url=last_item.preview,
-                        aes_key=last_item.aes_key,
-                        timestamp=timestamp,
-                    )
-                )
-
-        for record in records:
-            if hasattr(record, "items"):
-                await process_item(record.items)  # type: ignore[attr-defined]
-
-        return media_files
-
-
-class MediaDownloadDecode:
-    """Class to download"""
-
-    def __init__(self, download_path: Path):
-        """Initialize the class."""
-        self.download_path = download_path
-
-    async def get_file(self, url: str, aes_key: str) -> bool:
-        """Download a file from a URL and decrypt it."""
-        # Check if the file already exists
-        filename = await extract_filename_from_url(url)
-        full_file_path = Path(self.download_path) / filename
-        if full_file_path.exists():
-            _LOGGER.debug("File already exist : %s don't need to download it", filename)
-            return True
-
-        # Download the file
-        async with aiohttp.ClientSession() as session, session.get(url) as response:
-            if response.status != 200:
-                _LOGGER.error(
-                    "Failed to download %s, status code: %s", url, response.status
-                )
-                return False
-
-            content = await response.read()
-
-        encrypted_file_path = await self._save_file(content, f"{filename}.enc")
-        # Decrypt the image
-        decrypted_data = await self._decrypt_image_from_file(
-            encrypted_file_path, aes_key
-        )
-
-        if decrypted_data:
-            _LOGGER.debug("Decrypt was successful")
-            await self._save_file(decrypted_data, filename)
-            return True
-        return False
-
-    async def _save_file(self, content: bytes, filename: str) -> Path:
-        """Save content to a file asynchronously and return the file path."""
-        file_path = Path(self.download_path) / filename
-        try:
-            # Ensure the directory exists
-            file_path.parent.mkdir(parents=True, exist_ok=True)
-
-            async with aio_open(file_path, "wb") as file:
-                await file.write(content)
-            _LOGGER.debug("Save file OK : %s", file_path)
-        except PermissionError as e:
-            _LOGGER.error("Save file, permission denied %s: %s", file_path, e)
-        except FileNotFoundError as e:
-            _LOGGER.error("Save file, file/folder not found %s: %s", file_path, e)
-        except OSError as e:
-            _LOGGER.error("Save file, error saving file %s: %s", file_path, e)
-        except Exception as e:  # noqa: BLE001
-            _LOGGER.error(
-                "Save file, unexpected error saving file %s: %s", file_path, e
-            )
-        return file_path
-
-    @staticmethod
-    async def _decrypt_image_from_file(file_path: Path, aes_key: str) -> bytes | None:
-        """Decrypt an image from a file using AES encryption.
-        :param file_path: Path to the encrypted image file.
-        :param aes_key: AES key used for decryption.
-        :return: Decrypted image data.
-        """
-        try:
-            if aes_key.endswith("\n"):
-                aes_key = aes_key[:-1]
-            key_bytes: bytes = aes_key.encode("utf-8")
-            iv: bytes = b"\x61" * 16
-            cipher: Any = AES.new(key_bytes, AES.MODE_CBC, iv)
-
-            async with aio_open(file_path, "rb") as encrypted_file:
-                encrypted_data: bytes = await encrypted_file.read()
-
-            decrypted_data: bytes = unpad(
-                cipher.decrypt(encrypted_data), AES.block_size  # type: ignore[attr-defined]
-            )
-        except Exception as e:  # noqa: BLE001
-            logging.error("Error decrypting image from file %s: %s", file_path, e)
-            return None
-        if Path(file_path).exists():
-            Path(file_path).unlink()
-        return decrypted_data
pypetkitapi-1.9.3.dist-info/RECORD DELETED
@@ -1,17 +0,0 @@
-pypetkitapi/__init__.py,sha256=dQiuXe1aOwEGmW27csDtKRVc4vhXrqvsW-0ElcbrTRY,1562
-pypetkitapi/client.py,sha256=oi1GhGIcvWMP5J9ueN2Y1xDX-Wm91b7LfjTVSe_plk4,30357
-pypetkitapi/command.py,sha256=G7AEtUcaK-lcRliNf4oUxPkvDO_GNBkJ-ZUcOo7DGHM,7697
-pypetkitapi/const.py,sha256=xDsF6sdxqQPy0B0Qhpe0Nn5xrkDjfo_omL4XL_oXFDE,4050
-pypetkitapi/containers.py,sha256=oJR22ZruMr-0IRgiucdnj_nutOH59MKvmaFTwLJNiJI,4635
-pypetkitapi/exceptions.py,sha256=fuTLT6Iw2_kA7eOyNJPf59vQkgfByhAnTThY4lC0Rt0,1283
-pypetkitapi/feeder_container.py,sha256=ZGJhgqP-gjTFB2q91XoyZQ_G1S5cAY37JoqqHbzoanU,14640
-pypetkitapi/litter_container.py,sha256=-z2BtdtRg8RyLJzJYY3AIACs9GGZ0C64hVhW4do6yQo,19172
-pypetkitapi/medias.py,sha256=ZFdiPj24crYYFwKBUqlxKhfKGrW2uXoXzDl2vWukZ-A,7036
-pypetkitapi/purifier_container.py,sha256=ssyIxhNben5XJ4KlQTXTrtULg2ji6DqHqjzOq08d1-I,2491
-pypetkitapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pypetkitapi/utils.py,sha256=z7325kcJQUburnF28HSXrJMvY_gY9007K73Zwxp-4DQ,743
-pypetkitapi/water_fountain_container.py,sha256=5J0b-fDZYcFLNX2El7fifv8H6JMhBCt-ttxSow1ozRQ,6787
-pypetkitapi-1.9.3.dist-info/LICENSE,sha256=u5jNkZEn6YMrtN4Kr5rU3TcBJ5-eAt0qMx4JDsbsnzM,1074
-pypetkitapi-1.9.3.dist-info/METADATA,sha256=p0SF9NMmun43lj81I0Sbc24o7JwFhuQgHoOK4tTquC8,6115
-pypetkitapi-1.9.3.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
-pypetkitapi-1.9.3.dist-info/RECORD,,
{pypetkitapi-1.9.3.dist-info → pypetkitapi-1.9.4.dist-info}/LICENSE
File without changes