emerald-hws 0.0.16__py3-none-any.whl → 0.0.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emerald_hws/emeraldhws.py +191 -118
- {emerald_hws-0.0.16.dist-info → emerald_hws-0.0.18.dist-info}/METADATA +1 -1
- emerald_hws-0.0.18.dist-info/RECORD +7 -0
- emerald_hws-0.0.16.dist-info/RECORD +0 -7
- {emerald_hws-0.0.16.dist-info → emerald_hws-0.0.18.dist-info}/WHEEL +0 -0
- {emerald_hws-0.0.16.dist-info → emerald_hws-0.0.18.dist-info}/top_level.txt +0 -0
emerald_hws/emeraldhws.py
CHANGED
@@ -40,23 +40,29 @@ class EmeraldHWS():
         self.logger = logging.getLogger(__name__)
         self.update_callback = update_callback
         self._state_lock = threading.RLock()  # Thread-safe lock for state operations
-
+        self._connection_event = threading.Event()  # Event to signal when MQTT connection is established
+        self._connect_lock = threading.Lock()  # Lock to prevent concurrent connect() calls
+        self._mqtt_lock = threading.RLock()  # Lock to protect MQTT client lifecycle operations
+        self._is_connected = False  # Flag to track connection state
+        self.mqttClient = None  # Initialize to None
+
         # Convert minutes to seconds for internal use
         self.connection_timeout = connection_timeout_minutes * 60.0
         self.health_check_interval = health_check_minutes * 60.0 if health_check_minutes > 0 else 0
         self.last_message_time = None
         self.health_check_timer = None
-
+        self.reconnect_timer = None
+
         # Connection state tracking
         self.connection_state = "initial"  # possible states: initial, connected, failed
         self.consecutive_failures = 0
         self.max_backoff_seconds = 60  # Maximum backoff of 1 minute
-
+
         # Ensure reasonable minimum values (e.g., at least 5 minutes for connection timeout)
         if connection_timeout_minutes < 5 and connection_timeout_minutes != 0:
             self.logger.warning("emeraldhws: Connection timeout too short, setting to minimum of 5 minutes")
             self.connection_timeout = 5 * 60.0
-
+
         # Ensure reasonable minimum values for health check (e.g., at least 5 minutes)
         if 0 < health_check_minutes < 5:
             self.logger.warning("emeraldhws: Health check interval too short, setting to minimum of 5 minutes")
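Note: `_mqtt_lock` is an `RLock` rather than a plain `Lock` because the same thread acquires it twice on the reconnect path: `reconnectMQTT()` holds it while it calls `connectMQTT()`, which takes it again. A minimal standalone sketch of why the reentrant variant is needed (not part of the package):

    import threading

    mqtt_lock = threading.RLock()  # reentrant: the owning thread may acquire it again

    def connect():
        with mqtt_lock:        # second acquisition by the same thread
            print("building new client")

    def reconnect():
        with mqtt_lock:        # first acquisition
            print("stopping old client")
            connect()          # a plain Lock would deadlock here

    reconnect()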
@@ -117,84 +123,67 @@ class EmeraldHWS():
         """ Stops an existing MQTT connection and creates a new one
         :param reason: Reason for reconnection (scheduled, health_check, etc.)
         """
-        self.
-
-
-
-
-
-
-
-
-
-
-        self.mqttClient.stop()
-        self.connectMQTT()
-        self.subscribeAllHWS()
-
-        # After reconnection, check if temperatures have changed
-        def check_temp_changes():
-            for properties in self.properties:
-                heat_pumps = properties.get('heat_pump', [])
-                for heat_pump in heat_pumps:
-                    hws_id = heat_pump['id']
-                    if (hws_id in temp_values and
-                            'last_state' in heat_pump and
-                            'temp_current' in heat_pump['last_state']):
-                        old_temp = temp_values[hws_id]
-                        new_temp = heat_pump['last_state']['temp_current']
-                        if old_temp != new_temp:
-                            self.logger.info(f"emeraldhws: Temperature changed after reconnect for {hws_id}: {old_temp} → {new_temp}")
-
-        # Check for temperature changes after a short delay to allow for updates
-        threading.Timer(10.0, check_temp_changes).start()
+        with self._mqtt_lock:
+            self.logger.info(f"emeraldhws: awsiot: Reconnecting MQTT connection (reason: {reason})")
+
+            if self.mqttClient is not None:
+                self.mqttClient.stop()
+                self.mqttClient = None  # Clear the client so a new one can be created
+
+            self.connectMQTT()
+            self.subscribeAllHWS()
+
+            self.logger.info(f"emeraldhws: awsiot: MQTT reconnection completed (reason: {reason})")

     def connectMQTT(self):
         """ Establishes a connection to Amazon IOT core's MQTT service
         """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        credentials_provider =
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        self.
-
-
+        with self._mqtt_lock:
+            # If already connected, skip
+            if self.mqttClient is not None:
+                self.logger.debug("emeraldhws: awsiot: MQTT client already exists, skipping connection")
+                return
+
+            # Clear the connection event before starting new connection
+            self._connection_event.clear()
+
+            # Certificate path is available but not currently used in the connection
+            # os.path.join(os.path.dirname(__file__), '__assets__', 'SFSRootCAG2.pem')
+            identityPoolID = self.COGNITO_IDENTITY_POOL_ID
+            region = self.MQTT_HOST.split('.')[2]
+            cognito_endpoint = "cognito-identity." + region + ".amazonaws.com"
+            cognitoIdentityClient = boto3.client('cognito-identity', region_name=region)
+
+            temporaryIdentityId = cognitoIdentityClient.get_id(IdentityPoolId=identityPoolID)
+            identityID = temporaryIdentityId["IdentityId"]
+            self.logger.debug("emeraldhws: awsiot: AWS IoT IdentityID: {}".format(identityID))
+
+            credentials_provider = auth.AwsCredentialsProvider.new_cognito(
+                endpoint=cognito_endpoint,
+                identity=identityID,
+                tls_ctx=io.ClientTlsContext(io.TlsContextOptions()))
+
+            client = mqtt5_client_builder.websockets_with_default_aws_signing(
+                endpoint = self.MQTT_HOST,
+                region = region,
+                credentials_provider = credentials_provider,
+                on_connection_interrupted = self.on_connection_interrupted,
+                on_connection_resumed = self.on_connection_resumed,
+                on_lifecycle_connection_success = self.on_lifecycle_connection_success,
+                on_lifecycle_stopped = self.on_lifecycle_stopped,
+                on_lifecycle_attempting_connect = self.on_lifecycle_attempting_connect,
+                on_lifecycle_disconnection = self.on_lifecycle_disconnection,
+                on_lifecycle_connection_failure = self.on_lifecycle_connection_failure,
+                on_publish_received = self.mqttCallback
+            )
+
+            client.start()
+            self.mqttClient = client
+
+            # Block until connection is established or timeout (30 seconds)
+            if not self._connection_event.wait(timeout=30):
+                self.logger.warning("emeraldhws: awsiot: Connection establishment timed out after 30 seconds")
+                # Continue anyway - the connection may still succeed asynchronously

     def mqttDecodeUpdate(self, topic, payload):
         """ Attempt to decode a received MQTT message and direct appropriately
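For context, the rewritten `connectMQTT()` follows the standard AWS IoT pattern: fetch an identity from the Cognito identity pool with boto3, wrap it in an awscrt credentials provider, then build a websocket MQTT5 client with default SigV4 signing. A condensed, hedged sketch of that flow; the pool ID and endpoint below are placeholders, and the real code derives the region from the third dot-separated field of `MQTT_HOST`:

    import boto3
    from awscrt import auth, io
    from awsiot import mqtt5_client_builder

    # Placeholder values standing in for the class constants used in the diff
    IDENTITY_POOL_ID = "ap-southeast-2:00000000-0000-0000-0000-000000000000"
    MQTT_HOST = "example-ats.iot.ap-southeast-2.amazonaws.com"

    region = MQTT_HOST.split('.')[2]  # e.g. "ap-southeast-2"
    cognito = boto3.client('cognito-identity', region_name=region)
    identity_id = cognito.get_id(IdentityPoolId=IDENTITY_POOL_ID)["IdentityId"]

    # Exchange the Cognito identity for SigV4 signing credentials
    credentials_provider = auth.AwsCredentialsProvider.new_cognito(
        endpoint="cognito-identity." + region + ".amazonaws.com",
        identity=identity_id,
        tls_ctx=io.ClientTlsContext(io.TlsContextOptions()))

    # Build and start a websocket MQTT5 client signed with those credentials
    client = mqtt5_client_builder.websockets_with_default_aws_signing(
        endpoint=MQTT_HOST,
        region=region,
        credentials_provider=credentials_provider)
    client.start()  # connection result arrives via lifecycle callbacks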
@@ -238,6 +227,8 @@ class EmeraldHWS():
         # Reset failure counter and update connection state
         self.consecutive_failures = 0
         self.connection_state = "connected"
+        # Signal that connection is established
+        self._connection_event.set()
         return

     def on_lifecycle_connection_failure(self, lifecycle_connection_failure: mqtt5.LifecycleConnectFailureData):
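The `_connection_event.set()` added here pairs with the `self._connection_event.wait(timeout=30)` call in `connectMQTT()`: the success callback runs on the SDK's thread and releases the caller blocked waiting for the connection. A minimal sketch of that handshake, with a timer standing in for the SDK callback:

    import threading

    connection_event = threading.Event()
    connection_event.clear()

    def on_connection_success():
        # Runs on a background thread once the connection is up
        connection_event.set()

    threading.Timer(0.5, on_connection_success).start()  # simulated SDK callback

    if not connection_event.wait(timeout=30):
        print("connection establishment timed out")  # mirrors the warning in connectMQTT()
    else:
        print("connected")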
@@ -247,14 +238,18 @@ class EmeraldHWS():
         error_code = getattr(error, 'code', 'unknown')
         error_name = getattr(error, 'name', 'unknown')
         error_message = str(error)
-
+
         # Update connection state and increment failure counter
         self.connection_state = "failed"
         self.consecutive_failures += 1
-
+
         # Log at INFO level since this is important for troubleshooting
         self.logger.info(f"emeraldhws: awsiot: connection failed - Error: {error_name} (code: {error_code}), Message: {error_message}")
-
+
+        # Log additional error details if available
+        if hasattr(error, '__dict__'):
+            self.logger.debug(f"emeraldhws: awsiot: error details: {error.__dict__}")
+
         # If there's a CONNACK packet available, log its details too
         if hasattr(lifecycle_connection_failure, 'connack_packet') and lifecycle_connection_failure.connack_packet:
             connack = lifecycle_connection_failure.connack_packet
@@ -264,6 +259,17 @@ class EmeraldHWS():
                 self.logger.info(f"emeraldhws: awsiot: MQTT CONNACK reason: {reason_code} - {reason_string}")
             else:
                 self.logger.info(f"emeraldhws: awsiot: MQTT CONNACK reason code: {reason_code}")
+
+            # Log all CONNACK properties if available
+            if hasattr(connack, '__dict__'):
+                self.logger.debug(f"emeraldhws: awsiot: CONNACK details: {connack.__dict__}")
+        else:
+            self.logger.debug("emeraldhws: awsiot: no CONNACK packet available in failure data")
+
+        # Log the exception data structure itself for deeper debugging
+        if hasattr(lifecycle_connection_failure, '__dict__'):
+            self.logger.debug(f"emeraldhws: awsiot: failure data: {lifecycle_connection_failure.__dict__}")
+
         return

     def on_lifecycle_stopped(self, lifecycle_stopped_data: mqtt5.LifecycleStoppedData):
@@ -278,23 +284,42 @@ class EmeraldHWS():
         # Extract disconnect reason if available
         reason = "unknown reason"
         if hasattr(lifecycle_disconnect_data, 'disconnect_packet') and lifecycle_disconnect_data.disconnect_packet:
-
-
+            disconnect_packet = lifecycle_disconnect_data.disconnect_packet
+            reason_code = getattr(disconnect_packet, 'reason_code', 'unknown')
+            reason_string = getattr(disconnect_packet, 'reason_string', '')
             reason = f"reason code: {reason_code}" + (f" - {reason_string}" if reason_string else "")
-
+
+            # Log full disconnect packet details at debug level
+            if hasattr(disconnect_packet, '__dict__'):
+                self.logger.debug(f"emeraldhws: awsiot: disconnect packet details: {disconnect_packet.__dict__}")
+        else:
+            # Log the disconnect data structure if no packet available
+            if hasattr(lifecycle_disconnect_data, '__dict__'):
+                self.logger.debug(f"emeraldhws: awsiot: disconnect data: {lifecycle_disconnect_data.__dict__}")
+
         self.logger.info(f"emeraldhws: awsiot: disconnected - {reason}")
         return

     def on_lifecycle_attempting_connect(self, lifecycle_attempting_connect_data: mqtt5.LifecycleAttemptingConnectData):
         """ Log message when attempting connect
         """
-
-        endpoint = getattr(lifecycle_attempting_connect_data, 'endpoint', 'unknown')
-        self.logger.debug(f"emeraldhws: awsiot: attempting to connect to {endpoint}")
+        self.logger.debug("emeraldhws: awsiot: attempting to connect")
         return
-
+
+    def scheduled_reconnect(self):
+        """ Periodic MQTT reconnect - called by timer and reschedules itself
+        """
+        self.reconnectMQTT(reason="scheduled")
+
+        # Reschedule for next time
+        if self.connection_timeout > 0:
+            self.reconnect_timer = threading.Timer(self.connection_timeout, self.scheduled_reconnect)
+            self.reconnect_timer.daemon = True
+            self.reconnect_timer.start()
+
     def check_connection_health(self):
         """ Check if we've received any messages recently, reconnect if not
+            Called by timer and reschedules itself
         """
         if self.last_message_time is None:
             # No messages received yet, don't reconnect
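`scheduled_reconnect()` uses a one-shot `threading.Timer` that re-arms itself after each run; `check_connection_health()` below uses the same trick, so no dedicated scheduler thread is needed. A small standalone sketch of the pattern (the `task()` here is hypothetical):

    import threading
    import time

    INTERVAL_SECONDS = 2.0

    def task():
        print("periodic work")  # e.g. reconnect or health check
        schedule_next()

    def schedule_next():
        timer = threading.Timer(INTERVAL_SECONDS, task)
        timer.daemon = True     # don't keep the process alive just for the timer
        timer.start()

    schedule_next()             # arm the first run; each run re-arms the next
    time.sleep(7)               # keep the demo process alive for a few cycles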
@@ -303,24 +328,24 @@ class EmeraldHWS():
         current_time = time.time()
         time_since_last_message = current_time - self.last_message_time
         minutes_since_last = time_since_last_message / 60.0
-
+
         if time_since_last_message > self.health_check_interval:
             # This is an INFO level log because it's an important event
             self.logger.info(f"emeraldhws: awsiot: No messages received for {minutes_since_last:.1f} minutes, reconnecting")
-
+
             # If we're in a failed state, apply exponential backoff
             if self.connection_state == "failed" and self.consecutive_failures > 0:
                 # Calculate backoff time with exponential increase, capped at max_backoff_seconds
                 backoff_seconds = min(2 ** (self.consecutive_failures - 1), self.max_backoff_seconds)
                 self.logger.info(f"emeraldhws: awsiot: Connection in failed state, applying backoff of {backoff_seconds} seconds before retry (attempt {self.consecutive_failures})")
                 time.sleep(backoff_seconds)
-
+
             self.reconnectMQTT(reason="health_check")
         else:
             # This is a DEBUG level log to avoid cluttering logs
             self.logger.debug(f"emeraldhws: awsiot: Health check - Last message received {minutes_since_last:.1f} minutes ago")
-
-        #
+
+        # Always reschedule next health check
         if self.health_check_interval > 0:
             self.health_check_timer = threading.Timer(self.health_check_interval, self.check_connection_health)
             self.health_check_timer.daemon = True
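The backoff applied on the health-check path is a simple doubling curve capped at `max_backoff_seconds` (60 s): 1 s after the first failure, 2 s after the second, 4 s after the third, and 60 s from the seventh onwards. The expression, worked through on its own:

    MAX_BACKOFF_SECONDS = 60  # same cap as self.max_backoff_seconds

    def backoff(consecutive_failures: int) -> int:
        # Exponential backoff capped at the maximum: 1, 2, 4, 8, ...
        return min(2 ** (consecutive_failures - 1), MAX_BACKOFF_SECONDS)

    for n in range(1, 9):
        print(n, backoff(n))  # 1, 2, 4, 8, 16, 32, 60, 60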
@@ -339,7 +364,7 @@ class EmeraldHWS():
                 for heat_pump in heat_pumps:
                     if heat_pump['id'] == id:
                         heat_pump['last_state'][key] = value
-
+
         # Call callback AFTER releasing lock to avoid potential deadlocks
         if self.update_callback is not None:
             self.update_callback()
@@ -348,25 +373,26 @@ class EmeraldHWS():
         """ Subscribes to the MQTT topics for the supplied HWS
         :param id: The UUID of the requested HWS
         """
-
-        self.
+        with self._mqtt_lock:
+            if not self.mqttClient:
+                self.connectMQTT()

-
-
-
-
-
-
+            mqtt_topic = "ep/heat_pump/from_gw/{}".format(id)
+            subscribe_future = self.mqttClient.subscribe(
+                subscribe_packet=mqtt5.SubscribePacket(
+                    subscriptions=[mqtt5.Subscription(
+                        topic_filter=mqtt_topic,
+                        qos=mqtt5.QoS.AT_LEAST_ONCE)]))

-
-
+            # Wait for subscription to complete
+            subscribe_future.result(20)

     def getFullStatus(self, id):
         """ Returns a dict with the full status of the specified HWS
         :param id: UUID of the HWS to get the status for
         """

-        if not self.
+        if not self._is_connected:
             self.connect()

         with self._state_lock:
@@ -383,7 +409,7 @@ class EmeraldHWS():
         :param payload: JSON payload to send eg {"switch":1}
         """

-        if not self.
+        if not self._is_connected:
             self.connect()

         hwsdetail = self.getFullStatus(id)
@@ -401,11 +427,17 @@ class EmeraldHWS():
             payload
         ]
         mqtt_topic = "ep/heat_pump/to_gw/{}".format(id)
-
-
-
-
-
+
+        with self._mqtt_lock:
+            if not self.mqttClient:
+                raise Exception("MQTT client not connected")
+            publish_future = self.mqttClient.publish(
+                mqtt5.PublishPacket(
+                    topic=mqtt_topic,
+                    payload=json.dumps(msg),
+                    qos=mqtt5.QoS.AT_LEAST_ONCE))
+
+        # Wait for publish to complete outside the lock
         publish_future.result(20)  # 20 seconds

     def turnOn(self, id):
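The publish path now holds `_mqtt_lock` only while handing the packet to the client; the returned future is awaited after the lock is released, so a slow broker acknowledgement cannot stall other MQTT operations. A generic sketch of that initiate-under-lock, wait-outside pattern:

    import threading
    import time
    from concurrent.futures import ThreadPoolExecutor

    lock = threading.RLock()
    executor = ThreadPoolExecutor(max_workers=1)

    def slow_ack(payload: str) -> str:
        time.sleep(1)  # stands in for the broker acknowledging the publish
        return "acked: " + payload

    with lock:
        # Only the hand-off happens under the lock
        publish_future = executor.submit(slow_ack, '{"switch": 1}')

    # The potentially slow wait happens after the lock is released
    print(publish_future.result(timeout=20))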
@@ -464,14 +496,35 @@ class EmeraldHWS():
             work_state = full_status.get("last_state").get("work_state")
             # work_state: 0=off/idle, 1=actively heating, 2=on but not heating
             return (work_state == 1)
-
+
         # Fallback to device_operation_status if work_state not available yet
         # (e.g., before first MQTT update after initialization)
         heating_status = full_status.get("device_operation_status")
         return (heating_status == 1)
-
+
         return False

+    def getHourlyEnergyUsage(self, id):
+        """ Returns energy usage as reported by heater for the previous hour in kWh and a string of format YYYY-MM-DD HH:00 dictating the starting hour for the energy reading
+        :param id: The UUID of the HWS to query
+        """
+        full_status = self.getFullStatus(id)
+        if not full_status:
+            return None
+
+        consumption = full_status.get("consumption_data")
+        if consumption:
+            consumption = json.loads(consumption)
+        else:
+            return None
+
+        current_hour = consumption.get("current_hour")
+        last_data_at = consumption.get("last_data_at")
+        if current_hour is None or last_data_at is None:
+            return None
+
+        return current_hour, last_data_at
+
     def currentMode(self, id):
         """ Returns an integer specifying the current mode (0==boost, 1==normal, 2==quiet)
         :param id: The UUID of the HWS to query
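The new `getHourlyEnergyUsage()` returns `None` when no consumption data is available, otherwise a two-value result parsed from the heater's `consumption_data` JSON: the kWh figure for the previous hour and a "YYYY-MM-DD HH:00" string for the hour it covers. A hedged usage sketch, assuming `hws` is an already-connected `EmeraldHWS` instance and `hws_id` came from `listHWS()`:

    usage = hws.getHourlyEnergyUsage(hws_id)
    if usage is None:
        print("No consumption data reported yet")
    else:
        kwh, hour_started = usage  # hour_started formatted as "YYYY-MM-DD HH:00"
        print(f"Used {kwh} kWh in the hour starting {hour_started}")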
@@ -499,7 +552,7 @@ class EmeraldHWS():
     def listHWS(self):
         """ Returns a list of UUIDs of all discovered HWS
         """
-        if not self.
+        if not self._is_connected:
             self.connect()

         hws = []
@@ -526,7 +579,27 @@ class EmeraldHWS():
         """ Connect to the API with the supplied credentials, retrieve HWS details
         :returns: True if successful
         """
-
-        self.
-
-
+        # Use lock to ensure only one thread can connect at a time
+        with self._connect_lock:
+            # Double-check pattern: check again inside the lock
+            if self._is_connected:
+                self.logger.debug("emeraldhws: Already connected, skipping")
+                return
+
+            self.logger.debug("emeraldhws: Connecting...")
+            self.getLoginToken()
+            self.getAllHWS()
+            self.connectMQTT()
+            self.subscribeAllHWS()
+            self._is_connected = True
+
+            # Start timers ONCE on initial connection
+            if self.connection_timeout > 0:
+                self.reconnect_timer = threading.Timer(self.connection_timeout, self.scheduled_reconnect)
+                self.reconnect_timer.daemon = True
+                self.reconnect_timer.start()
+
+            if self.health_check_interval > 0:
+                self.health_check_timer = threading.Timer(self.health_check_interval, self.check_connection_health)
+                self.health_check_timer.daemon = True
+                self.health_check_timer.start()
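`connect()` now uses a double-checked pattern: callers test `_is_connected` without the lock (`if not self._is_connected: self.connect()`), and `connect()` re-checks it inside `_connect_lock`, so concurrent callers cannot log in or start the timers twice. A stripped-down sketch of the same idea:

    import threading

    _connect_lock = threading.Lock()
    _is_connected = False

    def connect():
        global _is_connected
        with _connect_lock:
            # Re-check inside the lock: another thread may have finished
            # connecting while this one was waiting to acquire it.
            if _is_connected:
                return
            print("doing the expensive login / MQTT setup exactly once")
            _is_connected = True

    threads = [threading.Thread(target=connect) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()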
emerald_hws-0.0.18.dist-info/RECORD
ADDED
@@ -0,0 +1,7 @@
+emerald_hws/__init__.py,sha256=uukjQ-kiPYKWvGT3jLL6kJA1DCNAxtw4HlLKqPSypXs,61
+emerald_hws/emeraldhws.py,sha256=_kW1CtCrhfUW7AXhzTdqO7TSSKQe_Oxw9q5v6qiPx_Q,26598
+emerald_hws/__assets__/SFSRootCAG2.pem,sha256=hw9W0AnYrrlbcWsOewAgIl1ULEsoO57Ylu35dCjWcS4,1424
+emerald_hws-0.0.18.dist-info/METADATA,sha256=gmRQmS3lp6IcJbb6jPCAYrn4sY7gY5GWln4a9x8VToY,2534
+emerald_hws-0.0.18.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+emerald_hws-0.0.18.dist-info/top_level.txt,sha256=ZCiUmnBkDr2n4QVkTet1s_AKiGJjuz3heuCR5w5ZqLY,12
+emerald_hws-0.0.18.dist-info/RECORD,,
emerald_hws-0.0.16.dist-info/RECORD
DELETED
@@ -1,7 +0,0 @@
-emerald_hws/__init__.py,sha256=uukjQ-kiPYKWvGT3jLL6kJA1DCNAxtw4HlLKqPSypXs,61
-emerald_hws/emeraldhws.py,sha256=in2F-kUyKXGQApvm00L2DH3CSXFj4H_1Vapnxkfa7xM,23416
-emerald_hws/__assets__/SFSRootCAG2.pem,sha256=hw9W0AnYrrlbcWsOewAgIl1ULEsoO57Ylu35dCjWcS4,1424
-emerald_hws-0.0.16.dist-info/METADATA,sha256=X2bALg477JBnh4JUPD8MaEgWJiEjyh0INb5QMbGiHxE,2534
-emerald_hws-0.0.16.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-emerald_hws-0.0.16.dist-info/top_level.txt,sha256=ZCiUmnBkDr2n4QVkTet1s_AKiGJjuz3heuCR5w5ZqLY,12
-emerald_hws-0.0.16.dist-info/RECORD,,
{emerald_hws-0.0.16.dist-info → emerald_hws-0.0.18.dist-info}/WHEEL
File without changes
{emerald_hws-0.0.16.dist-info → emerald_hws-0.0.18.dist-info}/top_level.txt
File without changes