sfq 0.0.13__py3-none-any.whl → 0.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sfq/__init__.py +195 -519
- sfq/_cometd.py +297 -0
- {sfq-0.0.13.dist-info → sfq-0.0.15.dist-info}/METADATA +1 -1
- sfq-0.0.15.dist-info/RECORD +6 -0
- sfq-0.0.13.dist-info/RECORD +0 -5
- {sfq-0.0.13.dist-info → sfq-0.0.15.dist-info}/WHEEL +0 -0
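For callers, the user-facing change in this release is the SFAuth constructor: the default user_agent moves to "sfq/0.0.15", a new sforce_client application identifier is accepted (it falls back to the user agent when left as "_auto"), and client_secret keeps its deprecation default but becomes mandatory after 1 August 2025. A minimal sketch of constructing the client under the new signature; every credential value below is a placeholder, not a working secret:

    from sfq import SFAuth

    # Placeholder credentials for illustration only.
    auth = SFAuth(
        instance_url="https://example.my.salesforce.com",
        client_id="example_connected_app_client_id",
        refresh_token="example_refresh_token",
        client_secret="example_client_secret",  # optional today, mandatory after 2025-AUG-1
        sforce_client="my-integration",  # new in 0.0.15; "_auto" falls back to user_agent
    )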
sfq/__init__.py
CHANGED
@@ -7,8 +7,7 @@ import time
 import warnings
 from collections import OrderedDict
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from queue import Empty, Queue
-from typing import Any, Dict, Literal, Optional
+from typing import Any, Dict, Literal, Optional, List, Tuple
 from urllib.parse import quote, urlparse
 
 TRACE = 5
@@ -31,6 +30,7 @@ def trace(self: logging.Logger, message: str, *args: Any, **kwargs: Any) -> None
             "set-cookie",
             "cookie",
             "refresh_token",
+            "client_secret",
         ]
         if isinstance(data, dict):
             return {
@@ -81,14 +81,15 @@ class SFAuth:
         self,
         instance_url: str,
         client_id: str,
-        refresh_token: str,
+        refresh_token: str,  # client_secret & refresh_token will swap positions 2025-AUG-1
         client_secret: str = "_deprecation_warning",  # mandatory after 2025-AUG-1
         api_version: str = "v63.0",
         token_endpoint: str = "/services/oauth2/token",
         access_token: Optional[str] = None,
         token_expiration_time: Optional[float] = None,
         token_lifetime: int = 15 * 60,
-        user_agent: str = "sfq/0.0.13",
+        user_agent: str = "sfq/0.0.15",
+        sforce_client: str = "_auto",
         proxy: str = "auto",
     ) -> None:
         """
@@ -103,7 +104,8 @@ class SFAuth:
         :param access_token: The access token for the current session (default is None).
         :param token_expiration_time: The expiration time of the access token (default is None).
         :param token_lifetime: The lifetime of the access token in seconds (default is 15 minutes).
-        :param user_agent: Custom User-Agent string (default is "sfq/0.0.13").
+        :param user_agent: Custom User-Agent string (default is "sfq/0.0.15").
+        :param sforce_client: Custom Application Identifier (default is user_agent).
         :param proxy: The proxy configuration, "auto" to use environment (default is "auto").
         """
         self.instance_url = self._format_instance_url(instance_url)
@@ -116,9 +118,13 @@ class SFAuth:
         self.token_expiration_time = token_expiration_time
         self.token_lifetime = token_lifetime
         self.user_agent = user_agent
+        self.sforce_client = sforce_client
         self._auto_configure_proxy(proxy)
         self._high_api_usage_threshold = 80
 
+        if sforce_client == "_auto":
+            self.sforce_client = user_agent
+
         if self.client_secret == "_deprecation_warning":
             warnings.warn(
                 "The 'client_secret' parameter will be mandatory and positional arguments will change after 1 August 2025. "
@@ -132,7 +138,6 @@ class SFAuth:
             )
 
     def _format_instance_url(self, instance_url) -> str:
-        # check if it begins with https://
        if instance_url.startswith("https://"):
             return instance_url
         if instance_url.startswith("http://"):
@@ -199,48 +204,73 @@ class SFAuth:
             logger.trace("Direct connection to %s", netloc)
         return conn
 
-    def _new_token_request(self, payload: Dict[str, str]) -> Optional[Dict[str, Any]]:
-        """
-        Perform a new token request using the provided payload.
-
-        :param payload: Payload for the token request.
-        :return: Parsed JSON response or None on failure.
+    def _send_request(
+        self,
+        method: str,
+        endpoint: str,
+        headers: Dict[str, str],
+        body: Optional[str] = None,
+        timeout: Optional[int] = None,
+    ) -> Tuple[Optional[int], Optional[str]]:
+        """
+        Unified request method with built-in logging and error handling.
+
+        :param method: HTTP method to use.
+        :param endpoint: Target API endpoint.
+        :param headers: HTTP headers.
+        :param body: Optional request body.
+        :param timeout: Optional timeout in seconds.
+        :return: Tuple of HTTP status code and response body as a string.
         """
         parsed_url = urlparse(self.instance_url)
         conn = self._create_connection(parsed_url.netloc)
-        headers = {
-            "Accept": "application/json",
-            "Content-Type": "application/x-www-form-urlencoded",
-            "User-Agent": self.user_agent,
-        }
-        body = "&".join(f"{key}={quote(str(value))}" for key, value in payload.items())
 
         try:
-            logger.trace("Request
-            logger.trace("Request
+            logger.trace("Request method: %s", method)
+            logger.trace("Request endpoint: %s", endpoint)
             logger.trace("Request headers: %s", headers)
-
+            if body:
+                logger.trace("Request body: %s", body)
+
+            conn.request(method, endpoint, body=body, headers=headers)
             response = conn.getresponse()
-            data = response.read().decode("utf-8")
             self._http_resp_header_logic(response)
 
-
-
-
-
-
-            logger.error(
-                "Token refresh failed: %s %s", response.status, response.reason
-            )
-            logger.debug("Response body: %s", data)
+            data = response.read().decode("utf-8")
+            logger.trace("Response status: %s", response.status)
+            logger.trace("Response body: %s", data)
+            return response.status, data
 
         except Exception as err:
-            logger.exception("
+            logger.exception("HTTP request failed: %s", err)
+            return None, None
 
         finally:
-            logger.trace("Closing connection
+            logger.trace("Closing connection...")
             conn.close()
 
+    def _new_token_request(self, payload: Dict[str, str]) -> Optional[Dict[str, Any]]:
+        """
+        Perform a new token request using the provided payload.
+
+        :param payload: Payload for the token request.
+        :return: Parsed JSON response or None on failure.
+        """
+        headers = self._get_common_headers()
+        headers["Content-Type"] = "application/x-www-form-urlencoded"
+        del headers["Authorization"]
+
+        body = "&".join(f"{key}={quote(str(value))}" for key, value in payload.items())
+        status, data = self._send_request("POST", self.token_endpoint, headers, body)
+
+        if status == 200:
+            logger.trace("Token refresh successful.")
+            return json.loads(data)
+
+        if status:
+            logger.error("Token refresh failed: %s", status)
+            logger.debug("Response body: %s", data)
+
         return None
 
     def _http_resp_header_logic(self, response: http.client.HTTPResponse) -> None:
@@ -313,6 +343,26 @@ class SFAuth:
             logger.error("Failed to obtain access token.")
             return None
 
+    def _get_common_headers(self) -> Dict[str, str]:
+        """
+        Generate common headers for API requests.
+
+        :return: A dictionary of common headers.
+        """
+        if not self.access_token and self.token_expiration_time is None:
+            self.token_expiration_time = int(time.time())
+        self._refresh_token_if_needed()
+
+        return {
+            "Authorization": f"Bearer {self.access_token}",
+            "User-Agent": self.user_agent,
+            "Sforce-Call-Options": f"client={self.sforce_client}",
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+
+
     def _is_token_expired(self) -> bool:
         """
         Check if the access token has expired.
@@ -362,51 +412,15 @@ class SFAuth:
         :param resource_id: ID of the static resource to read.
         :return: Static resource content or None on failure.
         """
-        self._refresh_token_if_needed()
-
-        if not self.access_token:
-            logger.error("No access token available for limits.")
-            return None
-
         endpoint = f"/services/data/{self.api_version}/sobjects/StaticResource/{resource_id}/Body"
-        headers = {
-            "Authorization": f"Bearer {self.access_token}",
-            "User-Agent": self.user_agent,
-            "Accept": "application/json",
-        }
-
-        parsed_url = urlparse(self.instance_url)
-        conn = self._create_connection(parsed_url.netloc)
-
-        try:
-            logger.trace("Request endpoint: %s", endpoint)
-            logger.trace("Request headers: %s", headers)
-            conn.request("GET", endpoint, headers=headers)
-            response = conn.getresponse()
-            data = response.read().decode("utf-8")
-            self._http_resp_header_logic(response)
-
-            if response.status == 200:
-                logger.debug("Get Static Resource Body API request successful.")
-                logger.trace("Response body: %s", data)
-                return data
-
-            logger.error(
-                "Get Static Resource Body API request failed: %s %s",
-                response.status,
-                response.reason,
-            )
-            logger.debug("Response body: %s", data)
+        headers = self._get_common_headers()
+        status, data = self._send_request("GET", endpoint, headers)
 
-
-            logger.
-
-            )
-
-        finally:
-            logger.trace("Closing connection...")
-            conn.close()
+        if status == 200:
+            logger.debug("Static resource fetched successfully.")
+            return data
 
+        logger.error("Failed to fetch static resource: %s", status)
         return None
 
     def update_static_resource_name(
@@ -453,109 +467,48 @@ class SFAuth:
         :param data: Content to update the static resource with.
         :return: Parsed JSON response or None on failure.
         """
-        self._refresh_token_if_needed()
-
-        if not self.access_token:
-            logger.error("No access token available for limits.")
-            return None
-
-        payload = {"Body": base64.b64encode(data.encode("utf-8"))}
+        payload = {"Body": base64.b64encode(data.encode("utf-8")).decode("utf-8")}
 
         endpoint = (
             f"/services/data/{self.api_version}/sobjects/StaticResource/{resource_id}"
         )
-        headers = {
-            "Authorization": f"Bearer {self.access_token}",
-            "User-Agent": self.user_agent,
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-        }
-
-        parsed_url = urlparse(self.instance_url)
-        conn = self._create_connection(parsed_url.netloc)
-
-        try:
-            logger.trace("Request endpoint: %s", endpoint)
-            logger.trace("Request headers: %s", headers)
-            logger.trace("Request payload: %s", payload)
-            conn.request(
-                "PATCH",
-                endpoint,
-                headers=headers,
-                body=json.dumps(payload, default=lambda x: x.decode("utf-8")),
-            )
-            response = conn.getresponse()
-            data = response.read().decode("utf-8")
-            self._http_resp_header_logic(response)
-
-            if response.status == 200:
-                logger.debug("Patch Static Resource request successful.")
-                logger.trace("Response body: %s", data)
-                return json.loads(data)
+        headers = self._get_common_headers()
 
-
-
-
-
-            )
-
+        status_code, response_data = self._send_request(
+            method="PATCH",
+            endpoint=endpoint,
+            headers=headers,
+            body=json.dumps(payload),
+        )
 
-
-            logger.
+        if status_code == 200:
+            logger.debug("Patch Static Resource request successful.")
+            return json.loads(response_data)
 
-
-
-
+        logger.error(
+            "Patch Static Resource API request failed: %s",
+            status_code,
+        )
+        logger.debug("Response body: %s", response_data)
 
         return None
 
     def limits(self) -> Optional[Dict[str, Any]]:
         """
-
+        Fetch the current limits for the Salesforce instance.
 
         :return: Parsed JSON response or None on failure.
         """
-        self._refresh_token_if_needed()
-
-        if not self.access_token:
-            logger.error("No access token available for limits.")
-            return None
-
         endpoint = f"/services/data/{self.api_version}/limits"
-        headers = {
-            "Authorization": f"Bearer {self.access_token}",
-            "User-Agent": self.user_agent,
-            "Accept": "application/json",
-        }
-
-        parsed_url = urlparse(self.instance_url)
-        conn = self._create_connection(parsed_url.netloc)
-
-        try:
-            logger.trace("Request endpoint: %s", endpoint)
-            logger.trace("Request headers: %s", headers)
-            conn.request("GET", endpoint, headers=headers)
-            response = conn.getresponse()
-            data = response.read().decode("utf-8")
-            self._http_resp_header_logic(response)
-
-            if response.status == 200:
-                logger.debug("Limits API request successful.")
-                logger.trace("Response body: %s", data)
-                return json.loads(data)
-
-            logger.error(
-                "Limits API request failed: %s %s", response.status, response.reason
-            )
-            logger.debug("Response body: %s", data)
+        headers = self._get_common_headers()
 
-
-            logger.exception("Error during limits request: %s", err)
+        status, data = self._send_request("GET", endpoint, headers)
 
-
-            logger.debug("
-
+        if status == 200:
+            logger.debug("Limits fetched successfully.")
+            return json.loads(data)
 
+        logger.error("Failed to fetch limits: %s", status)
         return None
 
     def query(self, query: str, tooling: bool = False) -> Optional[Dict[str, Any]]:
@@ -566,38 +519,27 @@ class SFAuth:
         :param tooling: If True, use the Tooling API endpoint.
         :return: Parsed JSON response or None on failure.
        """
-        self._refresh_token_if_needed()
-
-        if not self.access_token:
-            logger.error("No access token available for query.")
-            return None
-
         endpoint = f"/services/data/{self.api_version}/"
         endpoint += "tooling/query" if tooling else "query"
         query_string = f"?q={quote(query)}"
-
         endpoint += query_string
+        headers = self._get_common_headers()
 
-        headers = {
-            "Authorization": f"Bearer {self.access_token}",
-            "User-Agent": self.user_agent,
-            "Accept": "application/json",
-        }
-
-        parsed_url = urlparse(self.instance_url)
-        conn = self._create_connection(parsed_url.netloc)
+        paginated_results = {"totalSize": 0, "done": False, "records": []}
 
         try:
-            paginated_results = {"totalSize": 0, "done": False, "records": []}
             while True:
                 logger.trace("Request endpoint: %s", endpoint)
                 logger.trace("Request headers: %s", headers)
-                conn.request("GET", endpoint, headers=headers)
-                response = conn.getresponse()
-                data = response.read().decode("utf-8")
-                self._http_resp_header_logic(response)
+                headers = self._get_common_headers()  # handle refresh token
+
+                status_code, data = self._send_request(
+                    method="GET",
+                    endpoint=endpoint,
+                    headers=headers,
+                )
 
-                if response.status == 200:
+                if status_code == 200:
                     current_results = json.loads(data)
                     paginated_results["records"].extend(current_results["records"])
                     query_done = current_results.get("done")
|
|
622
564
|
else:
|
623
565
|
logger.debug("Query failed: %r", query)
|
624
566
|
logger.error(
|
625
|
-
"Query failed with HTTP status %s
|
626
|
-
|
627
|
-
response.reason,
|
567
|
+
"Query failed with HTTP status %s",
|
568
|
+
status_code,
|
628
569
|
)
|
629
570
|
logger.debug("Query response: %s", data)
|
630
571
|
break
|
@@ -634,10 +575,6 @@ class SFAuth:
         except Exception as err:
             logger.exception("Exception during query: %s", err)
 
-        finally:
-            logger.trace("Closing connection...")
-            conn.close()
-
         return None
 
     def tooling_query(self, query: str) -> Optional[Dict[str, Any]]:
@@ -667,32 +604,22 @@ class SFAuth:
             )
             return None
 
-        self._refresh_token_if_needed()
-
-        if not self.access_token:
-            logger.error("No access token available for key prefixes.")
-            return None
-
         endpoint = f"/services/data/{self.api_version}/sobjects/"
-        headers = {
-            "Authorization": f"Bearer {self.access_token}",
-            "User-Agent": self.user_agent,
-            "Accept": "application/json",
-        }
+        headers = self._get_common_headers()
 
-        parsed_url = urlparse(self.instance_url)
-        conn = self._create_connection(parsed_url.netloc)
         prefixes = {}
 
         try:
             logger.trace("Request endpoint: %s", endpoint)
             logger.trace("Request headers: %s", headers)
-            conn.request("GET", endpoint, headers=headers)
-            response = conn.getresponse()
-            data = response.read().decode("utf-8")
-            self._http_resp_header_logic(response)
 
-            if response.status == 200:
+            status_code, data = self._send_request(
+                method="GET",
+                endpoint=endpoint,
+                headers=headers,
+            )
+
+            if status_code == 200:
                 logger.debug("Key prefixes API request successful.")
                 logger.trace("Response body: %s", data)
                 for sobject in json.loads(data)["sobjects"]:
@@ -710,19 +637,14 @@ class SFAuth:
                 return prefixes
 
             logger.error(
-                "Key prefixes API request failed: %s %s",
-                response.status,
-                response.reason,
+                "Key prefixes API request failed: %s",
+                status_code,
             )
             logger.debug("Response body: %s", data)
 
         except Exception as err:
             logger.exception("Exception during key prefixes API request: %s", err)
 
-        finally:
-            logger.trace("Closing connection...")
-            conn.close()
-
         return None
 
     def cquery(
@@ -742,20 +664,9 @@ class SFAuth:
             logger.warning("No queries to execute.")
             return None
 
-        self._refresh_token_if_needed()
-
-        if not self.access_token:
-            logger.error("No access token available for query.")
-            return None
-
         def _execute_batch(queries_batch):
             endpoint = f"/services/data/{self.api_version}/composite/batch"
-            headers = {
-                "Authorization": f"Bearer {self.access_token}",
-                "User-Agent": self.user_agent,
-                "Accept": "application/json",
-                "Content-Type": "application/json",
-            }
+            headers = self._get_common_headers()
 
             payload = {
                 "haltOnError": False,
@@ -768,75 +679,65 @@ class SFAuth:
                 ],
             }
 
-
-
-
+            status_code, data = self._send_request(
+                method="POST",
+                endpoint=endpoint,
+                headers=headers,
+                body=json.dumps(payload),
+            )
 
-
-
-            logger.
-            logger.trace("
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        records.extend(result["result"]["records"])
-                        # Handle pagination
-                        while not result["result"].get("done", True):
-                            next_url = result["result"].get("nextRecordsUrl")
-                            if next_url:
-                                conn.request("GET", next_url, headers=headers)
-                                response = conn.getresponse()
-                                data = response.read().decode("utf-8")
-                                self._http_resp_header_logic(response)
-                                if response.status == 200:
-                                    next_results = json.loads(data)
-                                    records.extend(next_results.get("records", []))
-                                    result["result"]["done"] = next_results.get("done")
-                                else:
-                                    logger.error(
-                                        "Failed to fetch next records: %s",
-                                        response.reason,
-                                    )
-                                    break
-                            else:
-                                result["result"]["done"] = True
-                        paginated_results = result["result"]
-                        paginated_results["records"] = records
-                        if "nextRecordsUrl" in paginated_results:
-                            del paginated_results["nextRecordsUrl"]
-                        batch_results[keys[i]] = paginated_results
-                        if result.get("statusCode") != 200:
-                            logger.error("Query failed for key %s: %s", keys[i], result)
-                            logger.error(
-                                "Query failed with HTTP status %s (%s)",
-                                result.get("statusCode"),
-                                result.get("statusMessage"),
+            batch_results = {}
+            if status_code == 200:
+                logger.debug("Composite query successful.")
+                logger.trace("Composite query full response: %s", data)
+                results = json.loads(data).get("results", [])
+                for i, result in enumerate(results):
+                    records = []
+                    if "result" in result and "records" in result["result"]:
+                        records.extend(result["result"]["records"])
+                        # Handle pagination
+                        while not result["result"].get("done", True):
+                            headers = self._get_common_headers()  # handles token refresh
+                            next_url = result["result"].get("nextRecordsUrl")
+                            if next_url:
+                                status_code, next_data = self._send_request(
+                                    method="GET",
+                                    endpoint=next_url,
+                                    headers=headers,
                             )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                                if status_code == 200:
+                                    next_results = json.loads(next_data)
+                                    records.extend(next_results.get("records", []))
+                                    result["result"]["done"] = next_results.get("done")
+                                else:
+                                    logger.error(
+                                        "Failed to fetch next records: %s",
+                                        next_data,
+                                    )
+                                    break
+                            else:
+                                result["result"]["done"] = True
+                        paginated_results = result["result"]
+                        paginated_results["records"] = records
+                        if "nextRecordsUrl" in paginated_results:
+                            del paginated_results["nextRecordsUrl"]
+                        batch_results[keys[i]] = paginated_results
+                        if result.get("statusCode") != 200:
+                            logger.error("Query failed for key %s: %s", keys[i], result)
+                            logger.error(
+                                "Query failed with HTTP status %s (%s)",
+                                result.get("statusCode"),
+                                result.get("statusMessage"),
+                            )
+                            logger.trace("Query response: %s", result)
+            else:
+                logger.error(
+                    "Composite query failed with HTTP status %s (%s)",
+                    status_code,
+                    data,
+                )
+                batch_results[keys[i]] = data
+                logger.trace("Composite query response: %s", data)
 
             return batch_results
 
@@ -845,8 +746,9 @@ class SFAuth:
 
         with ThreadPoolExecutor(max_workers=max_workers) as executor:
             futures = []
-
-
+            BATCH_SIZE = 25
+            for i in range(0, len(keys), BATCH_SIZE):
+                batch_keys = keys[i : i + BATCH_SIZE]
                 batch_queries = [query_dict[key] for key in batch_keys]
                 futures.append(executor.submit(_execute_batch, batch_queries))
 
@@ -855,229 +757,3 @@ class SFAuth:
 
         logger.trace("Composite query results: %s", results_dict)
         return results_dict
-
-    def _reconnect_with_backoff(self, attempt: int) -> None:
-        wait_time = min(2**attempt, 60)
-        logger.warning(
-            f"Reconnecting after failure, backoff {wait_time}s (attempt {attempt})"
-        )
-        time.sleep(wait_time)
-
-    def _subscribe_topic(
-        self,
-        topic: str,
-        queue_timeout: int = 90,
-        max_runtime: Optional[int] = None,
-    ):
-        """
-        Yields events from a subscribed Salesforce CometD topic.
-
-        :param topic: Topic to subscribe to, e.g. '/event/MyEvent__e'
-        :param queue_timeout: Seconds to wait for a message before logging heartbeat
-        :param max_runtime: Max total time to listen in seconds (None = unlimited)
-        """
-        warnings.warn(
-            "The _subscribe_topic method is experimental and subject to change in future versions.",
-            ExperimentalWarning,
-            stacklevel=2,
-        )
-
-        self._refresh_token_if_needed()
-        self._msg_count: int = 0
-
-        if not self.access_token:
-            logger.error("No access token available for event stream.")
-            return
-
-        start_time = time.time()
-        message_queue = Queue()
-        headers = {
-            "Authorization": f"Bearer {self.access_token}",
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-            "User-Agent": self.user_agent,
-        }
-
-        parsed_url = urlparse(self.instance_url)
-        conn = self._create_connection(parsed_url.netloc)
-        _API_VERSION = str(self.api_version).removeprefix("v")
-        client_id = str()
-
-        try:
-            logger.trace("Starting handshake with Salesforce CometD server.")
-            handshake_payload = json.dumps(
-                {
-                    "id": str(self._msg_count + 1),
-                    "version": "1.0",
-                    "minimumVersion": "1.0",
-                    "channel": "/meta/handshake",
-                    "supportedConnectionTypes": ["long-polling"],
-                    "advice": {"timeout": 60000, "interval": 0},
-                }
-            )
-            conn.request(
-                "POST",
-                f"/cometd/{_API_VERSION}/meta/handshake",
-                headers=headers,
-                body=handshake_payload,
-            )
-            response = conn.getresponse()
-            self._http_resp_header_logic(response)
-
-            logger.trace("Received handshake response.")
-            for name, value in response.getheaders():
-                if name.lower() == "set-cookie" and "BAYEUX_BROWSER=" in value:
-                    _bayeux_browser_cookie = value.split("BAYEUX_BROWSER=")[1].split(
-                        ";"
-                    )[0]
-                    headers["Cookie"] = f"BAYEUX_BROWSER={_bayeux_browser_cookie}"
-                    break
-
-            data = json.loads(response.read().decode("utf-8"))
-            if not data or not data[0].get("successful"):
-                logger.error("Handshake failed: %s", data)
-                return
-
-            client_id = data[0]["clientId"]
-            logger.trace(f"Handshake successful, client ID: {client_id}")
-
-            logger.trace(f"Subscribing to topic: {topic}")
-            subscribe_message = {
-                "channel": "/meta/subscribe",
-                "clientId": client_id,
-                "subscription": topic,
-                "id": str(self._msg_count + 1),
-            }
-            conn.request(
-                "POST",
-                f"/cometd/{_API_VERSION}/meta/subscribe",
-                headers=headers,
-                body=json.dumps(subscribe_message),
-            )
-            response = conn.getresponse()
-            self._http_resp_header_logic(response)
-
-            sub_response = json.loads(response.read().decode("utf-8"))
-            if not sub_response or not sub_response[0].get("successful"):
-                logger.error("Subscription failed: %s", sub_response)
-                return
-
-            logger.info(f"Successfully subscribed to topic: {topic}")
-            logger.trace("Entering event polling loop.")
-
-            try:
-                while True:
-                    if max_runtime and (time.time() - start_time > max_runtime):
-                        logger.info(
-                            f"Disconnecting after max_runtime={max_runtime} seconds"
-                        )
-                        break
-
-                    logger.trace("Sending connection message.")
-                    connect_payload = json.dumps(
-                        [
-                            {
-                                "channel": "/meta/connect",
-                                "clientId": client_id,
-                                "connectionType": "long-polling",
-                                "id": str(self._msg_count + 1),
-                            }
-                        ]
-                    )
-
-                    max_retries = 5
-                    attempt = 0
-
-                    while attempt < max_retries:
-                        try:
-                            conn.request(
-                                "POST",
-                                f"/cometd/{_API_VERSION}/meta/connect",
-                                headers=headers,
-                                body=connect_payload,
-                            )
-                            response = conn.getresponse()
-                            self._http_resp_header_logic(response)
-                            self._msg_count += 1
-
-                            events = json.loads(response.read().decode("utf-8"))
-                            for event in events:
-                                if event.get("channel") == topic and "data" in event:
-                                    logger.trace(
-                                        f"Event received for topic {topic}, data: {event['data']}"
-                                    )
-                                    message_queue.put(event)
-                            break
-                        except (
-                            http.client.RemoteDisconnected,
-                            ConnectionResetError,
-                            TimeoutError,
-                            http.client.BadStatusLine,
-                            http.client.CannotSendRequest,
-                            ConnectionAbortedError,
-                            ConnectionRefusedError,
-                            ConnectionError,
-                        ) as e:
-                            logger.warning(
-                                f"Connection error (attempt {attempt + 1}): {e}"
-                            )
-                            conn.close()
-                            conn = self._create_connection(parsed_url.netloc)
-                            self._reconnect_with_backoff(attempt)
-                            attempt += 1
-                        except Exception as e:
-                            logger.exception(
-                                f"Connection error (attempt {attempt + 1}): {e}"
-                            )
-                            break
-                    else:
-                        logger.error("Max retries reached. Exiting event stream.")
-                        break
-
-                    while True:
-                        try:
-                            msg = message_queue.get(timeout=queue_timeout, block=True)
-                            yield msg
-                        except Empty:
-                            logger.debug(
-                                f"Heartbeat: no message in last {queue_timeout} seconds"
-                            )
-                            break
-            except KeyboardInterrupt:
-                logger.info("Received keyboard interrupt, disconnecting...")
-
-            except Exception as e:
-                logger.exception(f"Polling error: {e}")
-
-        finally:
-            if client_id:
-                try:
-                    logger.trace(
-                        f"Disconnecting from server with client ID: {client_id}"
-                    )
-                    disconnect_payload = json.dumps(
-                        [
-                            {
-                                "channel": "/meta/disconnect",
-                                "clientId": client_id,
-                                "id": str(self._msg_count + 1),
-                            }
-                        ]
-                    )
-                    conn.request(
-                        "POST",
-                        f"/cometd/{_API_VERSION}/meta/disconnect",
-                        headers=headers,
-                        body=disconnect_payload,
-                    )
-                    response = conn.getresponse()
-                    self._http_resp_header_logic(response)
-                    _ = response.read()
-                    logger.trace("Disconnected successfully.")
-                except Exception as e:
-                    logger.warning(f"Exception during disconnect: {e}")
-            if conn:
-                logger.trace("Closing connection.")
-                conn.close()
-
-        logger.trace("Leaving event polling loop.")
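Taken together, the __init__.py changes route every call through two new helpers: _get_common_headers(), which refreshes the access token when needed and stamps the Sforce-Call-Options header, and _send_request(), which returns a (status, body) tuple instead of logging and parsing at each call site. A rough sketch of the calling pattern the refactored methods now share; auth stands for an already-constructed SFAuth instance and the limits endpoint is used purely as an example:

    import json

    headers = auth._get_common_headers()  # refreshes the token if required
    status, body = auth._send_request(
        "GET",
        f"/services/data/{auth.api_version}/limits",
        headers,
    )
    # Callers check the status themselves; the helpers only log failures.
    limits = json.loads(body) if status == 200 else None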
sfq/_cometd.py
ADDED
@@ -0,0 +1,297 @@
+import http.client
+import json
+import logging
+import time
+from typing import Any, Optional
+import warnings
+from queue import Empty, Queue
+
+TRACE = 5
+logging.addLevelName(TRACE, "TRACE")
+
+class ExperimentalWarning(Warning):
+    pass
+
+
+def trace(self: logging.Logger, message: str, *args: Any, **kwargs: Any) -> None:
+    """Custom TRACE level logging function with redaction."""
+
+    def _redact_sensitive(data: Any) -> Any:
+        """Redacts sensitive keys from a dictionary or query string."""
+        REDACT_VALUE = "*" * 8
+        REDACT_KEYS = [
+            "access_token",
+            "authorization",
+            "set-cookie",
+            "cookie",
+            "refresh_token",
+        ]
+        if isinstance(data, dict):
+            return {
+                k: (REDACT_VALUE if k.lower() in REDACT_KEYS else v)
+                for k, v in data.items()
+            }
+        elif isinstance(data, (list, tuple)):
+            return type(data)(
+                (
+                    (item[0], REDACT_VALUE)
+                    if isinstance(item, tuple) and item[0].lower() in REDACT_KEYS
+                    else item
+                    for item in data
+                )
+            )
+        elif isinstance(data, str):
+            parts = data.split("&")
+            for i, part in enumerate(parts):
+                if "=" in part:
+                    key, value = part.split("=", 1)
+                    if key.lower() in REDACT_KEYS:
+                        parts[i] = f"{key}={REDACT_VALUE}"
+            return "&".join(parts)
+        return data
+
+    redacted_args = args
+    if args:
+        first = args[0]
+        if isinstance(first, str):
+            try:
+                loaded = json.loads(first)
+                first = loaded
+            except (json.JSONDecodeError, TypeError):
+                pass
+        redacted_first = _redact_sensitive(first)
+        redacted_args = (redacted_first,) + args[1:]
+
+    if self.isEnabledFor(TRACE):
+        self._log(TRACE, message, redacted_args, **kwargs)
+
+
+logging.Logger.trace = trace
+logger = logging.getLogger("sfq")
+
+def _reconnect_with_backoff(self, attempt: int) -> None:
+    wait_time = min(2**attempt, 60)
+    logger.warning(
+        f"Reconnecting after failure, backoff {wait_time}s (attempt {attempt})"
+    )
+    time.sleep(wait_time)
+
+def _subscribe_topic(
+    self,
+    topic: str,
+    queue_timeout: int = 90,
+    max_runtime: Optional[int] = None,
+):
+    """
+    Yields events from a subscribed Salesforce CometD topic.
+
+    :param topic: Topic to subscribe to, e.g. '/event/MyEvent__e'
+    :param queue_timeout: Seconds to wait for a message before logging heartbeat
+    :param max_runtime: Max total time to listen in seconds (None = unlimited)
+    """
+    warnings.warn(
+        "The _subscribe_topic method is experimental and subject to change in future versions.",
+        ExperimentalWarning,
+        stacklevel=2,
+    )
+
+    self._refresh_token_if_needed()
+    self._msg_count: int = 0
+
+    if not self.access_token:
+        logger.error("No access token available for event stream.")
+        return
+
+    start_time = time.time()
+    message_queue = Queue()
+    headers = {
+        "Authorization": f"Bearer {self.access_token}",
+        "Content-Type": "application/json",
+        "Accept": "application/json",
+        "User-Agent": self.user_agent,
+        "Sforce-Call-Options": f"client={self.sforce_client}",
+    }
+
+    parsed_url = urlparse(self.instance_url)
+    conn = self._create_connection(parsed_url.netloc)
+    _API_VERSION = str(self.api_version).removeprefix("v")
+    client_id = str()
+
+    try:
+        logger.trace("Starting handshake with Salesforce CometD server.")
+        handshake_payload = json.dumps(
+            {
+                "id": str(self._msg_count + 1),
+                "version": "1.0",
+                "minimumVersion": "1.0",
+                "channel": "/meta/handshake",
+                "supportedConnectionTypes": ["long-polling"],
+                "advice": {"timeout": 60000, "interval": 0},
+            }
+        )
+        conn.request(
+            "POST",
+            f"/cometd/{_API_VERSION}/meta/handshake",
+            headers=headers,
+            body=handshake_payload,
+        )
+        response = conn.getresponse()
+        self._http_resp_header_logic(response)
+
+        logger.trace("Received handshake response.")
+        for name, value in response.getheaders():
+            if name.lower() == "set-cookie" and "BAYEUX_BROWSER=" in value:
+                _bayeux_browser_cookie = value.split("BAYEUX_BROWSER=")[1].split(
+                    ";"
+                )[0]
+                headers["Cookie"] = f"BAYEUX_BROWSER={_bayeux_browser_cookie}"
+                break
+
+        data = json.loads(response.read().decode("utf-8"))
+        if not data or not data[0].get("successful"):
+            logger.error("Handshake failed: %s", data)
+            return
+
+        client_id = data[0]["clientId"]
+        logger.trace(f"Handshake successful, client ID: {client_id}")
+
+        logger.trace(f"Subscribing to topic: {topic}")
+        subscribe_message = {
+            "channel": "/meta/subscribe",
+            "clientId": client_id,
+            "subscription": topic,
+            "id": str(self._msg_count + 1),
+        }
+        conn.request(
+            "POST",
+            f"/cometd/{_API_VERSION}/meta/subscribe",
+            headers=headers,
+            body=json.dumps(subscribe_message),
+        )
+        response = conn.getresponse()
+        self._http_resp_header_logic(response)
+
+        sub_response = json.loads(response.read().decode("utf-8"))
+        if not sub_response or not sub_response[0].get("successful"):
+            logger.error("Subscription failed: %s", sub_response)
+            return
+
+        logger.info(f"Successfully subscribed to topic: {topic}")
+        logger.trace("Entering event polling loop.")
+
+        try:
+            while True:
+                if max_runtime and (time.time() - start_time > max_runtime):
+                    logger.info(
+                        f"Disconnecting after max_runtime={max_runtime} seconds"
+                    )
+                    break
+
+                logger.trace("Sending connection message.")
+                connect_payload = json.dumps(
+                    [
+                        {
+                            "channel": "/meta/connect",
+                            "clientId": client_id,
+                            "connectionType": "long-polling",
+                            "id": str(self._msg_count + 1),
+                        }
+                    ]
+                )
+
+                max_retries = 5
+                attempt = 0
+
+                while attempt < max_retries:
+                    try:
+                        conn.request(
+                            "POST",
+                            f"/cometd/{_API_VERSION}/meta/connect",
+                            headers=headers,
+                            body=connect_payload,
+                        )
+                        response = conn.getresponse()
+                        self._http_resp_header_logic(response)
+                        self._msg_count += 1
+
+                        events = json.loads(response.read().decode("utf-8"))
+                        for event in events:
+                            if event.get("channel") == topic and "data" in event:
+                                logger.trace(
+                                    f"Event received for topic {topic}, data: {event['data']}"
+                                )
+                                message_queue.put(event)
+                        break
+                    except (
+                        http.client.RemoteDisconnected,
+                        ConnectionResetError,
+                        TimeoutError,
+                        http.client.BadStatusLine,
+                        http.client.CannotSendRequest,
+                        ConnectionAbortedError,
+                        ConnectionRefusedError,
+                        ConnectionError,
+                    ) as e:
+                        logger.warning(
+                            f"Connection error (attempt {attempt + 1}): {e}"
+                        )
+                        conn.close()
+                        conn = self._create_connection(parsed_url.netloc)
+                        self._reconnect_with_backoff(attempt)
+                        attempt += 1
+                    except Exception as e:
+                        logger.exception(
+                            f"Connection error (attempt {attempt + 1}): {e}"
+                        )
+                        break
+                else:
+                    logger.error("Max retries reached. Exiting event stream.")
+                    break
+
+                while True:
+                    try:
+                        msg = message_queue.get(timeout=queue_timeout, block=True)
+                        yield msg
+                    except Empty:
+                        logger.debug(
+                            f"Heartbeat: no message in last {queue_timeout} seconds"
+                        )
+                        break
+        except KeyboardInterrupt:
+            logger.info("Received keyboard interrupt, disconnecting...")
+
+        except Exception as e:
+            logger.exception(f"Polling error: {e}")
+
+    finally:
+        if client_id:
+            try:
+                logger.trace(
+                    f"Disconnecting from server with client ID: {client_id}"
+                )
+                disconnect_payload = json.dumps(
+                    [
+                        {
+                            "channel": "/meta/disconnect",
+                            "clientId": client_id,
+                            "id": str(self._msg_count + 1),
+                        }
+                    ]
+                )
+                conn.request(
+                    "POST",
+                    f"/cometd/{_API_VERSION}/meta/disconnect",
+                    headers=headers,
+                    body=disconnect_payload,
+                )
+                response = conn.getresponse()
+                self._http_resp_header_logic(response)
+                _ = response.read()
+                logger.trace("Disconnected successfully.")
+            except Exception as e:
+                logger.warning(f"Exception during disconnect: {e}")
+        if conn:
+            logger.trace("Closing connection.")
+            conn.close()
+
+    logger.trace("Leaving event polling loop.")
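The new sfq/_cometd.py module carries the experimental streaming helpers (_reconnect_with_backoff and _subscribe_topic) that were removed from __init__.py above; _subscribe_topic is a generator that performs the CometD handshake, subscribes to a topic, long-polls /meta/connect, and yields event dictionaries. A hedged sketch of consuming it, assuming the helper remains reachable from an SFAuth instance as it was in 0.0.13; the topic name comes from the docstring's own example and the payload shape follows the usual platform-event envelope:

    # Experimental API: emits ExperimentalWarning and may change between releases.
    for event in auth._subscribe_topic("/event/MyEvent__e", queue_timeout=90, max_runtime=300):
        payload = event.get("data", {}).get("payload", {})
        print(payload)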
sfq-0.0.15.dist-info/RECORD
ADDED
@@ -0,0 +1,6 @@
+sfq/__init__.py,sha256=b70qbaov94JC7qWHuJA6X0i6O-H145YS-_vlyPzWig4,29895
+sfq/_cometd.py,sha256=XimQEubmJwUmbWe85TxH_cuhGvWVuiHHrVr41tguuiI,10508
+sfq/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sfq-0.0.15.dist-info/METADATA,sha256=ipG9mLqnwZwGp6gUGSbggP_LNl80YcGPpM1_fYlS7Vo,6598
+sfq-0.0.15.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sfq-0.0.15.dist-info/RECORD,,
sfq-0.0.13.dist-info/RECORD
DELETED
@@ -1,5 +0,0 @@
-sfq/__init__.py,sha256=Lb3nk6IyyC5_qNy6xOWwfp36Zybtfs8jeGgeiUAbESA,42921
-sfq/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sfq-0.0.13.dist-info/METADATA,sha256=dBV2044TZgirMKp6VLg9uIZ0dqKq3eL0G1p8w94sGJs,6598
-sfq-0.0.13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-sfq-0.0.13.dist-info/RECORD,,
{sfq-0.0.13.dist-info → sfq-0.0.15.dist-info}/WHEEL
File without changes