diaspora-event-sdk 0.0.16__py3-none-any.whl → 0.0.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- diaspora_event_sdk/__init__.py +7 -2
- diaspora_event_sdk/sdk/kafka_client.py +57 -4
- diaspora_event_sdk/sdk/login_manager/tokenstore.py +5 -29
- diaspora_event_sdk/version.py +1 -1
- {diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/METADATA +33 -15
- {diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/RECORD +9 -9
- {diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/LICENSE +0 -0
- {diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/WHEEL +0 -0
- {diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/top_level.txt +0 -0
diaspora_event_sdk/__init__.py
CHANGED
@@ -7,5 +7,10 @@ __author__ = "The Diaspora Event Team"
 __version__ = _version
 
 from diaspora_event_sdk.sdk.client import Client
-from diaspora_event_sdk.sdk.kafka_client import
-
+from diaspora_event_sdk.sdk.kafka_client import (
+    KafkaProducer,
+    KafkaConsumer,
+    block_until_ready,
+)
+
+__all__ = ("Client", "KafkaProducer", "KafkaConsumer", "block_until_ready")
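With the expanded `__all__`, the 0.0.18 top level re-exports the Kafka helpers alongside `Client`. Below is a minimal sketch of that surface, not taken from the package docs: it assumes the `kafka-python` extra is installed, a Globus login has already completed, and `"my_topic"` stands in for a topic you have registered.

```python
# Minimal sketch of the 0.0.18 top-level imports; "my_topic" is a placeholder.
from diaspora_event_sdk import Client, KafkaProducer, KafkaConsumer, block_until_ready

c = Client()
print(c.list_topics())            # topics this identity holds ACLs for

if block_until_ready():           # new in 0.0.18: wait for credentials to propagate
    producer = KafkaProducer()    # preconfigured for the Diaspora cluster
    consumer = KafkaConsumer("my_topic")
```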

diaspora_event_sdk/sdk/kafka_client.py
CHANGED
@@ -1,6 +1,7 @@
 import json
 from typing import Dict, Any
 import warnings
+import time
 
 from ._environments import MSK_SCRAM_ENDPOINT
 from .client import Client
@@ -8,8 +9,8 @@ from .client import Client
 # If kafka-python is not installed, Kafka functionality is not available through diaspora-event-sdk.
 kafka_available = True
 try:
-    from kafka import KafkaProducer as KProd  # type: ignore[import-not-found]
-    from kafka import KafkaConsumer as KCons  # type: ignore[import-not-found]
+    from kafka import KafkaProducer as KProd  # type: ignore[import,import-not-found]
+    from kafka import KafkaConsumer as KCons  # type: ignore[import,import-not-found]
 
 except ImportError:
     kafka_available = False
@@ -38,6 +39,7 @@ def get_diaspora_config(extra_configs: Dict[str, Any] = {}) -> Dict[str, Any]:
 
 
 if kafka_available:
+
     class KafkaProducer(KProd):
         def __init__(self, **configs):
             configs.setdefault(
@@ -53,9 +55,60 @@ else:
     class KafkaProducer:  # type: ignore[no-redef]
         def __init__(self, *args, **kwargs):
             warnings.warn(
-                "KafkaProducer is not available. Initialization is a no-op.",
+                "KafkaProducer is not available. Initialization is a no-op.",
+                RuntimeWarning,
+            )
 
     class KafkaConsumer:  # type: ignore[no-redef]
         def __init__(self, *args, **kwargs):
             warnings.warn(
-                "KafkaConsumer is not available. Initialization is a no-op.",
+                "KafkaConsumer is not available. Initialization is a no-op.",
+                RuntimeWarning,
+            )
+
+
+# TODO: mypy diaspora_event_sdk/sdk/kafka_client.py --disallow-untyped-defs
+def block_until_ready(max_minutes=5):
+    """
+    Test Kafka producer and consumer connections.
+    By default, this method blocks for five minutes before giving up.
+    It returns a boolean that indicates whether the connections can be successfully established.
+    """
+
+    def producer_connection_test(result):
+        try:
+            producer = KafkaProducer(max_block_ms=10 * 1000)
+            future = producer.send(
+                topic="__connection_test",
+                value={"message": "Synchronous message from Diaspora SDK"},
+            )
+            result["producer_connection_test"] = future.get(timeout=10)
+        except Exception as e:
+            pass
+
+    def consumer_connection_test(result):
+        try:
+            consumer = KafkaConsumer(
+                "__connection_test",
+                consumer_timeout_ms=10 * 1000,
+                auto_offset_reset="earliest",
+            )
+            for msg in consumer:
+                result["consumer_connection_test"] = msg
+                break
+        except Exception as e:
+            pass
+
+    result, retry_count = {}, 0
+    start_time = time.time()
+    while len(result) < 2:  # two tests
+        if retry_count > 0:
+            print(f"Block until connected or timed out ({max_minutes} minutes)... retry count:", retry_count, ", time passed:", int(time.time() - start_time), "seconds")
+        producer_connection_test(result)
+        consumer_connection_test(result)
+        retry_count += 1
+        elapsed_time = time.time() - start_time
+        if elapsed_time >= max_minutes * 60:
+            print("Time limit exceeded. Exiting loop.")
+            return False
+    return True
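A usage note on the helper added above: it polls a producer and a consumer against the `__connection_test` topic, returns `True` once both succeed, and returns `False` instead of raising once `max_minutes` elapses. A short sketch of driving it with a non-default budget (the error handling is ours, not the SDK's):

```python
# block_until_ready() reports failure via its return value, so the caller
# decides whether to retry, assert, or exit; exiting here is illustrative.
from diaspora_event_sdk import block_until_ready

if not block_until_ready(max_minutes=2):   # tighter budget than the 5-minute default
    raise SystemExit("Kafka credentials not ready yet; try again once provisioning finishes")
```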

diaspora_event_sdk/sdk/login_manager/tokenstore.py
CHANGED
@@ -18,29 +18,10 @@ def _home() -> pathlib.Path:
     return pathlib.Path.home()
 
 
-def
-
-
-
-    try:
-        auth_client = internal_auth_client()
-        with open(token_file) as fp:
-            data = json.load(fp)
-            for token_data in data.values():
-                if "access_token" in token_data:
-                    auth_client.oauth2_revoke_token(token_data["access_token"])
-                if "refresh_token" in token_data:
-                    auth_client.oauth2_revoke_token(
-                        token_data["refresh_token"])
-    finally:
-        os.remove(token_file)
-
-
-def ensure_compute_dir() -> pathlib.Path:
-    legacy_dirname = _home() / ".funcx"
-    dirname = _home() / ".globus_compute"
-
-    user_dir = os.getenv("GLOBUS_COMPUTE_USER_DIR")
+def ensure_diaspora_dir() -> pathlib.Path:
+    dirname = _home() / ".diaspora"
+
+    user_dir = os.getenv("DIASPORA_USER_DIR")
     if user_dir:
         dirname = pathlib.Path(user_dir)
 
@@ -51,9 +32,6 @@ def ensure_compute_dir() -> pathlib.Path:
             f"Error creating directory {dirname}, "
            "please remove or rename the conflicting file"
        )
-    elif legacy_dirname.is_dir() and not user_dir:
-        legacy_dirname.replace(dirname)
-        legacy_dirname.symlink_to(dirname, target_is_directory=True)
     else:
         dirname.mkdir(mode=0o700, parents=True, exist_ok=True)
 
@@ -61,7 +39,7 @@ def ensure_compute_dir() -> pathlib.Path:
 
 
 def _get_storage_filename():
-    datadir =
+    datadir = ensure_diaspora_dir()
     return os.path.join(datadir, "storage.db")
 
 
@@ -90,8 +68,6 @@ def get_token_storage_adapter(*, environment: str | None = None) -> SQLiteAdapte
     # when initializing the token storage adapter, check if the storage file exists
     # if it does not, then use this as a flag to clean the old config
     fname = _get_storage_filename()
-    if not os.path.exists(fname):
-        invalidate_old_config()
     # namespace is equal to the current environment
     return SQLiteAdapter(
         fname,
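The net effect of this tokenstore change is that tokens now live under `~/.diaspora/storage.db`, or `$DIASPORA_USER_DIR/storage.db` when that variable is set, with no legacy `.funcx`/`.globus_compute` migration and no old-config invalidation. Below is a standalone sketch of the equivalent path resolution; the helper name `storage_path` is illustrative, not part of the SDK.

```python
# Standalone sketch mirroring the resolution logic in the diff above.
# `storage_path` is an illustrative name, not an SDK function.
import os
import pathlib


def storage_path() -> pathlib.Path:
    user_dir = os.getenv("DIASPORA_USER_DIR")
    dirname = pathlib.Path(user_dir) if user_dir else pathlib.Path.home() / ".diaspora"
    return dirname / "storage.db"


print(storage_path())
```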
diaspora_event_sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.0.16"
+__version__ = "0.0.18"

{diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: diaspora-event-sdk
-Version: 0.0.16
+Version: 0.0.18
 Summary: SDK of Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
 Home-page: https://github.com/globus-labs/diaspora-event-sdk
 License: LICENSE
@@ -9,26 +9,36 @@ License-File: LICENSE
 Requires-Dist: globus-sdk <4,>=3.20.1
 Provides-Extra: kafka-python
 Requires-Dist: kafka-python ; extra == 'kafka-python'
+Provides-Extra: test
+Requires-Dist: pytest ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: coverage ; extra == 'test'
+Requires-Dist: mypy ; extra == 'test'
+Requires-Dist: tox ; extra == 'test'
+Requires-Dist: check-manifest ; extra == 'test'
 
 <h1>Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge</h1>
 
 - [Installation](#installation)
-
-
+  * [Recommended Installation with Kafka Client Library](#recommended-installation-with-kafka-client-library)
+  * [Installation Without Kafka Client Library](#installation-without-kafka-client-library)
 - [Use Diaspora Event SDK](#use-diaspora-event-sdk)
-
-
-
-
-
-
-
-
-
-
+  * [Use the SDK to communicate with Kafka (kafka-python Required)](#use-the-sdk-to-communicate-with-kafka--kafka-python-required-)
+    + [Register Topic (create topic ACLs)](#register-topic--create-topic-acls-)
+    + [Block Until Ready](#block-until-ready)
+    + [Start Producer](#start-producer)
+    + [Start Consumer](#start-consumer)
+    + [Unregister Topic (remove topic ACLs)](#unregister-topic--remove-topic-acls-)
+  * [Use Your Preferred Kafka Client Library](#use-your-preferred-kafka-client-library)
+    + [Register and Unregister Topic](#register-and-unregister-topic)
+    + [Cluster Connection Details](#cluster-connection-details)
+  * [Advanced Usage](#advanced-usage)
+    + [Password Refresh](#password-refresh)
 - [Common Issues](#common-issues)
-
-
+  * [ImportError: cannot import name 'KafkaProducer' from 'diaspora_event_sdk'](#importerror--cannot-import-name--kafkaproducer--from--diaspora-event-sdk-)
+  * [kafka.errors.NoBrokersAvailable and kafka.errors.NodeNotReadyError](#kafkaerrorsnobrokersavailable-and-kafkaerrorsnodenotreadyerror)
+  * [kafka.errors.KafkaTimeoutError: KafkaTimeoutError: Failed to update metadata after 60.0 secs.](#kafkaerrorskafkatimeouterror--kafkatimeouterror--failed-to-update-metadata-after-600-secs)
+  * [ssl.SSLCertVerificationError](#sslsslcertverificationerror)
 
 
 ## Installation
@@ -65,6 +75,14 @@ print(c.list_topics())
 ```
 Register a topic also creates it, if the topic previously does not exist.
 
+#### Block Until Ready
+`KafkaProducer` and `KafkaConsumer` would internally call `create_key` if the the connection credential is not found locally (e.g., when you first authenticated with Globus). Behind the sence, the middle service contacts AWS to initialize the asynchronous process of creating and associating the secret. The method below blocks until the credential is ready to be used by producer and consumer. When the method finishes, it returns True and the producer and consumer code below should work without further waiting. By default, the method retries in loop for five minutes before giving up and return False. Use parameter `max_minutes` to change the number of minutes of max waiting.
+
+```python
+from diaspora_event_sdk import block_until_ready
+assert block_until_ready()
+```
+
 #### Start Producer
 
 Once the topic is created we can publish to it. The KafkaProducer wraps the [Python KafkaProducer](https://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html) Event publication can be either synchronous or asynchronous. Below demonstrates the synchronous approach.
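For reference, the synchronous publish that the README's "Start Producer" section describes looks roughly like the sketch below, reusing the `send(...)`/`future.get(...)` calls visible in the kafka_client.py diff; `"my_topic"` is a placeholder for a topic already registered through `Client`.

```python
# Hedged sketch of a synchronous publish; "my_topic" must already be registered.
from diaspora_event_sdk import KafkaProducer, block_until_ready

assert block_until_ready()                 # credentials provisioned and usable
producer = KafkaProducer()
future = producer.send(topic="my_topic", value={"message": "hello from 0.0.18"})
print(future.get(timeout=10))              # block until the broker acknowledges
```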

{diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/RECORD
CHANGED
@@ -1,10 +1,10 @@
-diaspora_event_sdk/__init__.py,sha256=
-diaspora_event_sdk/version.py,sha256=
+diaspora_event_sdk/__init__.py,sha256=v8IN3-WFpliakQKru8TAcmQ4IRdvRe_m9-abSDnGIFM,457
+diaspora_event_sdk/version.py,sha256=qgKF-lRlzaBqf95e1sodHCZkSUCbz7ECKSeYHwXfAvI,23
 diaspora_event_sdk/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diaspora_event_sdk/sdk/_environments.py,sha256=UwzEeBVRuP7ZyqTJagNVA24EiCLuOYiy395q608AwQ0,358
 diaspora_event_sdk/sdk/client.py,sha256=2KMUdnOEtSe1uV3rpRZHd5tLOFgC_fztzny1XxL4rh8,3496
 diaspora_event_sdk/sdk/decorators.py,sha256=Gel8AyhIjbf4-FNintTNcOqvC9hHH_YwbOH257Nfmf0,884
-diaspora_event_sdk/sdk/kafka_client.py,sha256=
+diaspora_event_sdk/sdk/kafka_client.py,sha256=e_c9M1L-HAV346iwR7tsH7ZZUON1tgSDCGlOkvNKdWQ,3959
 diaspora_event_sdk/sdk/web_client.py,sha256=RCXZhMtPBe53c4-MmyBiFIdqsmcTOnV3ashaaQHKMew,1325
 diaspora_event_sdk/sdk/login_manager/__init__.py,sha256=yeqVgjeHLMX0WZJu2feJmq-fbeXvSxWghVV81ygfY-w,239
 diaspora_event_sdk/sdk/login_manager/client_login.py,sha256=gvR4PkIqQpIywNieJQ_u11PHUmdLxQ0Ho-QgPSfu8bw,1798
@@ -13,13 +13,13 @@ diaspora_event_sdk/sdk/login_manager/globus_auth.py,sha256=9Hymp0tv91OI5dBMUgh4r
 diaspora_event_sdk/sdk/login_manager/login_flow.py,sha256=2TodgsvlEYPoZPQPkp6FHOC9IkSM07pS7MIVGS4MZNE,954
 diaspora_event_sdk/sdk/login_manager/manager.py,sha256=uJHosI7ipqGt5KiL-iv2IGB4bNyZ6jQZQi7g4ka0pts,7052
 diaspora_event_sdk/sdk/login_manager/protocol.py,sha256=RCuo2jy_XkpZvbxnKlDfTKs-L6b9_8_JR-Kq9wHwhoM,710
-diaspora_event_sdk/sdk/login_manager/tokenstore.py,sha256=
+diaspora_event_sdk/sdk/login_manager/tokenstore.py,sha256=Kq0IZGf9G9dE44yoyUZInod5xL_8caN9OugeTK6GBGg,2094
 diaspora_event_sdk/sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diaspora_event_sdk/sdk/utils/uuid_like.py,sha256=xbxf0YXpDhdii16lwPLWRN21qFekHrNrqODSToMPtCg,470
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/unit/test_client.py,sha256=KjqXW7Mm3fm17lqhYdixL8s50z83F6g7MGKcOXJIyFc,2635
-diaspora_event_sdk-0.0.
-diaspora_event_sdk-0.0.
-diaspora_event_sdk-0.0.
-diaspora_event_sdk-0.0.
-diaspora_event_sdk-0.0.
+diaspora_event_sdk-0.0.18.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+diaspora_event_sdk-0.0.18.dist-info/METADATA,sha256=4YlQ90MxHz4T1ILh2wrj_H97ouosqI_OX1OcRm7SQsI,9542
+diaspora_event_sdk-0.0.18.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+diaspora_event_sdk-0.0.18.dist-info/top_level.txt,sha256=OVun-67t3fkLFEIwvJuNINgFFvAc--bClYhXjLhMmvs,25
+diaspora_event_sdk-0.0.18.dist-info/RECORD,,

{diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/LICENSE
File without changes

{diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/WHEEL
File without changes

{diaspora_event_sdk-0.0.16.dist-info → diaspora_event_sdk-0.0.18.dist-info}/top_level.txt
File without changes