diaspora-event-sdk 0.0.3__tar.gz → 0.0.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- diaspora-event-sdk-0.0.5/PKG-INFO +89 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/__init__.py +3 -2
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/_environments.py +7 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/client.py +17 -17
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/kafka_client.py +46 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/__init__.py +4 -1
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/manager.py +1 -1
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/login_manager/protocol.py +29 -0
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/login_manager/tokenstore.py +100 -0
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/utils/__init__.py +0 -0
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/utils/uuid_like.py +15 -0
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/web_client.py +39 -0
- diaspora-event-sdk-0.0.5/diaspora_event_sdk/version.py +1 -0
- diaspora-event-sdk-0.0.5/diaspora_event_sdk.egg-info/PKG-INFO +89 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk.egg-info/SOURCES.txt +5 -2
- diaspora-event-sdk-0.0.5/diaspora_event_sdk.egg-info/requires.txt +2 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/setup.py +1 -1
- diaspora-event-sdk-0.0.3/PKG-INFO +0 -76
- diaspora-event-sdk-0.0.3/README.md +0 -67
- diaspora-event-sdk-0.0.3/diaspora_event_sdk/sdk/kafka_client.py +0 -46
- diaspora-event-sdk-0.0.3/diaspora_event_sdk/sdk/web_client.py +0 -39
- diaspora-event-sdk-0.0.3/diaspora_event_sdk/version.py +0 -1
- diaspora-event-sdk-0.0.3/diaspora_event_sdk.egg-info/PKG-INFO +0 -76
- diaspora-event-sdk-0.0.3/diaspora_event_sdk.egg-info/requires.txt +0 -2
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/LICENSE +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/__init__.py +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/decorators.py +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/client_login.py +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/decorators.py +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/globus_auth.py +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/login_flow.py +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk.egg-info/dependency_links.txt +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk.egg-info/top_level.txt +0 -0
- {diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/setup.cfg +0 -0
diaspora-event-sdk-0.0.5/PKG-INFO
ADDED
@@ -0,0 +1,89 @@
+Metadata-Version: 2.1
+Name: diaspora-event-sdk
+Version: 0.0.5
+Summary: SDK of Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
+Home-page: https://github.com/globus-labs/diaspora-event-sdk
+License: LICENSE
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
+# Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
+
+### Install
+```bash
+pip install diaspora-event-sdk
+```
+
+## Use kafka-python
+
+### Register Topic (create topic ACLs)
+
+Before you can create, describe, and delete topics, you need to set the appropriate ACLs in ZooKeeper. Here we use the Client to register ACLs for the desired topic name.
+
+```python
+from diaspora_event_sdk import Client as GlobusClient
+c = GlobusClient()
+topic = "topic-" + c.subject_openid[-12:]
+print(c.register_topic(topic))
+print(c.list_topics())
+```
+
+### Create Topic
+
+Now use the KafkaAdmin to create the topic.
+
+```python
+from diaspora_event_sdk import KafkaAdmin, NewTopic
+admin = KafkaAdmin()
+print(admin.create_topics(new_topics=[
+    NewTopic(name=topic, num_partitions=1, replication_factor=1)]))
+```
+
+### Start Producer
+
+Once the topic is created, we can publish to it. The KafkaProducer wraps the [Python KafkaProducer](https://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html). Event publication can be either synchronous or asynchronous; the example below demonstrates the synchronous approach.
+
+```python
+from diaspora_event_sdk import KafkaProducer
+producer = KafkaProducer()
+future = producer.send(
+    topic, {'message': 'Synchronous message from Diaspora SDK'})
+result = future.get(timeout=10)
+print(result)
+```
+
+### Start Consumer
+
+A consumer can be configured to monitor the topic and act on events as they are published. The KafkaConsumer wraps the [Python KafkaConsumer](https://kafka-python.readthedocs.io/en/master/apidoc/KafkaConsumer.html). Here we use `auto_offset_reset='earliest'` to consume from the first event published to the topic; removing this field makes the consumer act only on new events.
+
+```python
+from diaspora_event_sdk import KafkaConsumer
+consumer = KafkaConsumer(topic, auto_offset_reset='earliest')
+for msg in consumer:
+    print(msg)
+```
+
+### Delete Topic
+```python
+from diaspora_event_sdk import KafkaAdmin
+admin = KafkaAdmin()
+res = admin.delete_topics(topics=[topic])
+print(res)
+```
+
+### Unregister Topic (remove topic ACLs)
+```python
+from diaspora_event_sdk import Client as GlobusClient
+c = GlobusClient()
+topic = "topic-" + c.subject_openid[-12:]
+print(c.unregister_topic(topic))
+print(c.list_topics())
+```
+
+## Use other Kafka libraries
+```python
+from diaspora_event_sdk import Client as GlobusClient
+c = GlobusClient()
+print(c.retrieve_key())
+```
+For other Kafka clients, select SASL/SCRAM authentication and use `username` and `password` as the authentication credentials. For other connection parameters, see [here]().

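That closing paragraph is the whole story for third-party clients. A minimal sketch of wiring the retrieved credentials into one, assuming confluent-kafka (which is not a dependency of this package) and reusing the broker and security settings the SDK's own wrappers apply; the topic name is a placeholder:

```python
from confluent_kafka import Producer

from diaspora_event_sdk import Client as GlobusClient
from diaspora_event_sdk.sdk._environments import MSK_SCRAM_ENDPOINT

keys = GlobusClient().retrieve_key()  # {'username': ..., 'password': ...}

# Same SASL/SCRAM settings the bundled kafka-python wrappers use,
# expressed in librdkafka's configuration names.
producer = Producer({
    "bootstrap.servers": MSK_SCRAM_ENDPOINT,
    "security.protocol": "SASL_SSL",
    "sasl.mechanism": "SCRAM-SHA-512",
    "sasl.username": keys["username"],
    "sasl.password": keys["password"],
})
producer.produce("topic-xxxxxxxxxxxx", value=b"hello from another client")
producer.flush()
```
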
{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/__init__.py
RENAMED
@@ -7,7 +7,8 @@ __author__ = "The Diaspora Event Team"
 __version__ = _version
 
 from diaspora_event_sdk.sdk.client import Client  # Globus client
-from diaspora_event_sdk.sdk.kafka_client import
+from diaspora_event_sdk.sdk.kafka_client import KafkaProducer, KafkaConsumer, KafkaAdmin, NewTopic
 
 
-__all__ = ("Client", "
+__all__ = ("Client", "KafkaProducer",
+           "KafkaConsumer", "KafkaAdmin", "NewTopic")

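The net effect is the new 0.0.5 top-level import surface; a quick illustration, with the expected output read directly from the `__all__` tuple above:

```python
import diaspora_event_sdk

print(diaspora_event_sdk.__all__)
# ('Client', 'KafkaProducer', 'KafkaConsumer', 'KafkaAdmin', 'NewTopic')
```
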
{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/_environments.py
RENAMED
@@ -1,4 +1,11 @@
+import os
+
+
 TOKEN_EXCHANGE = "http://3.220.110.101"
 DIASPORA_RESOURCE_SERVER = '2b9d2f5c-fa32-45b5-875b-b24cd343b917'
 MSK_SCRAM_ENDPOINT = "b-1-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9196,b-2-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9196"
 MSK_IAM_ENDPOINT = "b-1-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9198,b-2-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9198"
+
+
+def _get_envname():
+    return os.getenv("DIASPORA_SDK_ENVIRONMENT", "production")

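`_get_envname` feeds the token-storage namespace introduced in tokenstore.py below. A small sketch of the intended override, where "sandbox" is a made-up environment name used purely for illustration:

```python
import os

# Hypothetical environment name; defaults to "production" when unset.
os.environ["DIASPORA_SDK_ENVIRONMENT"] = "sandbox"

from diaspora_event_sdk.sdk._environments import _get_envname

print(_get_envname())  # "sandbox"
```
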
{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/client.py
RENAMED
@@ -1,8 +1,6 @@
 import json
 
-from diaspora_event_sdk.sdk.login_manager
-from diaspora_event_sdk.sdk.login_manager import requires_login
-from globus_compute_sdk.sdk.login_manager import LoginManagerProtocol
+from diaspora_event_sdk.sdk.login_manager import LoginManager, LoginManagerProtocol, requires_login
 
 from ._environments import TOKEN_EXCHANGE, DIASPORA_RESOURCE_SERVER
 
@@ -35,6 +33,9 @@ class Client:
 
     @requires_login
     def create_key(self):
+        """
+        Invalidate previous keys (if any) and generate a new one
+        """
         resp = self.web_client.create_key(self.subject_openid)
         if resp["status"] == "error":
             raise Exception("should not happen")

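The new docstring makes the rotation semantics explicit; a sketch of the caller-visible consequence, derived only from this diff:

```python
from diaspora_event_sdk import Client as GlobusClient

c = GlobusClient()

# create_key() invalidates any previously issued password and returns a
# fresh credential pair, so producers/consumers built from the old
# credentials must be recreated after this call.
cred = c.create_key()
print(cred)  # {'username': <subject openid>, 'password': <new secret>}
```
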
@@ -53,28 +54,27 @@ class Client:
             ],
         )
         self.login_manager._token_storage._connection.commit()
-        return
+        return {"username": self.subject_openid, "password": tokens['secret_key']}
 
     @requires_login
-    def
+    def retrieve_key(self):
+        """
+        Attempt to retrieve the key from local token storage, call create_key if not found
+        """
         tokens = self.login_manager._token_storage.get_token_data(
             DIASPORA_RESOURCE_SERVER)
         if "access_key" in tokens and "secret_key" in tokens:
-            return {
-
-                "access_key": tokens['access_key'],
-                "secret_key": tokens['secret_key']
-            }
-        return self.create_key()
+            return {"username": self.subject_openid, "password": tokens['secret_key']}
+        return self.retrieve_key()
 
     @requires_login
-    def
+    def list_topics(self):
+        return self.web_client.list_topics(self.subject_openid)
 
     @requires_login
-    def
+    def register_topic(self, topic):
+        return self.web_client.register_topic(self.subject_openid, topic)
 
     @requires_login
-    def
+    def unregister_topic(self, topic):
+        return self.web_client.unregister_topic(self.subject_openid, topic)

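A sketch of how the renamed accessors fit together, based only on the method bodies above:

```python
from diaspora_event_sdk import Client as GlobusClient

c = GlobusClient()

# retrieve_key() serves the credential cached in local token storage,
# minting a new one only when none is stored yet.
cred = c.retrieve_key()            # {'username': ..., 'password': ...}

topic = "topic-" + c.subject_openid[-12:]
print(c.register_topic(topic))     # grant ACLs for this topic
print(c.list_topics())             # topics registered to this subject
print(c.unregister_topic(topic))   # revoke the ACLs again
```
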
diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/kafka_client.py
ADDED
@@ -0,0 +1,46 @@
+import json
+from kafka import KafkaProducer, KafkaConsumer, KafkaAdminClient
+from kafka.admin import NewTopic
+from typing import Dict, Any
+
+from .client import Client
+from ._environments import MSK_SCRAM_ENDPOINT
+
+
+def get_diaspora_config(extra_configs: Dict[str, Any] = {}) -> Dict[str, Any]:
+    """
+    Retrieve default Diaspora event fabric connection configurations for Kafka clients.
+    Merges default configurations with custom ones provided.
+    """
+    try:
+        keys = Client().retrieve_key()
+    except Exception as e:
+        raise RuntimeError("Failed to retrieve Kafka keys") from e
+
+    conf = {
+        "bootstrap_servers": MSK_SCRAM_ENDPOINT,
+        "security_protocol": "SASL_SSL",
+        "sasl_mechanism": "SCRAM-SHA-512",
+        "api_version": (3, 5, 1),
+        "sasl_plain_username": keys["username"],
+        "sasl_plain_password": keys["password"],
+    }
+    conf.update(extra_configs)
+    return conf
+
+
+class KafkaAdmin(KafkaAdminClient):
+    def __init__(self, **configs):
+        super().__init__(**get_diaspora_config(configs))
+
+
+class KafkaProducer(KafkaProducer):
+    def __init__(self, **configs):
+        configs.setdefault("value_serializer",
+                           lambda v: json.dumps(v).encode('utf-8'))
+        super().__init__(**get_diaspora_config(configs))
+
+
+class KafkaConsumer(KafkaConsumer):
+    def __init__(self, *topics, **configs):
+        super().__init__(*topics, **get_diaspora_config(configs))

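Because every wrapper routes its keyword arguments through `get_diaspora_config`, callers can override or extend the SASL/SCRAM defaults. A short sketch; `client_id` and `acks` are ordinary kafka-python settings used here purely as examples:

```python
from diaspora_event_sdk import KafkaProducer

# Extra kwargs flow through get_diaspora_config(), overriding or
# extending the defaults assembled above.
producer = KafkaProducer(client_id="my-app", acks="all")
producer.send("topic-xxxxxxxxxxxx", {"message": "hello"})
producer.flush()
```
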
{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/manager.py
RENAMED
@@ -8,7 +8,7 @@ import typing as t
 
 import globus_sdk
 from globus_sdk.scopes import AuthScopes, ScopeBuilder
-from
+from diaspora_event_sdk.sdk.login_manager.tokenstore import get_token_storage_adapter
 
 from .._environments import DIASPORA_RESOURCE_SERVER
 from ..web_client import WebClient

diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/login_manager/protocol.py
ADDED
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import sys
+
+import globus_sdk
+
+from ..web_client import WebClient
+
+# these were added to stdlib typing in 3.8, so the import must be conditional
+# mypy and other tools expect and document a sys.version_info check
+if sys.version_info >= (3, 8):
+    from typing import Protocol, runtime_checkable
+else:
+    from typing_extensions import Protocol, runtime_checkable
+
+
+@runtime_checkable
+class LoginManagerProtocol(Protocol):
+    def ensure_logged_in(self) -> None:
+        ...
+
+    def logout(self) -> bool:
+        ...
+
+    def get_auth_client(self) -> globus_sdk.AuthClient:
+        ...
+
+    def get_web_client(self, *, base_url: str | None = None) -> WebClient:
+        ...

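Because the protocol is decorated with `runtime_checkable`, conformance is structural and can be tested with `isinstance`. A minimal sketch (the fake class is illustrative, not part of the package):

```python
import globus_sdk

from diaspora_event_sdk.sdk.login_manager.protocol import LoginManagerProtocol


class FakeLoginManager:
    """Illustrative stand-in: any object with these four methods conforms."""

    def ensure_logged_in(self) -> None: ...
    def logout(self) -> bool: ...
    def get_auth_client(self) -> globus_sdk.AuthClient: ...
    def get_web_client(self, *, base_url=None): ...


# runtime_checkable protocols verify method presence, not signatures.
print(isinstance(FakeLoginManager(), LoginManagerProtocol))  # True
```
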
diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/login_manager/tokenstore.py
ADDED
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+import json
+import os
+import pathlib
+
+from globus_sdk.tokenstorage import SQLiteAdapter
+
+from .._environments import _get_envname
+from .client_login import get_client_login, is_client_login
+from .globus_auth import internal_auth_client
+
+
+def _home() -> pathlib.Path:
+    # this is a hook point for tests to patch over
+    # it just returns `pathlib.Path.home()`
+    # replace this with a mock to return some test directory
+    return pathlib.Path.home()
+
+
+def invalidate_old_config() -> None:
+    token_file = _home() / ".funcx" / "credentials" / "funcx_sdk_tokens.json"
+
+    if token_file.exists():
+        try:
+            auth_client = internal_auth_client()
+            with open(token_file) as fp:
+                data = json.load(fp)
+            for token_data in data.values():
+                if "access_token" in token_data:
+                    auth_client.oauth2_revoke_token(token_data["access_token"])
+                if "refresh_token" in token_data:
+                    auth_client.oauth2_revoke_token(
+                        token_data["refresh_token"])
+        finally:
+            os.remove(token_file)
+
+
+def ensure_compute_dir() -> pathlib.Path:
+    legacy_dirname = _home() / ".funcx"
+    dirname = _home() / ".globus_compute"
+
+    user_dir = os.getenv("GLOBUS_COMPUTE_USER_DIR")
+    if user_dir:
+        dirname = pathlib.Path(user_dir)
+
+    if dirname.is_dir():
+        pass
+    elif dirname.is_file():
+        raise FileExistsError(
+            f"Error creating directory {dirname}, "
+            "please remove or rename the conflicting file"
+        )
+    elif legacy_dirname.is_dir() and not user_dir:
+        legacy_dirname.replace(dirname)
+        legacy_dirname.symlink_to(dirname, target_is_directory=True)
+    else:
+        dirname.mkdir(mode=0o700, parents=True, exist_ok=True)
+
+    return dirname
+
+
+def _get_storage_filename():
+    datadir = ensure_compute_dir()
+    return os.path.join(datadir, "storage.db")
+
+
+def _resolve_namespace(environment: str | None) -> str:
+    """
+    Return the namespace used to save tokens. This will check
+    if a client login is being used and return either:
+      user/<envname>
+    or
+      clientprofile/<envname>/<clientid>
+
+    e.g.
+
+    user/production
+    """
+    env = environment if environment is not None else _get_envname()
+
+    if is_client_login():
+        client_id = get_client_login().client_id
+        return f"clientprofile/{env}/{client_id}"
+
+    return f"user/{env}"
+
+
+def get_token_storage_adapter(*, environment: str | None = None) -> SQLiteAdapter:
+    # when initializing the token storage adapter, check if the storage file exists
+    # if it does not, then use this as a flag to clean the old config
+    fname = _get_storage_filename()
+    if not os.path.exists(fname):
+        invalidate_old_config()
+    # namespace is equal to the current environment
+    return SQLiteAdapter(
+        fname,
+        namespace=_resolve_namespace(environment),
+        connect_params={"check_same_thread": False},
+    )

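`_resolve_namespace` is the piece that keys stored tokens by environment and login type. A sketch of the strings it produces, assuming an interactive login, the default environment, and a made-up environment name for the second call:

```python
from diaspora_event_sdk.sdk.login_manager.tokenstore import _resolve_namespace

# With no client-credentials login configured and
# DIASPORA_SDK_ENVIRONMENT unset:
print(_resolve_namespace(None))       # "user/production"
print(_resolve_namespace("staging"))  # "user/staging" ("staging" is made up)

# Under a client-credentials login it would instead look like
#   "clientprofile/production/<client-id>"
```
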
diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/utils/__init__.py
ADDED
File without changes

diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/utils/uuid_like.py
ADDED
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+import typing as t
+import uuid
+
+# older pythons don't like aliases using |, even with a __future__ import
+UUID_LIKE_T = t.Union[uuid.UUID, str]
+
+
+def as_uuid(uuid_like: UUID_LIKE_T) -> uuid.UUID:
+    return uuid_like if isinstance(uuid_like, uuid.UUID) else uuid.UUID(uuid_like)
+
+
+def as_optional_uuid(optional_uuid_like: UUID_LIKE_T | None) -> uuid.UUID | None:
+    return as_uuid(optional_uuid_like) if optional_uuid_like else None

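These helpers normalize the `UUID | str` union used in the WebClient signatures below; for illustration (the string is the DIASPORA_RESOURCE_SERVER ID from _environments.py):

```python
import uuid

from diaspora_event_sdk.sdk.utils.uuid_like import as_uuid, as_optional_uuid

print(as_uuid("2b9d2f5c-fa32-45b5-875b-b24cd343b917"))  # uuid.UUID instance
print(as_uuid(uuid.uuid4()))                            # passed through unchanged
print(as_optional_uuid(None))                           # None
```
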
diaspora-event-sdk-0.0.5/diaspora_event_sdk/sdk/web_client.py
ADDED
@@ -0,0 +1,39 @@
+
+import typing as t
+
+import globus_sdk
+from globus_sdk.exc.api import GlobusAPIError
+from diaspora_event_sdk.sdk.utils.uuid_like import UUID_LIKE_T
+
+from ._environments import TOKEN_EXCHANGE
+
+
+class WebClient(globus_sdk.BaseClient):
+
+    def __init__(
+        self,
+        *,
+        environment: t.Optional[str] = None,
+        base_url: t.Optional[str] = None,
+        app_name: t.Optional[str] = None,
+        **kwargs,
+    ):
+        if base_url is None:
+            base_url = TOKEN_EXCHANGE
+
+        super().__init__(environment=environment, base_url=base_url, **kwargs)
+
+        self._user_app_name = None
+        self.user_app_name = app_name
+
+    def create_key(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
+        return self.post("/v1/create_key", headers={"Subject": subject})
+
+    def list_topics(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
+        return self.get("/v1/list_topics", headers={"Subject": subject})
+
+    def register_topic(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
+        return self.post("/v1/register_topic", headers={"Subject": subject, "Topic": topic})
+
+    def unregister_topic(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
+        return self.post("/v1/unregister_topic", headers={"Subject": subject, "Topic": topic})

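Each Client method above maps one-to-one onto these versioned routes. A sketch of driving WebClient directly, under the assumption that you already hold a valid Globus access token for the Diaspora resource server (normally the LoginManager wires this up); the token and subject values are placeholders:

```python
import globus_sdk

from diaspora_event_sdk.sdk.web_client import WebClient

# Assumption: ACCESS_TOKEN is a valid token for the Diaspora resource
# server; BaseClient attaches it to every request via the authorizer.
wc = WebClient(authorizer=globus_sdk.AccessTokenAuthorizer("ACCESS_TOKEN"))

subject = "subject-openid"  # placeholder OpenID subject claim
print(wc.list_topics(subject))                            # GET  /v1/list_topics
print(wc.register_topic(subject, "topic-xxxxxxxxxxxx"))   # POST /v1/register_topic
```
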
diaspora-event-sdk-0.0.5/diaspora_event_sdk/version.py
ADDED
@@ -0,0 +1 @@
+__version__ = "0.0.5"

diaspora-event-sdk-0.0.5/diaspora_event_sdk.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,89 @@
(Content identical to diaspora-event-sdk-0.0.5/PKG-INFO above.)

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk.egg-info/SOURCES.txt
RENAMED
@@ -1,5 +1,4 @@
 LICENSE
-README.md
 setup.py
 diaspora_event_sdk/__init__.py
 diaspora_event_sdk/version.py
@@ -19,4 +18,8 @@ diaspora_event_sdk/sdk/login_manager/client_login.py
 diaspora_event_sdk/sdk/login_manager/decorators.py
 diaspora_event_sdk/sdk/login_manager/globus_auth.py
 diaspora_event_sdk/sdk/login_manager/login_flow.py
-diaspora_event_sdk/sdk/login_manager/manager.py
+diaspora_event_sdk/sdk/login_manager/manager.py
+diaspora_event_sdk/sdk/login_manager/protocol.py
+diaspora_event_sdk/sdk/login_manager/tokenstore.py
+diaspora_event_sdk/sdk/utils/__init__.py
+diaspora_event_sdk/sdk/utils/uuid_like.py

diaspora-event-sdk-0.0.3/PKG-INFO
REMOVED
@@ -1,76 +0,0 @@
-Metadata-Version: 2.1
-Name: diaspora-event-sdk
-Version: 0.0.3
-Summary: SDK of Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
-Home-page: https://github.com/globus-labs/diaspora-event-sdk
-License: LICENSE
-Description-Content-Type: text/markdown
-License-File: LICENSE
-
-# Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
-
-### Install
-```bash
-pip install diaspora-event-sdk
-```
-
-## Use kafka-python
-
-### Claim Topic Ownership
-```python
-from diaspora_event_sdk import Client as GlobusClient
-c = GlobusClient()
-topic = f"topic-of-{c.subject_openid}"  # or any unclaimed topic
-print(c.acl_add(topic))
-print(c.acl_list())
-```
-
-### Create Topic
-```python
-from diaspora_event_sdk import KafkaAdmin, NewTopic
-admin = KafkaAdmin()
-res = admin.create_topics(new_topics=[NewTopic(name=topic, num_partitions=2, replication_factor=2)])
-print(res)
-```
-
-### Start Producer
-```python
-from diaspora_event_sdk import Producer
-future = producer.send(
-    topic, {'message': 'Synchronous message from Diaspora SDK'})
-result = future.get(timeout=10)
-print(result)
-```
-### Start Consumer
-```python
-from diaspora_event_sdk import Consumer  # Kafka producer
-consumer = Consumer(topic)
-for msg in consumer:
-    print(msg)
-```
-### Delete Topic
-```python
-from diaspora_event_sdk import KafkaAdmin
-admin = KafkaAdmin()
-res = admin.delete_topics(topics=[topic])
-print(res)
-
-```
-
-### Release Topic Ownership
-```python
-from diaspora_event_sdk import Client as GlobusClient
-c = GlobusClient()
-topic = f"topic-of-{c.subject_openid}"  # or any topic you claimed
-print(c.acl_remove(topic))
-print(c.acl_list())
-```
-
-## Use other Kafka libraries
-```python
-from diaspora_event_sdk import Client as GlobusClient
-c = GlobusClient()
-c.retrieve_or_create_key()
-```
-For SASL/SCRAM authentication, use `username` and `secret_key` as authentication credential;
-For AWS_MSK_IAM authentication, use `access_key` and `secret_key` as authentication credential.

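Read against the 0.0.5 README at the top of this diff, the removed file documents the renames in this release; a summary sketch derived solely from the two READMEs:

```python
# API renames between 0.0.3 and 0.0.5, as recorded in this diff:
#   c.acl_add(topic)            ->  c.register_topic(topic)
#   c.acl_list()                ->  c.list_topics()
#   c.acl_remove(topic)         ->  c.unregister_topic(topic)
#   c.retrieve_or_create_key()  ->  c.retrieve_key()
#   Producer / Consumer         ->  KafkaProducer / KafkaConsumer
#   /acl_* web routes           ->  /v1/register_topic, /v1/unregister_topic,
#                                   /v1/list_topics, /v1/create_key
```
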
diaspora-event-sdk-0.0.3/README.md
REMOVED
@@ -1,67 +0,0 @@
(Content identical to the README portion, lines 10-76, of diaspora-event-sdk-0.0.3/PKG-INFO above; 0.0.5 drops README.md from the sdist.)

diaspora-event-sdk-0.0.3/diaspora_event_sdk/sdk/kafka_client.py
REMOVED
@@ -1,46 +0,0 @@
-import json
-
-from kafka import KafkaProducer, KafkaConsumer, KafkaAdminClient
-from kafka.admin import NewTopic
-
-from .client import Client
-from ._environments import MSK_SCRAM_ENDPOINT
-
-DEFAULT_CONFIGS = {
-    "bootstrap_servers": MSK_SCRAM_ENDPOINT,
-    "security_protocol": "SASL_SSL",
-    "sasl_mechanism": "SCRAM-SHA-512",
-    "api_version": (3, 5, 1)
-}
-
-
-class KafkaAdmin(KafkaAdminClient):
-    def __init__(self, **configs):
-        keys = Client().retrieve_or_create_key()
-        conf = DEFAULT_CONFIGS.copy()
-        conf["sasl_plain_username"] = keys["username"]
-        conf["sasl_plain_password"] = keys["secret_key"]
-        conf.update(configs)
-        super().__init__(**conf)
-
-
-class Producer(KafkaProducer):
-    def __init__(self, **configs):
-        keys = Client().retrieve_or_create_key()
-        conf = DEFAULT_CONFIGS.copy()
-        conf["sasl_plain_username"] = keys["username"]
-        conf["sasl_plain_password"] = keys["secret_key"]
-        conf["value_serializer"] = lambda v: json.dumps(
-            v).encode('utf-8')
-        conf.update(configs)
-        super().__init__(**conf)
-
-
-class Consumer(KafkaConsumer):
-    def __init__(self, *topics, **configs):
-        keys = Client().retrieve_or_create_key()
-        conf = DEFAULT_CONFIGS.copy()
-        conf["sasl_plain_username"] = keys["username"]
-        conf["sasl_plain_password"] = keys["secret_key"]
-        conf.update(configs)
-        super().__init__(*topics, **conf)

diaspora-event-sdk-0.0.3/diaspora_event_sdk/sdk/web_client.py
REMOVED
@@ -1,39 +0,0 @@
-
-import typing as t
-
-import globus_sdk
-from globus_compute_sdk.sdk.utils.uuid_like import UUID_LIKE_T
-from globus_sdk.exc.api import GlobusAPIError
-
-from ._environments import TOKEN_EXCHANGE
-
-
-class WebClient(globus_sdk.BaseClient):
-
-    def __init__(
-        self,
-        *,
-        environment: t.Optional[str] = None,
-        base_url: t.Optional[str] = None,
-        app_name: t.Optional[str] = None,
-        **kwargs,
-    ):
-        if base_url is None:
-            base_url = TOKEN_EXCHANGE
-
-        super().__init__(environment=environment, base_url=base_url, **kwargs)
-
-        self._user_app_name = None
-        self.user_app_name = app_name
-
-    def create_key(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
-        return self.post("/create_key", headers={"Subject": subject})
-
-    def acl_list(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
-        return self.get("/acl_list", headers={"Subject": subject})
-
-    def acl_add(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
-        return self.post("/acl_add", headers={"Subject": subject, "Topic": topic})
-
-    def acl_remove(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
-        return self.post("/acl_remove", headers={"Subject": subject, "Topic": topic})

diaspora-event-sdk-0.0.3/diaspora_event_sdk/version.py
REMOVED
@@ -1 +0,0 @@
-__version__ = "0.0.3"

diaspora-event-sdk-0.0.3/diaspora_event_sdk.egg-info/PKG-INFO
REMOVED
@@ -1,76 +0,0 @@
(Content identical to diaspora-event-sdk-0.0.3/PKG-INFO above.)

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/LICENSE
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/__init__.py
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/decorators.py
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/client_login.py
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/decorators.py
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/globus_auth.py
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk/sdk/login_manager/login_flow.py
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk.egg-info/dependency_links.txt
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/diaspora_event_sdk.egg-info/top_level.txt
RENAMED
File without changes

{diaspora-event-sdk-0.0.3 → diaspora-event-sdk-0.0.5}/setup.cfg
RENAMED
File without changes