diaspora-event-sdk 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,7 +7,8 @@ __author__ = "The Diaspora Event Team"
7
7
  __version__ = _version
8
8
 
9
9
  from diaspora_event_sdk.sdk.client import Client # Globus client
10
- from diaspora_event_sdk.sdk.kafka_client import Producer, Consumer, KafkaAdmin, NewTopic
10
+ from diaspora_event_sdk.sdk.kafka_client import KafkaProducer, KafkaConsumer, KafkaAdmin, NewTopic
11
11
 
12
12
 
13
- __all__ = ("Client", "Producer", "Consumer", "KafkaAdmin", "NewTopic")
13
+ __all__ = ("Client", "KafkaProducer",
14
+ "KafkaConsumer", "KafkaAdmin", "NewTopic")
@@ -1,4 +1,11 @@
1
+ import os
2
+
3
+
1
4
  TOKEN_EXCHANGE = "http://3.220.110.101"
2
5
  DIASPORA_RESOURCE_SERVER = '2b9d2f5c-fa32-45b5-875b-b24cd343b917'
3
6
  MSK_SCRAM_ENDPOINT = "b-1-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9196,b-2-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9196"
4
7
  MSK_IAM_ENDPOINT = "b-1-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9198,b-2-public.diaspora.k6i387.c21.kafka.us-east-1.amazonaws.com:9198"
8
+
9
+
10
+ def _get_envname():
11
+ return os.getenv("DIASPORA_SDK_ENVIRONMENT", "production")
@@ -1,8 +1,6 @@
1
1
  import json
2
2
 
3
- from diaspora_event_sdk.sdk.login_manager.manager import LoginManager
4
- from diaspora_event_sdk.sdk.login_manager import requires_login
5
- from globus_compute_sdk.sdk.login_manager import LoginManagerProtocol
3
+ from diaspora_event_sdk.sdk.login_manager import LoginManager, LoginManagerProtocol, requires_login
6
4
 
7
5
  from ._environments import TOKEN_EXCHANGE, DIASPORA_RESOURCE_SERVER
8
6
 
@@ -35,6 +33,9 @@ class Client:
35
33
 
36
34
  @requires_login
37
35
  def create_key(self):
36
+ """
37
+ Invalidate previous keys (if any) and generate a new one
38
+ """
38
39
  resp = self.web_client.create_key(self.subject_openid)
39
40
  if resp["status"] == "error":
40
41
  raise Exception("should not happen")
@@ -53,28 +54,27 @@ class Client:
53
54
  ],
54
55
  )
55
56
  self.login_manager._token_storage._connection.commit()
56
- return resp
57
+ return {"username": self.subject_openid, "password": tokens['secret_key']}
57
58
 
58
59
  @requires_login
59
- def retrieve_or_create_key(self):
60
+ def retrieve_key(self):
61
+ """
62
+ Attempt to retrieve the key from local token storage, call create_key if not found
63
+ """
60
64
  tokens = self.login_manager._token_storage.get_token_data(
61
65
  DIASPORA_RESOURCE_SERVER)
62
66
  if "access_key" in tokens and "secret_key" in tokens:
63
- return {
64
- "status": "succeed", "username": self.subject_openid,
65
- "access_key": tokens['access_key'],
66
- "secret_key": tokens['secret_key']
67
- }
68
- return self.create_key()
67
+ return {"username": self.subject_openid, "password": tokens['secret_key']}
68
+ return self.retrieve_key()
69
69
 
70
70
    @requires_login
    def list_topics(self):
        """Return the topics registered to the current user.

        Delegates to the web service's /v1/list_topics endpoint, keyed by
        this user's OpenID subject.
        """
        return self.web_client.list_topics(self.subject_openid)
73
73
 
74
74
    @requires_login
    def register_topic(self, topic):
        """Register (claim ACLs for) *topic* on behalf of the current user.

        Delegates to the web service's /v1/register_topic endpoint.
        """
        return self.web_client.register_topic(self.subject_openid, topic)
77
77
 
78
78
    @requires_login
    def unregister_topic(self, topic):
        """Unregister (release ACLs for) *topic* on behalf of the current user.

        Delegates to the web service's /v1/unregister_topic endpoint.
        """
        return self.web_client.unregister_topic(self.subject_openid, topic)
@@ -1,46 +1,46 @@
1
1
  import json
2
-
3
2
  from kafka import KafkaProducer, KafkaConsumer, KafkaAdminClient
4
3
  from kafka.admin import NewTopic
4
+ from typing import Dict, Any
5
5
 
6
6
  from .client import Client
7
7
  from ._environments import MSK_SCRAM_ENDPOINT
8
8
 
9
- DEFAULT_CONFIGS = {
10
- "bootstrap_servers": MSK_SCRAM_ENDPOINT,
11
- "security_protocol": "SASL_SSL",
12
- "sasl_mechanism": "SCRAM-SHA-512",
13
- "api_version": (3, 5, 1)
14
- }
9
+
10
def get_diaspora_config(extra_configs: Dict[str, Any] = None) -> Dict[str, Any]:
    """
    Retrieve default Diaspora event fabric connection configurations for Kafka clients.
    Merges default configurations with custom ones provided.

    :param extra_configs: optional mapping of kafka-python config overrides;
        entries here take precedence over the defaults.
    :raises RuntimeError: if the SASL credentials cannot be retrieved.
    """
    try:
        keys = Client().retrieve_key()
    except Exception as e:
        raise RuntimeError("Failed to retrieve Kafka keys") from e

    conf = {
        "bootstrap_servers": MSK_SCRAM_ENDPOINT,
        "security_protocol": "SASL_SSL",
        "sasl_mechanism": "SCRAM-SHA-512",
        "api_version": (3, 5, 1),
        "sasl_plain_username": keys["username"],
        "sasl_plain_password": keys["password"],
    }
    # BUG FIX: the default was a mutable `{}` argument; use None as the
    # sentinel so no dict instance is shared across calls.
    if extra_configs:
        conf.update(extra_configs)
    return conf
15
30
 
16
31
 
17
32
class KafkaAdmin(KafkaAdminClient):
    """Kafka admin client preconfigured for the Diaspora event fabric.

    Any keyword arguments are merged over the Diaspora defaults before
    being passed to kafka-python's KafkaAdminClient.
    """

    def __init__(self, **configs):
        merged = get_diaspora_config(configs)
        super().__init__(**merged)
25
35
 
26
36
 
27
- class Producer(KafkaProducer):
37
+ class KafkaProducer(KafkaProducer):
28
38
  def __init__(self, **configs):
29
- keys = Client().retrieve_or_create_key()
30
- conf = DEFAULT_CONFIGS.copy()
31
- conf["sasl_plain_username"] = keys["username"]
32
- conf["sasl_plain_password"] = keys["secret_key"]
33
- conf["value_serializer"] = lambda v: json.dumps(
34
- v).encode('utf-8')
35
- conf.update(configs)
36
- super().__init__(**conf)
39
+ configs.setdefault("value_serializer",
40
+ lambda v: json.dumps(v).encode('utf-8'))
41
+ super().__init__(**get_diaspora_config(configs))
37
42
 
38
43
 
39
- class Consumer(KafkaConsumer):
44
class KafkaConsumer(KafkaConsumer):
    """Kafka consumer preconfigured for the Diaspora event fabric.

    Any keyword arguments are merged over the Diaspora defaults before
    being passed to kafka-python's KafkaConsumer.
    """

    def __init__(self, *topics, **configs):
        merged = get_diaspora_config(configs)
        super().__init__(*topics, **merged)
@@ -1,7 +1,10 @@
1
1
  from .decorators import requires_login
2
2
  from .manager import DiasporaScopes, LoginManager
3
+ from .protocol import LoginManagerProtocol
3
4
 
4
5
  __all__ = (
5
6
  "LoginManager",
6
7
  "DiasporaScopes",
7
- )
8
+ "LoginManagerProtocol",
9
+ "requires_login",
10
+ )
@@ -8,7 +8,7 @@ import typing as t
8
8
 
9
9
  import globus_sdk
10
10
  from globus_sdk.scopes import AuthScopes, ScopeBuilder
11
- from globus_compute_sdk.sdk.login_manager.tokenstore import get_token_storage_adapter
11
+ from diaspora_event_sdk.sdk.login_manager.tokenstore import get_token_storage_adapter
12
12
 
13
13
  from .._environments import DIASPORA_RESOURCE_SERVER
14
14
  from ..web_client import WebClient
@@ -0,0 +1,29 @@
1
+ from __future__ import annotations
2
+
3
+ import sys
4
+
5
+ import globus_sdk
6
+
7
+ from ..web_client import WebClient
8
+
9
+ # these were added to stdlib typing in 3.8, so the import must be conditional
10
+ # mypy and other tools expect and document a sys.version_info check
11
+ if sys.version_info >= (3, 8):
12
+ from typing import Protocol, runtime_checkable
13
+ else:
14
+ from typing_extensions import Protocol, runtime_checkable
15
+
16
+
17
@runtime_checkable
class LoginManagerProtocol(Protocol):
    """Structural (duck-typed) interface a login manager must satisfy.

    ``runtime_checkable`` allows ``isinstance(obj, LoginManagerProtocol)``
    checks against this Protocol.
    """

    def ensure_logged_in(self) -> None:
        ...

    def logout(self) -> bool:
        ...

    def get_auth_client(self) -> globus_sdk.AuthClient:
        ...

    def get_web_client(self, *, base_url: str | None = None) -> WebClient:
        ...
@@ -0,0 +1,100 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import os
5
+ import pathlib
6
+
7
+ from globus_sdk.tokenstorage import SQLiteAdapter
8
+
9
+ from .._environments import _get_envname
10
+ from .client_login import get_client_login, is_client_login
11
+ from .globus_auth import internal_auth_client
12
+
13
+
14
+ def _home() -> pathlib.Path:
15
+ # this is a hook point for tests to patch over
16
+ # it just returns `pathlib.Path.home()`
17
+ # replace this with a mock to return some test directory
18
+ return pathlib.Path.home()
19
+
20
+
21
+ def invalidate_old_config() -> None:
22
+ token_file = _home() / ".funcx" / "credentials" / "funcx_sdk_tokens.json"
23
+
24
+ if token_file.exists():
25
+ try:
26
+ auth_client = internal_auth_client()
27
+ with open(token_file) as fp:
28
+ data = json.load(fp)
29
+ for token_data in data.values():
30
+ if "access_token" in token_data:
31
+ auth_client.oauth2_revoke_token(token_data["access_token"])
32
+ if "refresh_token" in token_data:
33
+ auth_client.oauth2_revoke_token(
34
+ token_data["refresh_token"])
35
+ finally:
36
+ os.remove(token_file)
37
+
38
+
39
+ def ensure_compute_dir() -> pathlib.Path:
40
+ legacy_dirname = _home() / ".funcx"
41
+ dirname = _home() / ".globus_compute"
42
+
43
+ user_dir = os.getenv("GLOBUS_COMPUTE_USER_DIR")
44
+ if user_dir:
45
+ dirname = pathlib.Path(user_dir)
46
+
47
+ if dirname.is_dir():
48
+ pass
49
+ elif dirname.is_file():
50
+ raise FileExistsError(
51
+ f"Error creating directory {dirname}, "
52
+ "please remove or rename the conflicting file"
53
+ )
54
+ elif legacy_dirname.is_dir() and not user_dir:
55
+ legacy_dirname.replace(dirname)
56
+ legacy_dirname.symlink_to(dirname, target_is_directory=True)
57
+ else:
58
+ dirname.mkdir(mode=0o700, parents=True, exist_ok=True)
59
+
60
+ return dirname
61
+
62
+
63
+ def _get_storage_filename():
64
+ datadir = ensure_compute_dir()
65
+ return os.path.join(datadir, "storage.db")
66
+
67
+
68
+ def _resolve_namespace(environment: str | None) -> str:
69
+ """
70
+ Return the namespace used to save tokens. This will check
71
+ if a client login is being used and return either:
72
+ user/<envname>
73
+ or
74
+ clientprofile/<envname>/<clientid>
75
+
76
+ e.g.
77
+
78
+ user/production
79
+ """
80
+ env = environment if environment is not None else _get_envname()
81
+
82
+ if is_client_login():
83
+ client_id = get_client_login().client_id
84
+ return f"clientprofile/{env}/{client_id}"
85
+
86
+ return f"user/{env}"
87
+
88
+
89
+ def get_token_storage_adapter(*, environment: str | None = None) -> SQLiteAdapter:
90
+ # when initializing the token storage adapter, check if the storage file exists
91
+ # if it does not, then use this as a flag to clean the old config
92
+ fname = _get_storage_filename()
93
+ if not os.path.exists(fname):
94
+ invalidate_old_config()
95
+ # namespace is equal to the current environment
96
+ return SQLiteAdapter(
97
+ fname,
98
+ namespace=_resolve_namespace(environment),
99
+ connect_params={"check_same_thread": False},
100
+ )
File without changes
@@ -0,0 +1,15 @@
1
from __future__ import annotations

import typing as t
import uuid

# older pythons don't like aliases using |, even with a __future__ import
UUID_LIKE_T = t.Union[uuid.UUID, str]


def as_uuid(uuid_like: UUID_LIKE_T) -> uuid.UUID:
    """Coerce *uuid_like* to a uuid.UUID, parsing strings as needed."""
    if isinstance(uuid_like, uuid.UUID):
        return uuid_like
    return uuid.UUID(uuid_like)


def as_optional_uuid(optional_uuid_like: UUID_LIKE_T | None) -> uuid.UUID | None:
    """Like as_uuid, but map falsy input (None or "") to None."""
    if not optional_uuid_like:
        return None
    return as_uuid(optional_uuid_like)
@@ -2,8 +2,8 @@
2
2
  import typing as t
3
3
 
4
4
  import globus_sdk
5
- from globus_compute_sdk.sdk.utils.uuid_like import UUID_LIKE_T
6
5
  from globus_sdk.exc.api import GlobusAPIError
6
+ from diaspora_event_sdk.sdk.utils.uuid_like import UUID_LIKE_T
7
7
 
8
8
  from ._environments import TOKEN_EXCHANGE
9
9
 
@@ -27,13 +27,13 @@ class WebClient(globus_sdk.BaseClient):
27
27
  self.user_app_name = app_name
28
28
 
29
29
    def create_key(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
        """POST /v1/create_key: issue a new credential for *subject*."""
        return self.post("/v1/create_key", headers={"Subject": subject})
31
31
 
32
- def acl_list(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
33
- return self.get("/acl_list", headers={"Subject": subject})
32
    def list_topics(self, subject: UUID_LIKE_T) -> globus_sdk.GlobusHTTPResponse:
        """GET /v1/list_topics: list the topics registered to *subject*."""
        return self.get("/v1/list_topics", headers={"Subject": subject})
34
34
 
35
- def acl_add(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
36
- return self.post("/acl_add", headers={"Subject": subject, "Topic": topic})
35
    def register_topic(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
        """POST /v1/register_topic: register *topic* for *subject*."""
        return self.post("/v1/register_topic", headers={"Subject": subject, "Topic": topic})
37
37
 
38
- def acl_remove(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
39
- return self.post("/acl_remove", headers={"Subject": subject, "Topic": topic})
38
    def unregister_topic(self, subject: UUID_LIKE_T, topic: str) -> globus_sdk.GlobusHTTPResponse:
        """POST /v1/unregister_topic: unregister *topic* for *subject*."""
        return self.post("/v1/unregister_topic", headers={"Subject": subject, "Topic": topic})
@@ -1 +1 @@
1
- __version__ = "0.0.3"
1
+ __version__ = "0.0.5"
@@ -0,0 +1,91 @@
1
+ Metadata-Version: 2.1
2
+ Name: diaspora-event-sdk
3
+ Version: 0.0.5
4
+ Summary: SDK of Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
5
+ Home-page: https://github.com/globus-labs/diaspora-event-sdk
6
+ License: LICENSE
7
+ Description-Content-Type: text/markdown
8
+ License-File: LICENSE
9
+ Requires-Dist: globus-sdk
10
+ Requires-Dist: kafka-python
11
+
12
+ # Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
13
+
14
+ ### Install
15
+ ```bash
16
+ pip install diaspora-event-sdk
17
+ ```
18
+
19
+ ## Use kafka-python
20
+
21
+ ### Register Topic (create topic ACLs)
22
+
23
+ Before you can create, describe, or delete topics, you need to set the appropriate ACLs in ZooKeeper. Here we use the Client to register ACLs for the desired topic name.
24
+
25
+ ```python
26
+ from diaspora_event_sdk import Client as GlobusClient
27
+ c = GlobusClient()
28
+ topic = "topic-" + c.subject_openid[-12:]
29
+ print(c.register_topic(topic))
30
+ print(c.list_topics())
31
+ ```
32
+
33
+ ### Create Topic
34
+
35
+ Now use the KafkaAdmin to create the topic.
36
+
37
+ ```python
38
+ from diaspora_event_sdk import KafkaAdmin, NewTopic
39
+ admin = KafkaAdmin()
40
+ print(admin.create_topics(new_topics=[
41
+ NewTopic(name=topic, num_partitions=1, replication_factor=1)]))
42
+ ```
43
+
44
+ ### Start Producer
45
+
46
+ Once the topic is created, we can publish to it. The KafkaProducer wraps the [Python KafkaProducer](https://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html). Event publication can be either synchronous or asynchronous. The example below demonstrates the synchronous approach.
47
+
48
+ ```python
49
+ from diaspora_event_sdk import KafkaProducer
50
+ producer = KafkaProducer()
51
+ future = producer.send(
52
+ topic, {'message': 'Synchronous message from Diaspora SDK'})
53
+ result = future.get(timeout=10)
54
+ print(result)
55
+ ```
56
+
57
+ ### Start Consumer
58
+
59
+ A consumer can be configured to monitor the topic and act on events as they are published. The KafkaConsumer wraps the [Python KafkaConsumer](https://kafka-python.readthedocs.io/en/master/apidoc/KafkaConsumer.html). Here we use the `auto_offset_reset` to consume from the first event published to the topic. Removing this field will have the consumer act only on new events.
60
+
61
+ ```python
62
+ from diaspora_event_sdk import KafkaConsumer
63
+ consumer = KafkaConsumer(topic, auto_offset_reset='earliest')
64
+ for msg in consumer:
65
+ print(msg)
66
+ ```
67
+
68
+ ### Delete Topic
69
+ ```python
70
+ from diaspora_event_sdk import KafkaAdmin
71
+ admin = KafkaAdmin()
72
+ res = admin.delete_topics(topics=[topic])
73
+ print(res)
74
+ ```
75
+
76
+ ### Unregister Topic (remove topic ACLs)
77
+ ```python
78
+ from diaspora_event_sdk import Client as GlobusClient
79
+ c = GlobusClient()
80
+ topic = "topic-" + c.subject_openid[-12:]
81
+ print(c.unregister_topic(topic))
82
+ print(c.list_topics())
83
+ ```
84
+
85
+ ## Use other Kafka libraries
86
+ ```python
87
+ from diaspora_event_sdk import Client as GlobusClient
88
+ c = GlobusClient()
89
+ print(c.retrieve_key())
90
+ ```
91
+ For other Kafka clients, select SASL/SCRAM authentication, and use `username` and `password` as the authentication credentials. For the remaining connection parameters (bootstrap servers, security protocol, SASL mechanism), see `get_diaspora_config` in `kafka_client.py`.
@@ -0,0 +1,23 @@
1
+ diaspora_event_sdk/__init__.py,sha256=qLu5r-LxlQuJRJySWbiDGoaBL1A3STX9v5IrwvN1vtE,477
2
+ diaspora_event_sdk/version.py,sha256=S7u1lbuWmM3A3ajykBialmPoJUK6Jg-WmNqM-9OZFdk,22
3
+ diaspora_event_sdk/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ diaspora_event_sdk/sdk/_environments.py,sha256=X5TekWUUJ_cmgb3jdhK90dDlqj_AmAP5CnXw5ABjh_w,511
5
+ diaspora_event_sdk/sdk/client.py,sha256=s3QrXcPb64oSgOaGGODTH9eV_1IVeZWi1ZDh6-dBIO4,3051
6
+ diaspora_event_sdk/sdk/decorators.py,sha256=Gel8AyhIjbf4-FNintTNcOqvC9hHH_YwbOH257Nfmf0,884
7
+ diaspora_event_sdk/sdk/kafka_client.py,sha256=xy-b7ZfNkgExwa5d65IlUOaAZYAqibyJLQ1v0ruJzV8,1457
8
+ diaspora_event_sdk/sdk/web_client.py,sha256=rHYeEltujO191SksBXAMNrS6C_qasdf0xrFfBEcGhtQ,1349
9
+ diaspora_event_sdk/sdk/login_manager/__init__.py,sha256=yeqVgjeHLMX0WZJu2feJmq-fbeXvSxWghVV81ygfY-w,239
10
+ diaspora_event_sdk/sdk/login_manager/client_login.py,sha256=gvR4PkIqQpIywNieJQ_u11PHUmdLxQ0Ho-QgPSfu8bw,1798
11
+ diaspora_event_sdk/sdk/login_manager/decorators.py,sha256=EFEp71d0oJ7vo2H8W7DJ2gPrDfGzeNXUNxri1C0l8h0,1047
12
+ diaspora_event_sdk/sdk/login_manager/globus_auth.py,sha256=9Hymp0tv91OI5dBMUgh4rGv_5xLVLhFEK7Hu0t8CJFQ,389
13
+ diaspora_event_sdk/sdk/login_manager/login_flow.py,sha256=2TodgsvlEYPoZPQPkp6FHOC9IkSM07pS7MIVGS4MZNE,954
14
+ diaspora_event_sdk/sdk/login_manager/manager.py,sha256=uJHosI7ipqGt5KiL-iv2IGB4bNyZ6jQZQi7g4ka0pts,7052
15
+ diaspora_event_sdk/sdk/login_manager/protocol.py,sha256=RCuo2jy_XkpZvbxnKlDfTKs-L6b9_8_JR-Kq9wHwhoM,710
16
+ diaspora_event_sdk/sdk/login_manager/tokenstore.py,sha256=7jRm01rzsbvniaCfYtDDWE3det_1_b6oQkS-YQ2Qjg4,3037
17
+ diaspora_event_sdk/sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
+ diaspora_event_sdk/sdk/utils/uuid_like.py,sha256=xbxf0YXpDhdii16lwPLWRN21qFekHrNrqODSToMPtCg,470
19
+ diaspora_event_sdk-0.0.5.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
20
+ diaspora_event_sdk-0.0.5.dist-info/METADATA,sha256=c1OJRiA1CgaM_Hmn-SFjHF6Xvbho4MBQhCgUC6pyEW4,2927
21
+ diaspora_event_sdk-0.0.5.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
22
+ diaspora_event_sdk-0.0.5.dist-info/top_level.txt,sha256=_5Wx8F5rc9mpB093wvCXa6CArtWNXwek2T1LnmkS2vE,19
23
+ diaspora_event_sdk-0.0.5.dist-info/RECORD,,
@@ -1,78 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: diaspora-event-sdk
3
- Version: 0.0.3
4
- Summary: SDK of Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
5
- Home-page: https://github.com/globus-labs/diaspora-event-sdk
6
- License: LICENSE
7
- Description-Content-Type: text/markdown
8
- License-File: LICENSE
9
- Requires-Dist: globus-compute-sdk
10
- Requires-Dist: kafka-python
11
-
12
- # Diaspora Event Fabric: Resilience-enabling services for science from HPC to edge
13
-
14
- ### Install
15
- ```bash
16
- pip install diaspora-event-sdk
17
- ```
18
-
19
- ## Use kafka-python
20
-
21
- ### Claim Topic Ownership
22
- ```python
23
- from diaspora_event_sdk import Client as GlobusClient
24
- c = GlobusClient()
25
- topic = f"topic-of-{c.subject_openid}" # or any unclaimed topic
26
- print(c.acl_add(topic))
27
- print(c.acl_list())
28
- ```
29
-
30
- ### Create Topic
31
- ```python
32
- from diaspora_event_sdk import KafkaAdmin, NewTopic
33
- admin = KafkaAdmin()
34
- res = admin.create_topics(new_topics=[NewTopic(name=topic, num_partitions=2, replication_factor=2)])
35
- print(res)
36
- ```
37
-
38
- ### Start Producer
39
- ```python
40
- from diaspora_event_sdk import Producer
41
- future = producer.send(
42
- topic, {'message': 'Synchronous message from Diaspora SDK'})
43
- result = future.get(timeout=10)
44
- print(result)
45
- ```
46
- ### Start Consumer
47
- ```python
48
- from diaspora_event_sdk import Consumer # Kafka producer
49
- consumer = Consumer(topic)
50
- for msg in consumer:
51
- print(msg)
52
- ```
53
- ### Delete Topic
54
- ```python
55
- from diaspora_event_sdk import KafkaAdmin
56
- admin = KafkaAdmin()
57
- res = admin.delete_topics(topics=[topic])
58
- print(res)
59
-
60
- ```
61
-
62
- ### Release Topic Ownership
63
- ```python
64
- from diaspora_event_sdk import Client as GlobusClient
65
- c = GlobusClient()
66
- topic = f"topic-of-{c.subject_openid}" # or any topic you claimed
67
- print(c.acl_remove(topic))
68
- print(c.acl_list())
69
- ```
70
-
71
- ## Use other Kafka libraries
72
- ```python
73
- from diaspora_event_sdk import Client as GlobusClient
74
- c = GlobusClient()
75
- c.retrieve_or_create_key()
76
- ```
77
- For SASL/SCRAM authentication, use `username` and `secret_key` as authentication credential;
78
- For AWS_MSK_IAM authentication, use `access_key` and `secret_key` as authentication credential.
@@ -1,19 +0,0 @@
1
- diaspora_event_sdk/__init__.py,sha256=j92L05k8My8Gue0ez4NNBX952ftp97rLCmkCMU_vzRk,446
2
- diaspora_event_sdk/version.py,sha256=4GZKi13lDTD25YBkGakhZyEQZWTER_OWQMNPoH_UM2c,22
3
- diaspora_event_sdk/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- diaspora_event_sdk/sdk/_environments.py,sha256=4hJIrrdAHOt9qY74jnD5i2UvpBKkKqJ7Ys-bygGrP9k,414
5
- diaspora_event_sdk/sdk/client.py,sha256=ZAs0T3Z7dkdad-4t1e3CRfc3o_5WJNlhD-lXpPK-7KE,2985
6
- diaspora_event_sdk/sdk/decorators.py,sha256=Gel8AyhIjbf4-FNintTNcOqvC9hHH_YwbOH257Nfmf0,884
7
- diaspora_event_sdk/sdk/kafka_client.py,sha256=PLSwvsU1ap-_7CCA8Kra9QTzd7xpFQ6M0O2cGS0tvSE,1457
8
- diaspora_event_sdk/sdk/web_client.py,sha256=3Jud6VjkJ_mQ02TyGzP2mLjMBwrynbgJTqJcvZ4M9D8,1305
9
- diaspora_event_sdk/sdk/login_manager/__init__.py,sha256=ErCNru31j_sSl_10uTXv8jsa16VW1LbPG-k3zxgAxZo,145
10
- diaspora_event_sdk/sdk/login_manager/client_login.py,sha256=gvR4PkIqQpIywNieJQ_u11PHUmdLxQ0Ho-QgPSfu8bw,1798
11
- diaspora_event_sdk/sdk/login_manager/decorators.py,sha256=EFEp71d0oJ7vo2H8W7DJ2gPrDfGzeNXUNxri1C0l8h0,1047
12
- diaspora_event_sdk/sdk/login_manager/globus_auth.py,sha256=9Hymp0tv91OI5dBMUgh4rGv_5xLVLhFEK7Hu0t8CJFQ,389
13
- diaspora_event_sdk/sdk/login_manager/login_flow.py,sha256=2TodgsvlEYPoZPQPkp6FHOC9IkSM07pS7MIVGS4MZNE,954
14
- diaspora_event_sdk/sdk/login_manager/manager.py,sha256=ocrDlu_LITKGWV7kUAiMXdtnfTF9sDelniNc-06yux0,7052
15
- diaspora_event_sdk-0.0.3.dist-info/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
16
- diaspora_event_sdk-0.0.3.dist-info/METADATA,sha256=Jk67v5ylh1_redPjGAi13dFdkpse7hDLqrXJkrtqsT8,2035
17
- diaspora_event_sdk-0.0.3.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
18
- diaspora_event_sdk-0.0.3.dist-info/top_level.txt,sha256=_5Wx8F5rc9mpB093wvCXa6CArtWNXwek2T1LnmkS2vE,19
19
- diaspora_event_sdk-0.0.3.dist-info/RECORD,,