diaspora-event-sdk 0.4.4-py3-none-any.whl → 0.4.5-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- diaspora_event_sdk/examples/DiasporaDemoV3.ipynb +200 -0
- diaspora_event_sdk/examples/__init__.py +2 -0
- diaspora_event_sdk/examples/reliable_client_creation_examples.ipynb +157 -0
- diaspora_event_sdk/examples/reliable_client_creation_examples.py +274 -0
- diaspora_event_sdk/version.py +1 -1
- {diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/METADATA +1 -1
- {diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/RECORD +10 -6
- {diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/WHEEL +0 -0
- {diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/licenses/LICENSE +0 -0
- {diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/top_level.txt +0 -0
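
Beyond the version bump, the substantive change in 0.4.5 is a new examples package. As a quick orientation, here is a minimal driver sketch for the new module; it assumes the 0.4.5 wheel is installed, and it takes the module path from the RECORD diff and the run_produce_consume_cycle signature from reliable_client_creation_examples.py below. It is an illustration, not part of the release:

    # Hedged sketch: exercise the new examples module end to end.
    # Module path per the RECORD diff; signature per the script below.
    from diaspora_event_sdk.examples.reliable_client_creation_examples import (
        run_produce_consume_cycle,
    )

    # One cycle: reliable_client_creation(), then create-topic -> produce ->
    # consume -> delete-topic, and finally delete_user(); returns
    # (success, error_log_path_or_None).
    ok, log_file = run_produce_consume_cycle(produce_consumer_per_iteration=1)
    print("ok" if ok else f"failed; traceback written to {log_file}")

The same flow is available from the command line via the script's main(), which accepts --iterations and --produce-consumer-per-iteration.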
diaspora_event_sdk/examples/DiasporaDemoV3.ipynb
ADDED
@@ -0,0 +1,200 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Setup\n",
+    "%pip install -e '.[kafka-python]'\n",
+    "\n",
+    "import json\n",
+    "import uuid\n",
+    "from datetime import datetime\n",
+    "from diaspora_event_sdk import Client as GlobusClient\n",
+    "from diaspora_event_sdk.sdk.kafka_client import KafkaProducer, KafkaConsumer\n",
+    "\n",
+    "# os.environ[\"DIASPORA_SDK_ENVIRONMENT\"] = \"local\"\n",
+    "c = GlobusClient()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Display current user subject\n",
+    "print(f\"Current user subject: {c.subject_openid}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 1. Create User\n",
+    "user_result = c.create_user()\n",
+    "print(json.dumps(user_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Display namespace (automatically computed from subject)\n",
+    "print(f\"Using namespace: {c.namespace}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 2. Create Key\n",
+    "key_result = c.create_key()\n",
+    "print(json.dumps(key_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 3. List namespaces\n",
+    "namespaces_result = c.list_namespaces()\n",
+    "print(json.dumps(namespaces_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 4. Create Topic\n",
+    "topic_name = f\"topic-{str(uuid.uuid4())[:5]}\"\n",
+    "create_topic_result = c.create_topic(topic_name)\n",
+    "print(json.dumps(create_topic_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Kafka topic name format: \"{namespace}.{topic}\"\n",
+    "kafka_topic = f\"{c.namespace}.{topic_name}\"\n",
+    "print(f\"Kafka topic name: {kafka_topic}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 5. Produce Messages\n",
+    "p = KafkaProducer(kafka_topic)\n",
+    "for i in range(3):\n",
+    "    message = {\n",
+    "        \"message_id\": i + 1,\n",
+    "        \"timestamp\": datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\"),\n",
+    "        \"content\": f\"Message {i + 1} from v3 API\",\n",
+    "    }\n",
+    "    future = p.send(kafka_topic, message)\n",
+    "    result = future.get(timeout=30)\n",
+    "    print(f\"Produced message {i + 1}: offset={result.offset}\")\n",
+    "p.close()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 6. Consume Messages\n",
+    "consumer = KafkaConsumer(kafka_topic, auto_offset_reset=\"earliest\")\n",
+    "messages = consumer.poll(timeout_ms=10000)\n",
+    "for tp, msgs in messages.items():\n",
+    "    for message in msgs:\n",
+    "        data = json.loads(message.value.decode(\"utf-8\"))\n",
+    "        print(f\"Consumed: {data}\")\n",
+    "consumer.close()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 7. Recreate Topic\n",
+    "recreate_result = c.recreate_topic(topic_name)\n",
+    "print(json.dumps(recreate_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 8. Delete Topic\n",
+    "delete_topic_result = c.delete_topic(topic_name)\n",
+    "print(json.dumps(delete_topic_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 9. Delete Key\n",
+    "delete_key_result = c.delete_key()\n",
+    "print(json.dumps(delete_key_result, indent=2, default=str))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# 10. Delete User\n",
+    "delete_user_result = c.delete_user()\n",
+    "print(json.dumps(delete_user_result, indent=2, default=str))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": ".venv",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.14.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
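
The demo above uses the kafka-python style of asynchronous sends: KafkaProducer.send() returns a future, and future.get(timeout=...) blocks until the broker acknowledges the write. Condensed outside the notebook, the round trip looks like this. This is a sketch: the topic name "demo" is a placeholder, assumed to have been created beforehand with c.create_topic("demo"); all other names match the diff above.

    # Sketch of the demo's produce/consume round trip (placeholder topic "demo").
    import json
    from diaspora_event_sdk import Client as GlobusClient
    from diaspora_event_sdk.sdk.kafka_client import KafkaProducer, KafkaConsumer

    c = GlobusClient()
    kafka_topic = f"{c.namespace}.demo"  # topics are addressed as "{namespace}.{topic}"

    p = KafkaProducer(kafka_topic)
    metadata = p.send(kafka_topic, {"hello": "world"}).get(timeout=30)  # block for the ack
    print(f"written at offset {metadata.offset}")
    p.close()

    consumer = KafkaConsumer(kafka_topic, auto_offset_reset="earliest")
    for tp, msgs in consumer.poll(timeout_ms=10000).items():  # {TopicPartition: [records]}
        for record in msgs:
            print(json.loads(record.value.decode("utf-8")))
    consumer.close()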
diaspora_event_sdk/examples/reliable_client_creation_examples.ipynb
ADDED
@@ -0,0 +1,157 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Install dependencies\n",
+    "%pip install -e '.[kafka-python]'\n",
+    "\n",
+    "# Import the reliable topic creation functions\n",
+    "from diaspora_event_sdk.sdk.kafka_client import (\n",
+    "    reliable_client_creation,\n",
+    "    KafkaProducer,\n",
+    "    KafkaConsumer,\n",
+    ")\n",
+    "from diaspora_event_sdk import Client as GlobusClient\n",
+    "import json\n",
+    "import uuid\n",
+    "import time\n",
+    "from datetime import datetime\n",
+    "\n",
+    "# Initialize client\n",
+    "# os.environ[\"DIASPORA_SDK_ENVIRONMENT\"] = \"local\"\n",
+    "c = GlobusClient()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Run reliable_client_creation to set up client and test basic flow\n",
+    "reliable_client_creation()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Function to randomly create a topic, produce, and consume messages\n",
+    "def random_topic_produce_consume():\n",
+    "    \"\"\"Create a random topic, produce 3 messages, and consume them.\"\"\"\n",
+    "    topic_name = f\"topic-{str(uuid.uuid4())[:5]}\"\n",
+    "    kafka_topic = f\"{c.namespace}.{topic_name}\"\n",
+    "\n",
+    "    print(f\"\\n{'=' * 60}\")\n",
+    "    print(f\"Creating topic: {topic_name}\")\n",
+    "    print(f\"Kafka topic: {kafka_topic}\")\n",
+    "    print(f\"{'=' * 60}\")\n",
+    "\n",
+    "    # Create topic\n",
+    "    topic_result = c.create_topic(topic_name)\n",
+    "    print(f\"Topic creation: {json.dumps(topic_result, indent=2, default=str)}\")\n",
+    "\n",
+    "    if topic_result.get(\"status\") != \"success\":\n",
+    "        print(f\"Failed to create topic: {topic_result}\")\n",
+    "        return None\n",
+    "\n",
+    "    time.sleep(3)  # Wait for topic to be ready\n",
+    "\n",
+    "    # Produce messages\n",
+    "    print(f\"\\nProducing 3 messages to {kafka_topic}...\")\n",
+    "    producer = KafkaProducer(kafka_topic)\n",
+    "    for i in range(3):\n",
+    "        message = {\n",
+    "            \"message_id\": i + 1,\n",
+    "            \"timestamp\": datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\"),\n",
+    "            \"content\": f\"Message {i + 1} from random_topic_produce_consume\",\n",
+    "        }\n",
+    "        future = producer.send(kafka_topic, message)\n",
+    "        result = future.get(timeout=30)\n",
+    "        print(f\"  Produced message {i + 1}: offset={result.offset}\")\n",
+    "    producer.close()\n",
+    "\n",
+    "    time.sleep(2)  # Wait before consuming\n",
+    "\n",
+    "    # Consume messages\n",
+    "    print(f\"\\nConsuming messages from {kafka_topic}...\")\n",
+    "    consumer = KafkaConsumer(kafka_topic, auto_offset_reset=\"earliest\")\n",
+    "    messages = consumer.poll(timeout_ms=10000)\n",
+    "    consumed_count = 0\n",
+    "    for tp, msgs in messages.items():\n",
+    "        for message in msgs:\n",
+    "            data = json.loads(message.value.decode(\"utf-8\"))\n",
+    "            consumed_count += 1\n",
+    "            print(f\"  Consumed message {consumed_count}: {data}\")\n",
+    "    consumer.close()\n",
+    "    print(f\"\\nTotal messages consumed: {consumed_count}\")\n",
+    "\n",
+    "    # Delete topic\n",
+    "    print(f\"\\nDeleting topic {topic_name}...\")\n",
+    "    delete_result = c.delete_topic(topic_name)\n",
+    "    print(f\"Topic deletion: {json.dumps(delete_result, indent=2, default=str)}\")\n",
+    "\n",
+    "    return kafka_topic\n",
+    "\n",
+    "\n",
+    "# Call the function five times\n",
+    "for i in range(5):\n",
+    "    print(f\"\\n\\n{'#' * 60}\")\n",
+    "    print(f\"# Iteration {i + 1} of 5\")\n",
+    "    print(f\"{'#' * 60}\")\n",
+    "    result = random_topic_produce_consume()\n",
+    "    if result:\n",
+    "        print(f\"✓ Successfully completed iteration {i + 1}: {result}\")\n",
+    "    else:\n",
+    "        print(f\"✗ Failed iteration {i + 1}\")\n",
+    "    if i < 4:  # Don't sleep after last iteration\n",
+    "        time.sleep(2)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Cleanup: Delete user at the end\n",
+    "print(f\"\\n{'=' * 60}\")\n",
+    "print(\"Cleaning up: Deleting user...\")\n",
+    "print(f\"{'=' * 60}\")\n",
+    "delete_user_result = c.delete_user()\n",
+    "print(f\"User deletion: {json.dumps(delete_user_result, indent=2, default=str)}\")\n",
+    "if delete_user_result.get(\"status\") == \"success\":\n",
+    "    print(\"✓ User successfully deleted\")\n",
+    "else:\n",
+    "    print(f\"✗ Failed to delete user: {delete_user_result}\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": ".venv",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.14.2"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diaspora_event_sdk/examples/reliable_client_creation_examples.py
ADDED
@@ -0,0 +1,274 @@
+"""
+Reliable Client Creation Examples
+
+This script demonstrates reliable client creation and basic Kafka operations
+including topic creation, message production, and consumption.
+"""
+
+# Install dependencies
+# Run: pip install -e '.[kafka-python]'
+
+# Import the reliable topic creation functions
+from diaspora_event_sdk.sdk.kafka_client import (
+    reliable_client_creation,
+    KafkaProducer,
+    KafkaConsumer,
+)
+from diaspora_event_sdk import Client as GlobusClient
+from kafka.errors import KafkaTimeoutError
+import uuid
+import time
+import argparse
+import traceback
+import os
+from datetime import datetime
+
+# Configure logging to show INFO level messages from kafka_client
+# logging.basicConfig(
+#     level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+# )
+
+
+def reliable_produce(namespace, topic, num_messages=3, max_retries=3):
+    """Produce messages to a topic with retry logic for timeout errors.
+
+    Args:
+        namespace: The namespace for the topic
+        topic: The topic name (without namespace prefix)
+        num_messages: Number of messages to produce (default: 3)
+        max_retries: Maximum number of retries on timeout (default: 3)
+
+    Returns:
+        bool: True if successful, False otherwise
+    """
+    kafka_topic = f"{namespace}.{topic}"
+
+    for attempt in range(max_retries):
+        producer = None
+        try:
+            if attempt > 0:
+                print(f"  Retry {attempt + 1}/{max_retries}")
+            producer = KafkaProducer(kafka_topic)
+            for i in range(num_messages):
+                message = {
+                    "message_id": i + 1,
+                    "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+                    "content": f"Message {i + 1} from reliable_produce",
+                }
+                future = producer.send(kafka_topic, message)
+                print(f"  Sending message {i + 1}/{num_messages}...")
+                future.get(timeout=30)
+            return True
+        except KafkaTimeoutError:
+            if attempt < max_retries - 1:
+                time.sleep(2)  # Wait before retry
+            else:
+                print(f"✗ Produce failed after {max_retries} attempts")
+                return False
+        except Exception as e:
+            print(f"✗ Produce error: {e}")
+            return False
+        finally:
+            if producer is not None:
+                try:
+                    producer.close(timeout=1)
+                except Exception:
+                    # Ignore errors during cleanup to avoid "Exception ignored" warnings
+                    pass
+
+    return False
+
+
+def reliable_consume(namespace, topic, max_retries=3):
+    """Consume messages from a topic with retry logic for timeout errors.
+
+    Args:
+        namespace: The namespace for the topic
+        topic: The topic name (without namespace prefix)
+        max_retries: Maximum number of retries on timeout (default: 3)
+
+    Returns:
+        tuple: (success: bool, consumed_count: int)
+    """
+    kafka_topic = f"{namespace}.{topic}"
+
+    for attempt in range(max_retries):
+        consumer = None
+        try:
+            if attempt > 0:
+                print(f"  Retry {attempt + 1}/{max_retries}")
+            print("  Polling for messages...")
+            consumer = KafkaConsumer(kafka_topic, auto_offset_reset="earliest")
+            messages = consumer.poll(timeout_ms=10000)
+            consumed_count = 0
+            for tp, msgs in messages.items():
+                for message in msgs:
+                    consumed_count += 1
+            return True, consumed_count
+        except KafkaTimeoutError:
+            if attempt < max_retries - 1:
+                time.sleep(2)  # Wait before retry
+            else:
+                print(f"✗ Consume failed after {max_retries} attempts")
+                return False, 0
+        except Exception as e:
+            print(f"✗ Consume error: {e}")
+            return False, 0
+        finally:
+            if consumer is not None:
+                try:
+                    consumer.close()
+                except Exception:
+                    # Ignore errors during cleanup to avoid "Exception ignored" warnings
+                    pass
+
+    return False, 0
+
+
+def run_produce_consume_cycle(produce_consumer_per_iteration=1):
+    """Run the reliable client creation examples with produce/consume cycles.
+
+    Args:
+        produce_consumer_per_iteration: Number of times to run the topic produce/consume cycle
+
+    Returns:
+        tuple: (success: bool, log_file: str or None)
+    """
+    try:
+        # Initialize client
+        # os.environ["DIASPORA_SDK_ENVIRONMENT"] = "local"
+        c = GlobusClient()
+
+        # Run reliable_client_creation to set up client and test basic flow
+        print("  → reliable_client_creation()")
+        reliable_client_creation()
+
+        # Run produce/consume cycles
+        for i in range(produce_consumer_per_iteration):
+            print(f"Iteration {i + 1}/{produce_consumer_per_iteration}")
+
+            # Create topic
+            topic_name = f"topic-{str(uuid.uuid4())[:5]}"
+            print("  → create_topic()")
+            topic_result = c.create_topic(topic_name)
+
+            if topic_result.get("status") != "success":
+                print("✗ Failed to create topic")
+                continue
+
+            time.sleep(3)  # Wait for topic to be ready
+
+            # Produce messages with retry logic
+            print("  → reliable_produce()")
+            produce_success = reliable_produce(c.namespace, topic_name)
+
+            time.sleep(2)  # Wait before consuming
+
+            # Consume messages with retry logic
+            print("  → reliable_consume()")
+            consume_success, consumed_count = reliable_consume(c.namespace, topic_name)
+
+            # Delete topic
+            print("  → delete_topic()")
+            c.delete_topic(topic_name)
+
+            if produce_success and consume_success:
+                print(f"✓ Iteration {i + 1} completed")
+            else:
+                print(f"✗ Iteration {i + 1} failed")
+
+            if (
+                i < produce_consumer_per_iteration - 1
+            ):  # Don't sleep after last iteration
+                time.sleep(2)
+
+        # Cleanup: Delete user at the end
+        print("  → delete_user()")
+        delete_user_result = c.delete_user()
+        if delete_user_result.get("status") != "success":
+            print("✗ Failed to delete user")
+
+        return True, None
+    except Exception as e:
+        # Get the script directory for log file
+        script_dir = os.path.dirname(os.path.abspath(__file__))
+        log_filename = os.path.join(
+            script_dir, f"error_log_{datetime.now().strftime('%Y%m%d_%H%M%S')}.log"
+        )
+
+        # Get full traceback
+        full_traceback = traceback.format_exc()
+
+        # Write to log file
+        with open(log_filename, "w") as log_file:
+            log_file.write(
+                f"Exception occurred at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n"
+            )
+            log_file.write(f"{'=' * 80}\n")
+            log_file.write(f"Exception type: {type(e).__name__}\n")
+            log_file.write(f"Exception message: {str(e)}\n")
+            log_file.write(f"{'=' * 80}\n")
+            log_file.write("Full traceback:\n")
+            log_file.write(full_traceback)
+
+        print(f"✗ Exception: {type(e).__name__}: {str(e)}")
+        print(f"Log: {log_filename}")
+
+        return False, log_filename
+
+
+def main():
+    """Main entry point for the script."""
+    parser = argparse.ArgumentParser(
+        description="Reliable client creation examples with configurable iterations"
+    )
+    parser.add_argument(
+        "--produce-consumer-per-iteration",
+        type=int,
+        default=1,
+        help="Number of topic produce/consume cycles to run per main() call (default: 1)",
+    )
+    parser.add_argument(
+        "--iterations",
+        type=int,
+        default=3,
+        help="Number of times to call main() (default: 3)",
+    )
+    args = parser.parse_args()
+
+    # Track statistics
+    successful_runs = 0
+    failed_runs = 0
+    log_files = []
+
+    try:
+        for i in range(args.iterations):
+            print(f"\nRun {i + 1}/{args.iterations}")
+            success, log_file = run_produce_consume_cycle(
+                produce_consumer_per_iteration=args.produce_consumer_per_iteration
+            )
+            if success:
+                successful_runs += 1
+            else:
+                failed_runs += 1
+                if log_file:
+                    log_files.append(log_file)
+            if i < args.iterations - 1:  # Don't sleep after last iteration
+                time.sleep(1)
+    except KeyboardInterrupt:
+        print(f"\n\nInterrupted by user after {successful_runs + failed_runs} runs")
+
+    # Print final statistics
+    print("\nFINAL STATISTICS")
+    print(f"Successfully completed runs: {successful_runs}")
+    print(f"Failed runs: {failed_runs}")
+    print(f"Total runs: {successful_runs + failed_runs}")
+
+    if log_files:
+        print(f"\nError log files ({len(log_files)}):")
+        for log_file in log_files:
+            print(f"  {log_file}")
+
+
+if __name__ == "__main__":
+    main()
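
The two retry helpers above can also be used on their own. A minimal sketch under the same assumptions: the 0.4.5 wheel is installed, and "events" is a placeholder topic name assumed to have been created already with c.create_topic("events").

    # Sketch: call the retry helpers directly ("events" is a hypothetical topic).
    from diaspora_event_sdk import Client as GlobusClient
    from diaspora_event_sdk.examples.reliable_client_creation_examples import (
        reliable_produce,
        reliable_consume,
    )

    c = GlobusClient()
    # reliable_produce returns bool; reliable_consume returns (bool, count).
    if reliable_produce(c.namespace, "events", num_messages=3, max_retries=3):
        ok, count = reliable_consume(c.namespace, "events")
        print(f"consumed {count} message(s)" if ok else "consume failed")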
diaspora_event_sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.4.4"
+__version__ = "0.4.5"
{diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/RECORD
CHANGED
@@ -1,5 +1,9 @@
 diaspora_event_sdk/__init__.py,sha256=cG6QpFi8zovqhNJpAxGapvpsAkqDSgsZp6UvdJEf9-o,488
-diaspora_event_sdk/version.py,sha256=
+diaspora_event_sdk/version.py,sha256=ErkLkI2TDBX1OIqi2GGa20CPeu4ZculEi-XffRbLU6M,22
+diaspora_event_sdk/examples/DiasporaDemoV3.ipynb,sha256=8neyMEHmmnsQiZ9bg4D8rppjO5ySL10MGN6RrhV--bM,4933
+diaspora_event_sdk/examples/__init__.py,sha256=YvOs49nXFDTfksweZMUFso-o7vzBuYlOP7aH1OFKn1o,119
+diaspora_event_sdk/examples/reliable_client_creation_examples.ipynb,sha256=8zxo2YKzDM18wuzGvxTinD7Rj8xfOaKTVmMaucSagxU,5288
+diaspora_event_sdk/examples/reliable_client_creation_examples.py,sha256=TId8n6OMdpDq90Dtp9NvP_t_3X4loczcE-uaxd1AT8M,9245
 diaspora_event_sdk/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diaspora_event_sdk/sdk/_environments.py,sha256=zuvyoMtGsUA3J0GI92NBn-z_1J4GbiE7WaUgKe8-dpI,501
 diaspora_event_sdk/sdk/aws_iam_msk.py,sha256=9fPH7lgdfZhxKohdZE7shGPYIA4-X-XRbA5KdY-Agjo,3933
@@ -24,10 +28,10 @@ diaspora_event_sdk/sdk/login_manager/protocol.py,sha256=ipAOUi7GYF5YfU-az1LWUbU_
 diaspora_event_sdk/sdk/login_manager/tokenstore.py,sha256=ImncC8EIxoAuGtDiZIwdtUgOD2fWo8oBP22G-fiZ5L4,2036
 diaspora_event_sdk/sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 diaspora_event_sdk/sdk/utils/uuid_like.py,sha256=xbxf0YXpDhdii16lwPLWRN21qFekHrNrqODSToMPtCg,470
-diaspora_event_sdk-0.4.
+diaspora_event_sdk-0.4.5.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/unit/apis_test.py,sha256=x9Pd8Ss2COtXk7nKez4wHw0-Xp7GDeJT9bIWXJ1K5GQ,9140
-diaspora_event_sdk-0.4.
-diaspora_event_sdk-0.4.
-diaspora_event_sdk-0.4.
-diaspora_event_sdk-0.4.
+diaspora_event_sdk-0.4.5.dist-info/METADATA,sha256=ClPsc9oyLw_ZcxsBwICSY43-lmRC_PWu0VZb_WJXVCI,3828
+diaspora_event_sdk-0.4.5.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
+diaspora_event_sdk-0.4.5.dist-info/top_level.txt,sha256=OVun-67t3fkLFEIwvJuNINgFFvAc--bClYhXjLhMmvs,25
+diaspora_event_sdk-0.4.5.dist-info/RECORD,,
{diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/WHEEL
File without changes

{diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/licenses/LICENSE
File without changes

{diaspora_event_sdk-0.4.4.dist-info → diaspora_event_sdk-0.4.5.dist-info}/top_level.txt
File without changes