onceonly-sdk 2.0.0-py3-none-any.whl → 2.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
onceonly/ai.py CHANGED
@@ -22,13 +22,17 @@ class AiClient:
  """
  AI helpers for long-running backend tasks.

- Typical usage for agents:
- result = client.ai.run_and_wait(key="job:123", metadata={...})
-
- Endpoints:
- - POST /ai/run => start/attach to a run (idempotent by key)
- - GET /ai/status => poll status
- - GET /ai/result => fetch final result (completed/failed)
+ High-level:
+ - POST /ai/run => start/attach to a run (idempotent by key)
+ - GET /ai/status => poll status
+ - GET /ai/result => fetch final result (completed/failed)
+
+ Low-level lease API (for local side effects / agent tools):
+ - POST /ai/lease
+ - POST /ai/extend
+ - POST /ai/complete
+ - POST /ai/fail
+ - POST /ai/cancel
  """

  def __init__(
@@ -47,7 +51,9 @@ class AiClient:
  self._retry_backoff = float(retry_backoff)
  self._retry_max_backoff = float(retry_max_backoff)

- # ---- sync ----
+ # ------------------------------------------------------------------
+ # High-level: /ai/run + /ai/status + /ai/result
+ # ------------------------------------------------------------------

  def run(self, key: str, ttl: Optional[int] = None, metadata: Optional[MetadataLike] = None) -> AiRun:
  payload: Dict[str, Any] = {"key": key}
@@ -92,7 +98,21 @@ class AiClient:
  logger.debug("ai.result key=%s status=%s", key, data.get("status"))
  return AiResult.from_dict(data)

- def wait(self, key: str, timeout: float = 60.0, poll_min: float = 0.5, poll_max: float = 5.0) -> AiResult:
+ def wait(
+ self,
+ key: str,
+ *,
+ timeout: float = 60.0,
+ poll_min: float = 0.5,
+ poll_max: float = 5.0,
+ ) -> AiResult:
+ """
+ Polls /ai/status until completed/failed or timeout.
+
+ NOTE: We do NOT return status="timeout" because backend model statuses
+ are usually limited to: not_found|in_progress|completed|failed.
+ Instead we return status="failed" with error_code="timeout".
+ """
  t0 = time.time()
  while True:
  st = self.status(key)
@@ -100,7 +120,7 @@ class AiClient:
  return self.result(key)

  if time.time() - t0 >= timeout:
- return AiResult(ok=False, status="timeout", key=key, error_code="timeout")
+ return AiResult(ok=False, status="failed", key=key, error_code="timeout")

  sleep_s = st.retry_after_sec if isinstance(st.retry_after_sec, int) else poll_min
  sleep_s = max(poll_min, min(poll_max, float(sleep_s)))
@@ -119,7 +139,7 @@ class AiClient:
  self.run(key=key, ttl=ttl, metadata=metadata)
  return self.wait(key=key, timeout=timeout, poll_min=poll_min, poll_max=poll_max)

- # ---- async ----
+ # -------------------- async high-level --------------------

  async def run_async(self, key: str, ttl: Optional[int] = None, metadata: Optional[MetadataLike] = None) -> AiRun:
  payload: Dict[str, Any] = {"key": key}
@@ -167,7 +187,14 @@ class AiClient:
  logger.debug("ai.result_async key=%s status=%s", key, data.get("status"))
  return AiResult.from_dict(data)

- async def wait_async(self, key: str, timeout: float = 60.0, poll_min: float = 0.5, poll_max: float = 5.0) -> AiResult:
+ async def wait_async(
+ self,
+ key: str,
+ *,
+ timeout: float = 60.0,
+ poll_min: float = 0.5,
+ poll_max: float = 5.0,
+ ) -> AiResult:
  t0 = time.time()
  while True:
  st = await self.status_async(key)
@@ -175,7 +202,7 @@ class AiClient:
  return await self.result_async(key)

  if time.time() - t0 >= timeout:
- return AiResult(ok=False, status="timeout", key=key, error_code="timeout")
+ return AiResult(ok=False, status="failed", key=key, error_code="timeout")

  sleep_s = st.retry_after_sec if isinstance(st.retry_after_sec, int) else poll_min
  sleep_s = max(poll_min, min(poll_max, float(sleep_s)))
@@ -193,3 +220,184 @@ class AiClient:
  ) -> AiResult:
  await self.run_async(key=key, ttl=ttl, metadata=metadata)
  return await self.wait_async(key=key, timeout=timeout, poll_min=poll_min, poll_max=poll_max)
+
+ # ------------------------------------------------------------------
+ # Low-level lease API (sync) - returns raw dicts (backend models)
+ # ------------------------------------------------------------------
+
+ def lease(self, key: str, ttl: Optional[int] = None, metadata: Optional[MetadataLike] = None) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key}
+ if ttl is not None:
+ payload["ttl"] = int(ttl)
+ md = to_metadata_dict(metadata)
+ if md is not None:
+ payload["metadata"] = md
+
+ resp = request_with_retries_sync(
+ lambda: self._c.post("/ai/lease", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ def extend(self, key: str, lease_id: str, ttl: Optional[int] = None) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id}
+ if ttl is not None:
+ payload["ttl"] = int(ttl)
+
+ resp = request_with_retries_sync(
+ lambda: self._c.post("/ai/extend", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ def complete(
+ self,
+ key: str,
+ lease_id: str,
+ *,
+ result: Optional[Dict[str, Any]] = None,
+ result_hash: Optional[str] = None,
+ ) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id}
+ if result_hash is not None:
+ payload["result_hash"] = str(result_hash)
+ if result is not None:
+ payload["result"] = result
+
+ resp = request_with_retries_sync(
+ lambda: self._c.post("/ai/complete", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ def fail(
+ self,
+ key: str,
+ lease_id: str,
+ *,
+ error_code: str,
+ error_hash: Optional[str] = None,
+ ) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id, "error_code": str(error_code)}
+ if error_hash is not None:
+ payload["error_hash"] = str(error_hash)
+
+ resp = request_with_retries_sync(
+ lambda: self._c.post("/ai/fail", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ def cancel(self, key: str, lease_id: str, reason: Optional[str] = None) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id}
+ if reason:
+ payload["reason"] = str(reason)
+
+ resp = request_with_retries_sync(
+ lambda: self._c.post("/ai/cancel", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ # ------------------------------------------------------------------
+ # Low-level lease API (async) - returns raw dicts (backend models)
+ # ------------------------------------------------------------------
+
+ async def lease_async(self, key: str, ttl: Optional[int] = None, metadata: Optional[MetadataLike] = None) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key}
+ if ttl is not None:
+ payload["ttl"] = int(ttl)
+ md = to_metadata_dict(metadata)
+ if md is not None:
+ payload["metadata"] = md
+
+ c = await self._get_ac()
+ resp = await request_with_retries_async(
+ lambda: c.post("/ai/lease", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ async def extend_async(self, key: str, lease_id: str, ttl: Optional[int] = None) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id}
+ if ttl is not None:
+ payload["ttl"] = int(ttl)
+
+ c = await self._get_ac()
+ resp = await request_with_retries_async(
+ lambda: c.post("/ai/extend", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ async def complete_async(
+ self,
+ key: str,
+ lease_id: str,
+ *,
+ result: Optional[Dict[str, Any]] = None,
+ result_hash: Optional[str] = None,
+ ) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id}
+ if result_hash is not None:
+ payload["result_hash"] = str(result_hash)
+ if result is not None:
+ payload["result"] = result
+
+ c = await self._get_ac()
+ resp = await request_with_retries_async(
+ lambda: c.post("/ai/complete", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ async def fail_async(
+ self,
+ key: str,
+ lease_id: str,
+ *,
+ error_code: str,
+ error_hash: Optional[str] = None,
+ ) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id, "error_code": str(error_code)}
+ if error_hash is not None:
+ payload["error_hash"] = str(error_hash)
+
+ c = await self._get_ac()
+ resp = await request_with_retries_async(
+ lambda: c.post("/ai/fail", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
+
+ async def cancel_async(self, key: str, lease_id: str, reason: Optional[str] = None) -> Dict[str, Any]:
+ payload: Dict[str, Any] = {"key": key, "lease_id": lease_id}
+ if reason:
+ payload["reason"] = str(reason)
+
+ c = await self._get_ac()
+ resp = await request_with_retries_async(
+ lambda: c.post("/ai/cancel", json=payload),
+ max_retries=self._max_retries_429,
+ base_backoff=self._retry_backoff,
+ max_backoff=self._retry_max_backoff,
+ )
+ return parse_json_or_raise(resp)
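
For context on the low-level endpoints added above: the sketch below shows how the new sync lease methods could be combined around a local side effect. It is a minimal illustration and not part of the package; the `status`/`lease_id` keys in the lease response follow the README example shipped with 2.0.1, and `do_side_effect` plus the TTL value are placeholders.

```python
def run_side_effect_once(client, key: str) -> None:
    """Guard a local side effect (payment, email, webhook) with the 2.0.1 lease API."""
    lease = client.ai.lease(key=key, ttl=300)  # returns a raw dict (backend model)

    if lease["status"] != "acquired":
        return  # another worker holds the lease or the key is already settled

    lease_id = lease["lease_id"]
    try:
        do_side_effect()  # placeholder for the local work
        # For longer jobs, client.ai.extend(key=key, lease_id=lease_id, ttl=300)
        # can be called periodically to keep the lease alive.
        client.ai.complete(key=key, lease_id=lease_id, result={"ok": True})
    except Exception:
        client.ai.fail(key=key, lease_id=lease_id, error_code="failed")
        raise
```

The async variants added in the same hunk (`lease_async`, `extend_async`, `complete_async`, `fail_async`, `cancel_async`) mirror these signatures.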
onceonly/version.py CHANGED
@@ -1 +1 @@
- __version__ = "2.0.0"
+ __version__ = "2.0.1"
onceonly_sdk-2.0.0.dist-info/METADATA → onceonly_sdk-2.0.1.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: onceonly-sdk
- Version: 2.0.0
+ Version: 2.0.1
  Summary: Python SDK for OnceOnly idempotency API
  Author-email: OnceOnly <support@onceonly.tech>
  License: MIT
@@ -41,7 +41,8 @@ Documentation: https://onceonly.tech/docs/
  - Sync + Async client (httpx-based)
  - Fail-open mode for production safety
  - Stable idempotency keys (supports Pydantic & dataclasses)
- - Decorator for zero-boilerplate usage
+ - Decorators for zero-boilerplate usage
+ - Native AI API (long-running jobs, local side-effects)
  - Optional AI / LangChain integrations

  ---
@@ -60,7 +61,7 @@ pip install "onceonly-sdk[langchain]"

  ---

- ## Quick Start
+ ## Quick Start (Webhooks / Automations)

  ```python
  from onceonly import OnceOnly
@@ -78,11 +79,54 @@ else:
  print("First execution")
  ```

+ Use `check_lock()` for:
+ - Webhooks
+ - Make / Zapier scenarios
+ - Cron jobs
+ - Distributed workers
+
+ ---
+
+ ## AI Jobs (Server-side)
+
+ Use the AI API for long-running or asynchronous jobs.
+
+ ```python
+ result = client.ai.run_and_wait(
+ key="ai:job:daily_summary:2026-01-09",
+ metadata={"task": "daily_summary", "model": "gpt-4.1"},
+ timeout=60,
+ )
+
+ print(result.status)
+ print(result.result)
+ ```
+
+ - Charged **once per key**
+ - Polling is free
+ - Safe across retries and restarts
+
  ---

- ## AI Agents / LangChain Integration 🤖
+ ## AI Agents / Local Side-Effects

- OnceOnly integrates cleanly with AI-agent frameworks like LangChain.
+ Use the AI Lease API when your code performs the side-effect locally
+ (payments, emails, webhooks) but still needs exactly-once guarantees.
+
+ ```python
+ lease = client.ai.lease(key="ai:agent:charge:user_42:invoice_100", ttl=300)
+
+ if lease["status"] == "acquired":
+ try:
+ do_side_effect()
+ client.ai.complete(key=KEY, lease_id=lease["lease_id"], result={"ok": True})
+ except Exception:
+ client.ai.fail(key=KEY, lease_id=lease["lease_id"], error_code="failed")
+ ```
+
+ ---
+
+ ## LangChain Integration 🤖

  ```python
  from onceonly.integrations.langchain import make_idempotent_tool
@@ -94,11 +138,14 @@ tool = make_idempotent_tool(
  )
  ```

- Repeated tool calls with the same inputs will execute **exactly once**, even across retries or agent restarts.
+ Repeated tool calls with the same inputs will execute **exactly once**,
+ even across retries or agent restarts.
+
+ See `examples/ai/` for canonical patterns.

  ---

- ## Decorator
+ ## Decorators

  ```python
  from onceonly.decorators import idempotent
@@ -108,7 +155,7 @@ def process_order(order_id):
  ...
  ```

- Idempotency keys are generated automatically and are stable across restarts.
+ Idempotency keys are generated automatically and remain stable across restarts.

  ---

onceonly_sdk-2.0.0.dist-info/RECORD → onceonly_sdk-2.0.1.dist-info/RECORD RENAMED
@@ -1,17 +1,17 @@
  onceonly/__init__.py,sha256=KMS6F4DejM5nI5-gw3UC8SvETnK90oUE9V5pskh--Uw,481
  onceonly/_http.py,sha256=bFAgrLv0T7cGFq3LqaQCwEiqx-VfKEiT8jUommmhRws,3240
  onceonly/_util.py,sha256=YVdEWn1bvipAzR3g3oXpHmgLiaODwGRB1IGA3gHZ2PM,1273
- onceonly/ai.py,sha256=NjMHtZgc-a-l1Wr3mTWwL9HnIOLZbVr9gkuMXMHbuqA,7043
+ onceonly/ai.py,sha256=-yaO1ZRlEO-Qqou4P_Q0cemZYwFSIRDM1Lu_AwfF5PY,14578
  onceonly/ai_models.py,sha256=7bHYnAavdb3c-4nlh9HgRY18949TgmU9XfXfv3PXQEE,2910
  onceonly/client.py,sha256=6DtLdWc-7_bAXsaaewUQUTHVnCkRZGsc-PByMVPRhYY,12838
  onceonly/decorators.py,sha256=nP7Wu-RAQQNaTwyOnibzClEgcBJvYheMrG3_KztdlG8,5171
  onceonly/exceptions.py,sha256=Issh08A4IHSDaysJhVZNRCU9W_9BfiGt65UHaMhDCs4,1156
  onceonly/models.py,sha256=hVEBPgIVZP3ELjWYIFSFCKPzI38t5DA0gio9FvrmHJg,678
- onceonly/version.py,sha256=_7OlQdbVkK4jad0CLdpI0grT-zEAb-qgFmH5mFzDXiA,22
+ onceonly/version.py,sha256=wAxkK8w13vqoF47A8iqWdSlIgRRXmZiQ0R4wePZfzhs,22
  onceonly/integrations/__init__.py,sha256=0tk-2HTTsmc42NhWuR_G_Afmz5-5WG8NvmlO7iIPkIY,34
  onceonly/integrations/langchain.py,sha256=cdpHIluddX48uYeDeE1cxmn-arruVdE3k6gvZxYC9z4,5821
- onceonly_sdk-2.0.0.dist-info/licenses/LICENSE,sha256=YQQ8IT_P7hcGmmLFFuOy3eKDZ90e1cqef_okg85oAiQ,129
- onceonly_sdk-2.0.0.dist-info/METADATA,sha256=3cso7k9xZoja4JR8VwZuT4QtnclBcR9cdB4zKYnOM1w,3080
- onceonly_sdk-2.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- onceonly_sdk-2.0.0.dist-info/top_level.txt,sha256=lvz-sHerZcTwlZW-uYoda_wgx62kY07GdtzIdw89hnU,9
- onceonly_sdk-2.0.0.dist-info/RECORD,,
+ onceonly_sdk-2.0.1.dist-info/licenses/LICENSE,sha256=YQQ8IT_P7hcGmmLFFuOy3eKDZ90e1cqef_okg85oAiQ,129
+ onceonly_sdk-2.0.1.dist-info/METADATA,sha256=j1xhLIcpLYW6x4qmLKJp8qoKbDHrOhueu6beqF6DVmI,4131
+ onceonly_sdk-2.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ onceonly_sdk-2.0.1.dist-info/top_level.txt,sha256=lvz-sHerZcTwlZW-uYoda_wgx62kY07GdtzIdw89hnU,9
+ onceonly_sdk-2.0.1.dist-info/RECORD,,
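
Beyond the file-level changes listed above, the `wait()`/`wait_async()` change is visible to callers: a client-side timeout now surfaces as `status="failed"` with `error_code="timeout"`. A minimal async sketch of consuming that behaviour follows; the key, metadata, and print handling are illustrative, and construction of the client is omitted.

```python
async def daily_summary(client) -> None:
    key = "ai:job:daily_summary:2026-01-09"  # illustrative key, as in the README example
    await client.ai.run_async(key=key, metadata={"task": "daily_summary"})

    # timeout/poll_min/poll_max are keyword-only as of 2.0.1
    result = await client.ai.wait_async(key=key, timeout=60.0, poll_min=0.5, poll_max=5.0)

    if result.status == "completed":
        print(result.result)
    elif result.error_code == "timeout":
        # 2.0.1 reports a client-side timeout as status="failed" with
        # error_code="timeout" instead of a separate "timeout" status.
        print("still running; poll again later")
    else:
        print("failed:", result.error_code)
```

Run it with `asyncio.run(daily_summary(client))` against a configured `OnceOnly` client.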