xache 5.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
xache/types.py ADDED
@@ -0,0 +1,399 @@
1
+ """
2
+ Type definitions for Xache Protocol SDK
3
+ Matching API contracts per LLD §2
4
+ """
5
+
6
+ from dataclasses import dataclass
7
+ from typing import Dict, List, Literal, Optional, Any
8
+ from enum import Enum
9
+
10
+
11
# Type aliases shared by the request/response models below.
DID = str  # Decentralized identifier, shaped did:agent:<evm|sol>:<address>
KeyType = Literal["evm", "solana"]  # Key family used for signing
Chain = Literal["base", "solana"]  # Supported chains
StorageTier = Literal["hot", "warm", "cold"]  # Memory storage tiers (LLD §2.4)
# Machine-readable error codes surfaced in APIError.code.
ErrorCode = Literal[
    "UNAUTHENTICATED",
    "PAYMENT_REQUIRED",
    "RATE_LIMITED",
    "BUDGET_EXCEEDED",
    "INVALID_INPUT",
    "CONFLICT",
    "RETRY_LATER",
    "INTERNAL",
]
26
+
27
+
28
@dataclass
class ResponseMeta:
    """Response metadata per LLD §2.1"""
    request_id: str  # Server-assigned identifier for this request
    timestamp: str  # When the response was produced (string-encoded)
    duration: int  # Processing duration — presumably milliseconds; confirm against API docs
34
+
35
+
36
@dataclass
class APIError:
    """API error details carried in APIResponse.error"""
    code: ErrorCode  # Machine-readable error category
    message: str  # Human-readable description
    details: Optional[Dict[str, Any]] = None  # Optional structured context
42
+
43
+
44
@dataclass
class APIResponse:
    """Generic API response wrapper.

    NOTE(review): presumably `data` is set on success and `error` on
    failure, keyed by `success` — confirm against the API contract.
    """
    success: bool
    data: Optional[Any] = None
    error: Optional[APIError] = None
    meta: Optional[ResponseMeta] = None
51
+
52
+
53
@dataclass
class Payment402:
    """402 Payment Required response per LLD §2.3"""
    challenge_id: str  # Identifier of the payment challenge to fulfil
    amount: str  # Amount as a string (avoids float precision issues)
    chain_hint: Literal["solana", "base"]  # Suggested chain for settlement
    pay_to: str  # Destination address for the payment
    description: str  # Human-readable description of what is being paid for
61
+
62
+
63
@dataclass
class XacheClientConfig:
    """Client configuration"""
    api_url: str  # Base URL of the Xache API
    did: DID  # Agent identity this client acts as
    private_key: str  # Signing key — sensitive; avoid logging
    payment_provider: Optional[Dict[str, Any]] = None  # Provider-specific settings
    timeout: int = 30  # Request timeout — presumably seconds; confirm in HTTP client
    debug: bool = False  # Enables verbose/debug behavior
72
+
73
+
74
@dataclass
class RegisterIdentityRequest:
    """Identity registration request per LLD §2.2"""
    wallet_address: str  # On-chain wallet to bind to the new DID
    key_type: KeyType  # "evm" or "solana"
    chain: Chain  # "base" or "solana"
80
+
81
+
82
@dataclass
class RegisterIdentityResponse:
    """Identity registration response (echoes the request plus server fields)"""
    did: DID  # Newly registered decentralized identifier
    wallet_address: str
    key_type: KeyType
    chain: Chain
    created_at: str  # Server-side creation timestamp (string-encoded)
90
+
91
+
92
@dataclass
class SubmitClaimRequest:
    """Submit claim request (Option B: Async Claim Approval)"""
    agent_did: DID  # Agent identity being claimed
    webhook_url: Optional[str] = None  # Optional callback URL for claim status updates
97
+
98
+
99
@dataclass
class SubmitClaimResponse:
    """Submit claim response"""
    claim_id: str  # Identifier for tracking the pending claim
    status: str  # 'pending' (per the original inline note)
    message: str  # Human-readable status message
105
+
106
+
107
@dataclass
class ProcessClaimRequest:
    """Process claim request (Option B: Async Claim Approval).

    Approval path presumably requires the signature/message/timestamp
    fields; rejection path the rejection_reason — confirm server-side
    validation rules.
    """
    owner_did: DID  # DID of the owner deciding the claim
    approved: bool  # True to approve, False to reject
    owner_signature: Optional[str] = None
    agent_signature: Optional[str] = None
    message: Optional[str] = None  # Message that was signed
    timestamp: Optional[int] = None  # Numeric timestamp associated with the signatures
    rejection_reason: Optional[str] = None  # Populated when approved is False
117
+
118
+
119
@dataclass
class ProcessClaimResponse:
    """Process claim response"""
    status: str  # 'approved' or 'rejected' (per the original inline note)
    message: str  # Human-readable outcome message
124
+
125
+
126
@dataclass
class PendingClaim:
    """Pending claim as seen from the agent side"""
    claim_id: str
    owner_did: DID  # DID of the prospective owner
    owner_wallet: str  # Wallet address of the prospective owner
    requested_at: str  # When the claim was submitted (string-encoded)
    webhook_url: Optional[str] = None  # Callback registered at submission, if any
134
+
135
+
136
@dataclass
class PendingClaimByOwner:
    """Pending claim as seen from the owner side"""
    agent_did: DID  # Agent being claimed
    agent_wallet: str  # Wallet address bound to the agent
    requested_at: str  # When the claim was submitted (string-encoded)
    status: str  # Current claim status
143
+
144
+
145
@dataclass
class OnChainClaimRequest:
    """On-chain claim request (Option C: On-chain Claiming)"""
    agent_did: DID  # Agent being claimed
    tx_hash: str  # Hash of the on-chain claim transaction
    # NOTE(review): plain str where the Chain alias ("base" | "solana") seems
    # intended — kept as-is to avoid tightening the accepted values.
    chain: str  # 'solana' or 'base'
151
+
152
+
153
@dataclass
class OnChainClaimResponse:
    """On-chain claim response"""
    status: str  # 'approved' (per the original inline note)
    tx_hash: str  # Echoes the verified transaction hash
    method: str  # How the claim was verified/processed
    message: str  # Human-readable outcome message
160
+
161
+
162
@dataclass
class StoreMemoryRequest:
    """Memory store request per LLD §2.4"""
    data: Dict[str, Any]  # Payload to store
    storage_tier: StorageTier  # "hot" | "warm" | "cold"
    metadata: Optional[Dict[str, Any]] = None  # Optional caller-supplied metadata
168
+
169
+
170
@dataclass
class StoreMemoryResponse:
    """Memory store response"""
    memory_id: str  # Identifier of the stored memory
    storage_tier: StorageTier  # Tier the memory was stored in
    size: int  # Stored size — presumably bytes; confirm against API docs
    receipt_id: str  # Receipt issued for this operation
177
+
178
+
179
@dataclass
class RetrieveMemoryRequest:
    """Memory retrieve request"""
    memory_id: str  # Identifier returned by a prior store operation
183
+
184
+
185
@dataclass
class RetrieveMemoryResponse:
    """Memory retrieve response"""
    memory_id: str
    data: Dict[str, Any]  # The stored payload
    storage_tier: StorageTier
    # NOTE(review): Optional but has no default (a default here would force
    # one on receipt_id below) — callers must pass it explicitly, possibly None.
    metadata: Optional[Dict[str, Any]]
    receipt_id: str  # Receipt issued for this retrieval
193
+
194
+
195
@dataclass
class BatchStoreMemoryRequest:
    """Batch store memory request per PRD FR-010, LLD §2.3 (max 100 items).

    The 100-item cap is a documented contract; it is not enforced here.
    """
    items: List[StoreMemoryRequest]
199
+
200
+
201
@dataclass
class BatchStoreMemoryResult:
    """Single result in batch store response.

    Presumably memory_id/receipt_id are set on success and error on
    failure — confirm against the API contract.
    """
    index: int  # Position of the corresponding item in the request batch
    memory_id: Optional[str] = None
    receipt_id: Optional[str] = None
    error: Optional[str] = None
208
+
209
+
210
@dataclass
class BatchStoreMemoryResponse:
    """Batch store memory response per LLD §2.3"""
    results: List[BatchStoreMemoryResult]  # One result per requested item
    success_count: int
    failure_count: int
    batch_receipt_id: str  # Receipt covering the whole batch
217
+
218
+
219
@dataclass
class BatchRetrieveMemoryRequest:
    """Batch retrieve memory request per PRD FR-011, LLD §2.3 (max 100 items).

    The 100-item cap is a documented contract; it is not enforced here.
    """
    memory_ids: List[str]
223
+
224
+
225
@dataclass
class BatchRetrieveMemoryResult:
    """Single result in batch retrieve response.

    Presumably the data fields are set on success and error on failure —
    confirm against the API contract.
    """
    index: int  # Position of the corresponding id in the request batch
    memory_id: Optional[str] = None
    data: Optional[Dict[str, Any]] = None
    storage_tier: Optional[StorageTier] = None
    metadata: Optional[Dict[str, Any]] = None
    receipt_id: Optional[str] = None
    error: Optional[str] = None
235
+
236
+
237
@dataclass
class BatchRetrieveMemoryResponse:
    """Batch retrieve memory response per LLD §2.3"""
    results: List[BatchRetrieveMemoryResult]  # One result per requested id
    success_count: int
    failure_count: int
    batch_receipt_id: str  # Receipt covering the whole batch
244
+
245
+
246
@dataclass
class HeuristicMetrics:
    """Heuristic metrics per LLD §2.4"""
    success_rate: float  # 0.00 to 1.00
    sample_size: int  # Number of samples backing success_rate
    confidence: float  # 0.00 to 1.00
252
+
253
+
254
@dataclass
class ContributeHeuristicRequest:
    """Collective contribute request per LLD §2.5"""
    pattern: str  # Pattern text (10-500 chars)
    pattern_hash: str  # Hash of pattern for deduplication
    domain: str  # Domain (e.g., 'javascript', 'python', 'devops')
    tags: List[str]  # Tags for categorization (1-10 tags)
    metrics: HeuristicMetrics  # Metrics per LLD §2.4
    encrypted_content_ref: str  # Reference to encrypted content in R2
    context_type: Optional[str] = None  # Optional context classifier
    metadata: Optional[Dict[str, Any]] = None  # Optional caller-supplied metadata
265
+
266
+
267
@dataclass
class ContributeHeuristicResponse:
    """Collective contribute response"""
    heuristic_id: str  # Identifier assigned to the contributed heuristic
    pattern: str  # Echoes the contributed pattern
    domain: str
    tags: List[str]
    receipt_id: str  # Receipt issued for this contribution
275
+
276
+
277
@dataclass
class HeuristicMatch:
    """Heuristic match in query results"""
    heuristic_id: str
    pattern: str
    domain: str
    tags: List[str]
    contributor_did: DID  # DID of the agent who contributed this heuristic
    relevance_score: float  # How well the heuristic matches the query
    royalty_amount: str  # Royalty as a string (avoids float precision issues)
287
+
288
+
289
@dataclass
class QueryCollectiveRequest:
    """Collective query request"""
    query_text: str  # Free-text query to match against heuristics
    domain: Optional[str] = None  # Optional domain filter
    limit: int = 10  # Maximum number of matches to return
295
+
296
+
297
@dataclass
class QueryCollectiveResponse:
    """Collective query response"""
    matches: List[HeuristicMatch]
    total_cost: str  # Cost as a string (avoids float precision issues)
    royalties_usd: str  # Royalties paid out, in USD, string-encoded
    receipt_id: str  # Receipt issued for this query
304
+
305
+
306
@dataclass
class BudgetStatus:
    """Budget status (all money amounts in integer cents)"""
    limit_cents: int
    spent_cents: int
    remaining_cents: int
    percentage_used: float  # Percentage of budget consumed
    current_period: str  # Budget period this status applies to
314
+
315
+
316
class BudgetAlertLevel(Enum):
    """Budget alert levels per HLD §2.2 Budget Guardian"""
    WARN_50 = "WARN_50"  # 50% threshold warning
    WARN_80 = "WARN_80"  # 80% threshold warning
    CRITICAL_100 = "CRITICAL_100"  # 100% critical threshold
321
+
322
+
323
@dataclass
class BudgetAlert:
    """Budget alert details passed to BudgetAlertHandler callbacks"""
    level: BudgetAlertLevel  # Alert severity level
    threshold: float  # Threshold percentage that triggered alert (50, 80, or 100)
    percentage_used: float  # Current budget usage percentage
    spent_cents: int  # Amount spent in cents
    limit_cents: int  # Budget limit in cents
    remaining_cents: int  # Remaining budget in cents
    message: str  # Human-readable alert message
    timestamp: str  # Timestamp when alert was triggered
334
+
335
+
336
# Budget alert handler callback type.
# Typed Any because both sync and async callables are accepted;
# conceptually Callable[[BudgetAlert], None] or its async equivalent.
BudgetAlertHandler = Any
338
+
339
+
340
@dataclass
class Receipt:
    """Receipt record for a billable operation"""
    receipt_id: str
    agent_did: DID  # Agent the operation was billed to
    operation: str  # Name/type of the operation
    amount_usd: str  # USD amount as a string (avoids float precision issues)
    timestamp: str  # When the receipt was issued (string-encoded)
    metadata: Optional[Dict[str, Any]] = None  # Optional extra context
349
+
350
+
351
@dataclass
class ReceiptWithProof:
    """Receipt with Merkle proof of inclusion"""
    receipt_id: str
    merkle_proof: List[str]  # Sibling hashes from leaf to root
    merkle_root: str  # Root the proof verifies against
357
+
358
+
359
@dataclass
class UsageAnalytics:
    """Usage analytics summary"""
    operations: List[Dict[str, Any]]  # Per-operation usage records
    total_spent: str  # Total spend as a string (avoids float precision issues)
    period: Dict[str, str]  # Period bounds — presumably start/end keys; confirm
365
+
366
+
367
@dataclass
class ReputationSnapshot:
    """Reputation snapshot per HLD §2.2 (all scores on a 0-100 scale)"""
    agent_did: DID
    timestamp: str  # When the snapshot was taken (string-encoded)
    overall: float  # Overall reputation score (0-100)
    memory_quality: float  # Memory quality score (0-100)
    contrib_success: float  # Contribution success score (0-100)
    economic_value: float  # Economic value score (0-100)
    network_influence: float  # Network influence score (0-100)
    reliability: float  # Reliability score (0-100)
    specialization: float  # Specialization score (0-100)
    weights: Dict[str, float]  # Weights used to combine the component scores
380
+
381
+
382
@dataclass
class DomainReputation:
    """Domain-specific reputation per HLD §2.2"""
    domain: str  # Domain name (e.g., 'javascript', 'python', 'devops')
    score: float  # Domain-specific reputation score (0-100)
    contribution_count: int  # Number of contributions in this domain
    success_rate: float  # Success rate in this domain (0.00 to 1.00)
    total_earned_usd: str  # Total earnings in USD for this domain, string-encoded
390
+
391
+
392
@dataclass
class TopAgent:
    """Top agent entry for leaderboard"""
    agent_did: DID
    wallet_address: str
    reputation_score: float  # Overall reputation score (0-100)
    operation_count: int  # Total number of operations
    total_earned_usd: str  # Total earned in USD, string-encoded
@@ -0,0 +1,5 @@
1
+ """Utility modules"""
2
+
3
+ from .http import HttpClient, RetryConfig
4
+
5
+ __all__ = ["HttpClient", "RetryConfig"]
xache/utils/cache.py ADDED
@@ -0,0 +1,214 @@
1
+ """
2
+ LRU Cache implementation for memory operations
3
+ Production-ready with TTL support and pickle persistence
4
+ """
5
+
6
+ import time
7
+ import pickle
8
+ import os
9
+ from typing import Dict, Generic, TypeVar, Optional, Any
10
+ from dataclasses import dataclass
11
+
12
# Type of the cached payload.
T = TypeVar('T')


@dataclass
class CacheEntry(Generic[T]):
    """A single cache slot plus the bookkeeping used for TTL and LRU."""
    value: T  # The cached payload
    expires_at: float  # Absolute expiry time, epoch seconds (compared to time.time())
    access_count: int  # Number of times the entry has been accessed
    last_accessed: float  # Epoch seconds of the most recent access (drives LRU eviction)
+
23
+
24
class CacheConfig:
    """Settings that control cache behavior.

    Args:
        enabled: Whether caching is active.
        max_size: Maximum number of entries held at once.
        ttl: Entry time-to-live, in milliseconds (default 5 minutes).
        storage: Backing store, 'memory' or 'pickle'.
        pickle_path: File used when storage is 'pickle'; falls back to
            ``~/.xache_cache.pkl`` when not provided.
    """

    def __init__(
        self,
        enabled: bool = True,
        max_size: int = 100,
        ttl: int = 300000,  # 5 minutes in milliseconds
        storage: str = 'memory',  # 'memory' or 'pickle'
        pickle_path: Optional[str] = None
    ):
        if not pickle_path:
            pickle_path = os.path.expanduser('~/.xache_cache.pkl')
        self.enabled = enabled
        self.max_size = max_size
        self.ttl = ttl
        self.storage = storage
        self.pickle_path = pickle_path
+
41
+
42
class LRUCache(Generic[T]):
    """LRU cache with per-entry TTL and optional pickle persistence.

    Recency is tracked via dict insertion order: a hit re-inserts the
    entry at the end, and eviction removes the entry with the oldest
    ``last_accessed``.  When ``config.storage == 'pickle'`` every mutation
    is flushed to ``config.pickle_path``.

    NOTE(review): ``config.enabled`` is not consulted here — callers
    appear responsible for bypassing the cache when disabled; confirm.
    """

    def __init__(self, config: CacheConfig):
        """Initialize from a CacheConfig; loads persisted entries for pickle storage."""
        self.max_size = config.max_size
        self.ttl = config.ttl / 1000  # config.ttl is milliseconds; we work in seconds
        self.storage = config.storage
        self.pickle_path = config.pickle_path
        self.cache: Dict[str, CacheEntry[T]] = {}

        if self.storage == 'pickle':
            self._load_from_pickle()

    def get(self, key: str) -> Optional[T]:
        """
        Get value from cache.

        Args:
            key: Cache key

        Returns:
            Cached value, or None if the key is absent or its TTL expired.
        """
        entry = self.cache.get(key)
        if entry is None:
            return None

        # Lazily expire on read; persist so the pickle file drops it too.
        if time.time() > entry.expires_at:
            del self.cache[key]
            self._persist_to_pickle()
            return None

        entry.access_count += 1
        entry.last_accessed = time.time()

        # Re-insert so dict order reflects recency (most recent last).
        del self.cache[key]
        self.cache[key] = entry

        return entry.value

    def set(self, key: str, value: T) -> None:
        """
        Set value in cache, evicting the LRU entry if at capacity.

        Args:
            key: Cache key
            value: Value to cache
        """
        # Only evict when inserting a NEW key would exceed capacity;
        # overwriting an existing key never grows the cache.
        if len(self.cache) >= self.max_size and key not in self.cache:
            self._evict_lru()

        now = time.time()
        self.cache[key] = CacheEntry(
            value=value,
            expires_at=now + self.ttl,
            access_count=1,
            last_accessed=now
        )
        self._persist_to_pickle()

    def has(self, key: str) -> bool:
        """
        Check if key exists and is not expired.

        Note: delegates to get(), so a hit also refreshes LRU metadata.

        Args:
            key: Cache key

        Returns:
            True if key exists and is valid
        """
        return self.get(key) is not None

    def delete(self, key: str) -> bool:
        """
        Delete specific key.

        Args:
            key: Cache key

        Returns:
            True if key was present and deleted
        """
        if key in self.cache:
            del self.cache[key]
            self._persist_to_pickle()
            return True
        return False

    def clear(self) -> None:
        """Clear entire cache (and persist the now-empty state)."""
        self.cache.clear()
        self._persist_to_pickle()

    def get_stats(self) -> Dict[str, Any]:
        """
        Get cache statistics.

        Returns:
            Dictionary with current size and configured max_size.
        """
        return {
            'size': len(self.cache),
            'max_size': self.max_size
        }

    def cleanup(self) -> None:
        """Remove all expired entries; persists only if something was removed."""
        now = time.time()
        expired = [key for key, entry in self.cache.items()
                   if entry.expires_at <= now]

        for key in expired:
            del self.cache[key]

        if expired:
            self._persist_to_pickle()

    def _evict_lru(self) -> None:
        """Evict the least recently used entry (smallest last_accessed)."""
        if not self.cache:
            return

        # One-pass min over last_accessed finds the LRU key.
        # BUGFIX: the previous implementation guarded with `if oldest_key:`,
        # which skipped eviction entirely when the LRU key was a falsy
        # string such as "" — letting the cache grow past max_size.
        oldest_key = min(self.cache, key=lambda k: self.cache[k].last_accessed)
        del self.cache[oldest_key]

    def _load_from_pickle(self) -> None:
        """Load cache from the pickle file, dropping already-expired entries."""
        if self.storage != 'pickle':
            return

        try:
            if os.path.exists(self.pickle_path):
                # SECURITY: pickle.load executes arbitrary code if the file
                # is attacker-controlled; pickle_path must be a trusted,
                # user-owned location.
                with open(self.pickle_path, 'rb') as f:
                    data = pickle.load(f)

                now = time.time()
                self.cache = {
                    key: entry for key, entry in data.items()
                    if entry.expires_at > now
                }
        except Exception as e:
            # Best-effort: a corrupt/unreadable file just starts us empty.
            print(f"Warning: Failed to load cache from pickle: {e}")

    def _persist_to_pickle(self) -> None:
        """Persist the in-memory cache to the pickle file (best-effort)."""
        if self.storage != 'pickle':
            return

        try:
            directory = os.path.dirname(self.pickle_path)
            # BUGFIX: os.makedirs('') raises for a bare filename (no
            # directory component), which made persistence silently fail
            # for relative paths; only create a directory when one exists.
            if directory:
                os.makedirs(directory, exist_ok=True)

            with open(self.pickle_path, 'wb') as f:
                pickle.dump(self.cache, f)
        except Exception as e:
            print(f"Warning: Failed to persist cache to pickle: {e}")