chub-dev 0.1.0 → 0.1.2-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +55 -0
- package/bin/chub-mcp +2 -0
- package/dist/airtable/docs/database/javascript/DOC.md +1437 -0
- package/dist/airtable/docs/database/python/DOC.md +1735 -0
- package/dist/amplitude/docs/analytics/javascript/DOC.md +1282 -0
- package/dist/amplitude/docs/analytics/python/DOC.md +1199 -0
- package/dist/anthropic/docs/claude-api/javascript/DOC.md +503 -0
- package/dist/anthropic/docs/claude-api/python/DOC.md +389 -0
- package/dist/asana/docs/tasks/DOC.md +1396 -0
- package/dist/assemblyai/docs/transcription/DOC.md +1043 -0
- package/dist/atlassian/docs/confluence/javascript/DOC.md +1347 -0
- package/dist/atlassian/docs/confluence/python/DOC.md +1604 -0
- package/dist/auth0/docs/identity/javascript/DOC.md +968 -0
- package/dist/auth0/docs/identity/python/DOC.md +1199 -0
- package/dist/aws/docs/s3/javascript/DOC.md +1773 -0
- package/dist/aws/docs/s3/python/DOC.md +1807 -0
- package/dist/binance/docs/trading/javascript/DOC.md +1315 -0
- package/dist/binance/docs/trading/python/DOC.md +1454 -0
- package/dist/braintree/docs/gateway/javascript/DOC.md +1278 -0
- package/dist/braintree/docs/gateway/python/DOC.md +1179 -0
- package/dist/chromadb/docs/embeddings-db/javascript/DOC.md +1263 -0
- package/dist/chromadb/docs/embeddings-db/python/DOC.md +1707 -0
- package/dist/clerk/docs/auth/javascript/DOC.md +1220 -0
- package/dist/clerk/docs/auth/python/DOC.md +274 -0
- package/dist/cloudflare/docs/workers/javascript/DOC.md +918 -0
- package/dist/cloudflare/docs/workers/python/DOC.md +994 -0
- package/dist/cockroachdb/docs/distributed-db/DOC.md +1500 -0
- package/dist/cohere/docs/llm/DOC.md +1335 -0
- package/dist/datadog/docs/monitoring/javascript/DOC.md +1740 -0
- package/dist/datadog/docs/monitoring/python/DOC.md +1815 -0
- package/dist/deepgram/docs/speech/javascript/DOC.md +885 -0
- package/dist/deepgram/docs/speech/python/DOC.md +685 -0
- package/dist/deepl/docs/translation/javascript/DOC.md +887 -0
- package/dist/deepl/docs/translation/python/DOC.md +944 -0
- package/dist/deepseek/docs/llm/DOC.md +1220 -0
- package/dist/directus/docs/headless-cms/javascript/DOC.md +1128 -0
- package/dist/directus/docs/headless-cms/python/DOC.md +1276 -0
- package/dist/discord/docs/bot/javascript/DOC.md +1090 -0
- package/dist/discord/docs/bot/python/DOC.md +1130 -0
- package/dist/elasticsearch/docs/search/DOC.md +1634 -0
- package/dist/elevenlabs/docs/text-to-speech/javascript/DOC.md +336 -0
- package/dist/elevenlabs/docs/text-to-speech/python/DOC.md +552 -0
- package/dist/firebase/docs/auth/DOC.md +1015 -0
- package/dist/gemini/docs/genai/javascript/DOC.md +691 -0
- package/dist/gemini/docs/genai/python/DOC.md +555 -0
- package/dist/github/docs/octokit/DOC.md +1560 -0
- package/dist/google/docs/bigquery/javascript/DOC.md +1688 -0
- package/dist/google/docs/bigquery/python/DOC.md +1503 -0
- package/dist/hubspot/docs/crm/javascript/DOC.md +1805 -0
- package/dist/hubspot/docs/crm/python/DOC.md +2033 -0
- package/dist/huggingface/docs/transformers/DOC.md +948 -0
- package/dist/intercom/docs/messaging/javascript/DOC.md +1844 -0
- package/dist/intercom/docs/messaging/python/DOC.md +1797 -0
- package/dist/jira/docs/issues/javascript/DOC.md +1420 -0
- package/dist/jira/docs/issues/python/DOC.md +1492 -0
- package/dist/kafka/docs/streaming/javascript/DOC.md +1671 -0
- package/dist/kafka/docs/streaming/python/DOC.md +1464 -0
- package/dist/landingai-ade/docs/api/DOC.md +620 -0
- package/dist/landingai-ade/docs/sdk/python/DOC.md +489 -0
- package/dist/landingai-ade/docs/sdk/typescript/DOC.md +542 -0
- package/dist/landingai-ade/skills/SKILL.md +489 -0
- package/dist/launchdarkly/docs/feature-flags/javascript/DOC.md +1191 -0
- package/dist/launchdarkly/docs/feature-flags/python/DOC.md +1671 -0
- package/dist/linear/docs/tracker/DOC.md +1554 -0
- package/dist/livekit/docs/realtime/javascript/DOC.md +303 -0
- package/dist/livekit/docs/realtime/python/DOC.md +163 -0
- package/dist/mailchimp/docs/marketing/DOC.md +1420 -0
- package/dist/meilisearch/docs/search/DOC.md +1241 -0
- package/dist/microsoft/docs/onedrive/javascript/DOC.md +1421 -0
- package/dist/microsoft/docs/onedrive/python/DOC.md +1549 -0
- package/dist/mongodb/docs/atlas/DOC.md +2041 -0
- package/dist/notion/docs/workspace-api/javascript/DOC.md +1435 -0
- package/dist/notion/docs/workspace-api/python/DOC.md +1400 -0
- package/dist/okta/docs/identity/javascript/DOC.md +1171 -0
- package/dist/okta/docs/identity/python/DOC.md +1401 -0
- package/dist/openai/docs/chat/javascript/DOC.md +407 -0
- package/dist/openai/docs/chat/python/DOC.md +568 -0
- package/dist/paypal/docs/checkout/DOC.md +278 -0
- package/dist/pinecone/docs/sdk/javascript/DOC.md +984 -0
- package/dist/pinecone/docs/sdk/python/DOC.md +1395 -0
- package/dist/plaid/docs/banking/javascript/DOC.md +1163 -0
- package/dist/plaid/docs/banking/python/DOC.md +1203 -0
- package/dist/playwright-community/skills/login-flows/SKILL.md +108 -0
- package/dist/postmark/docs/transactional-email/DOC.md +1168 -0
- package/dist/prisma/docs/orm/javascript/DOC.md +1419 -0
- package/dist/prisma/docs/orm/python/DOC.md +1317 -0
- package/dist/qdrant/docs/vector-search/javascript/DOC.md +1221 -0
- package/dist/qdrant/docs/vector-search/python/DOC.md +1653 -0
- package/dist/rabbitmq/docs/message-queue/javascript/DOC.md +1193 -0
- package/dist/rabbitmq/docs/message-queue/python/DOC.md +1243 -0
- package/dist/razorpay/docs/payments/javascript/DOC.md +1219 -0
- package/dist/razorpay/docs/payments/python/DOC.md +1330 -0
- package/dist/redis/docs/key-value/javascript/DOC.md +1851 -0
- package/dist/redis/docs/key-value/python/DOC.md +2054 -0
- package/dist/registry.json +2817 -0
- package/dist/replicate/docs/model-hosting/DOC.md +1318 -0
- package/dist/resend/docs/email/DOC.md +1271 -0
- package/dist/salesforce/docs/crm/javascript/DOC.md +1241 -0
- package/dist/salesforce/docs/crm/python/DOC.md +1183 -0
- package/dist/search-index.json +1 -0
- package/dist/sendgrid/docs/email-api/javascript/DOC.md +371 -0
- package/dist/sendgrid/docs/email-api/python/DOC.md +656 -0
- package/dist/sentry/docs/error-tracking/javascript/DOC.md +1073 -0
- package/dist/sentry/docs/error-tracking/python/DOC.md +1309 -0
- package/dist/shopify/docs/storefront/DOC.md +457 -0
- package/dist/slack/docs/workspace/javascript/DOC.md +933 -0
- package/dist/slack/docs/workspace/python/DOC.md +271 -0
- package/dist/square/docs/payments/javascript/DOC.md +1855 -0
- package/dist/square/docs/payments/python/DOC.md +1728 -0
- package/dist/stripe/docs/api/DOC.md +1727 -0
- package/dist/stripe/docs/payments/DOC.md +1726 -0
- package/dist/stytch/docs/auth/javascript/DOC.md +1813 -0
- package/dist/stytch/docs/auth/python/DOC.md +1962 -0
- package/dist/supabase/docs/client/DOC.md +1606 -0
- package/dist/twilio/docs/messaging/python/DOC.md +469 -0
- package/dist/twilio/docs/messaging/typescript/DOC.md +946 -0
- package/dist/vercel/docs/platform/DOC.md +1940 -0
- package/dist/weaviate/docs/vector-db/javascript/DOC.md +1268 -0
- package/dist/weaviate/docs/vector-db/python/DOC.md +1388 -0
- package/dist/zendesk/docs/support/javascript/DOC.md +2150 -0
- package/dist/zendesk/docs/support/python/DOC.md +2297 -0
- package/package.json +22 -6
- package/skills/get-api-docs/SKILL.md +84 -0
- package/src/commands/annotate.js +83 -0
- package/src/commands/build.js +12 -1
- package/src/commands/feedback.js +150 -0
- package/src/commands/get.js +83 -42
- package/src/commands/search.js +7 -0
- package/src/index.js +43 -17
- package/src/lib/analytics.js +90 -0
- package/src/lib/annotations.js +57 -0
- package/src/lib/bm25.js +170 -0
- package/src/lib/cache.js +69 -6
- package/src/lib/config.js +8 -3
- package/src/lib/identity.js +99 -0
- package/src/lib/registry.js +103 -20
- package/src/lib/telemetry.js +86 -0
- package/src/mcp/server.js +177 -0
- package/src/mcp/tools.js +251 -0
|
@@ -0,0 +1,2054 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: key-value
|
|
3
|
+
description: "Redis Python client (redis-py) for key-value storage, caching, and pub/sub messaging"
|
|
4
|
+
metadata:
|
|
5
|
+
languages: "python"
|
|
6
|
+
versions: "7.0.1"
|
|
7
|
+
updated-on: "2026-03-01"
|
|
8
|
+
source: maintainer
|
|
9
|
+
tags: "redis,database,cache,key-value,pubsub"
|
|
10
|
+
---
|
|
11
|
+
|
|
12
|
+
# Redis Python Client (redis-py) - Complete Integration Guide
|
|
13
|
+
|
|
14
|
+
## GOLDEN RULE
|
|
15
|
+
|
|
16
|
+
**ALWAYS use the official `redis` package (redis-py) for Redis integration.**
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
pip install redis
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
**For enhanced performance, install with hiredis support:**
|
|
23
|
+
|
|
24
|
+
```bash
|
|
25
|
+
pip install redis[hiredis]
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
**DO NOT use:**
|
|
29
|
+
- `redis-py-cluster` (deprecated, functionality merged into redis-py)
|
|
30
|
+
- `aioredis` (deprecated, async support now in redis-py)
|
|
31
|
+
- `walrus` (third-party wrapper)
|
|
32
|
+
- Any other unofficial Redis clients
|
|
33
|
+
|
|
34
|
+
The official `redis` package is maintained by Redis and supports both synchronous and asynchronous operations.
|
|
35
|
+
|
|
36
|
+
---
|
|
37
|
+
|
|
38
|
+
## Installation
|
|
39
|
+
|
|
40
|
+
### Basic Installation
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
pip install redis
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
### Installation with Performance Optimization
|
|
47
|
+
|
|
48
|
+
```bash
|
|
49
|
+
pip install redis[hiredis]
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
The hiredis library provides a compiled response parser that significantly improves performance.
|
|
53
|
+
|
|
54
|
+
### Installation with All Optional Dependencies
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
pip install redis[hiredis,cryptography]
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### Environment Setup
|
|
61
|
+
|
|
62
|
+
Create a `.env` file in your project root:
|
|
63
|
+
|
|
64
|
+
```env
|
|
65
|
+
REDIS_HOST=localhost
|
|
66
|
+
REDIS_PORT=6379
|
|
67
|
+
REDIS_PASSWORD=your_password_here
|
|
68
|
+
REDIS_DB=0
|
|
69
|
+
REDIS_URL=redis://username:password@localhost:6379/0
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
Install python-dotenv to load environment variables:
|
|
73
|
+
|
|
74
|
+
```bash
|
|
75
|
+
pip install python-dotenv
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
---
|
|
79
|
+
|
|
80
|
+
## Initialization
|
|
81
|
+
|
|
82
|
+
### Basic Connection (Localhost)
|
|
83
|
+
|
|
84
|
+
```python
|
|
85
|
+
import redis
|
|
86
|
+
|
|
87
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
88
|
+
|
|
89
|
+
# Test connection
|
|
90
|
+
r.ping() # Returns True
|
|
91
|
+
|
|
92
|
+
# Use the client...
|
|
93
|
+
|
|
94
|
+
r.close()
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Connection with Environment Variables
|
|
98
|
+
|
|
99
|
+
```python
|
|
100
|
+
import redis
|
|
101
|
+
import os
|
|
102
|
+
from dotenv import load_dotenv
|
|
103
|
+
|
|
104
|
+
load_dotenv()
|
|
105
|
+
|
|
106
|
+
r = redis.Redis(
|
|
107
|
+
host=os.getenv('REDIS_HOST', 'localhost'),
|
|
108
|
+
port=int(os.getenv('REDIS_PORT', 6379)),
|
|
109
|
+
password=os.getenv('REDIS_PASSWORD'),
|
|
110
|
+
db=int(os.getenv('REDIS_DB', 0)),
|
|
111
|
+
decode_responses=True
|
|
112
|
+
)
|
|
113
|
+
|
|
114
|
+
r.ping()
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
### Connection Using URL
|
|
118
|
+
|
|
119
|
+
```python
|
|
120
|
+
import redis
|
|
121
|
+
import os
|
|
122
|
+
from dotenv import load_dotenv
|
|
123
|
+
|
|
124
|
+
load_dotenv()
|
|
125
|
+
|
|
126
|
+
redis_url = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
|
|
127
|
+
r = redis.from_url(redis_url, decode_responses=True)
|
|
128
|
+
|
|
129
|
+
r.ping()
|
|
130
|
+
```
|
|
131
|
+
|
|
132
|
+
### Connection with Authentication
|
|
133
|
+
|
|
134
|
+
```python
|
|
135
|
+
import redis
|
|
136
|
+
|
|
137
|
+
r = redis.Redis(
|
|
138
|
+
host='redis.example.com',
|
|
139
|
+
port=6379,
|
|
140
|
+
username='default',
|
|
141
|
+
password='your_password',
|
|
142
|
+
db=0,
|
|
143
|
+
decode_responses=True
|
|
144
|
+
)
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
### Connection with TLS/SSL
|
|
148
|
+
|
|
149
|
+
```python
|
|
150
|
+
import redis
|
|
151
|
+
import ssl
|
|
152
|
+
|
|
153
|
+
r = redis.Redis(
|
|
154
|
+
host='redis.example.com',
|
|
155
|
+
port=6380,
|
|
156
|
+
password='your_password',
|
|
157
|
+
ssl=True,
|
|
158
|
+
ssl_certfile='/path/to/client-cert.pem',
|
|
159
|
+
ssl_keyfile='/path/to/client-key.pem',
|
|
160
|
+
ssl_ca_certs='/path/to/ca-cert.pem',
|
|
161
|
+
ssl_cert_reqs=ssl.CERT_REQUIRED,
|
|
162
|
+
decode_responses=True
|
|
163
|
+
)
|
|
164
|
+
```
|
|
165
|
+
|
|
166
|
+
### Connection Pool
|
|
167
|
+
|
|
168
|
+
```python
|
|
169
|
+
import redis
|
|
170
|
+
|
|
171
|
+
# Create connection pool
|
|
172
|
+
pool = redis.ConnectionPool(
|
|
173
|
+
host='localhost',
|
|
174
|
+
port=6379,
|
|
175
|
+
password='your_password',
|
|
176
|
+
db=0,
|
|
177
|
+
max_connections=10,
|
|
178
|
+
decode_responses=True
|
|
179
|
+
)
|
|
180
|
+
|
|
181
|
+
# Create client from pool
|
|
182
|
+
r = redis.Redis(connection_pool=pool)
|
|
183
|
+
|
|
184
|
+
# Multiple clients can share the pool
|
|
185
|
+
r2 = redis.Redis(connection_pool=pool)
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
### Context Manager (Auto-Close)
|
|
189
|
+
|
|
190
|
+
```python
|
|
191
|
+
import redis
|
|
192
|
+
|
|
193
|
+
with redis.Redis(host='localhost', port=6379, decode_responses=True) as r:
|
|
194
|
+
r.set('key', 'value')
|
|
195
|
+
value = r.get('key')
|
|
196
|
+
print(value)
|
|
197
|
+
# Connection automatically closed
|
|
198
|
+
```
|
|
199
|
+
|
|
200
|
+
### Decode Responses
|
|
201
|
+
|
|
202
|
+
```python
|
|
203
|
+
# Without decode_responses (default - returns bytes)
|
|
204
|
+
r = redis.Redis(host='localhost', port=6379)
|
|
205
|
+
value = r.get('key') # Returns: b'value'
|
|
206
|
+
|
|
207
|
+
# With decode_responses (returns strings)
|
|
208
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
209
|
+
value = r.get('key') # Returns: 'value'
|
|
210
|
+
```
|
|
211
|
+
|
|
212
|
+
---
|
|
213
|
+
|
|
214
|
+
## Core API Surfaces
|
|
215
|
+
|
|
216
|
+
## String Operations
|
|
217
|
+
|
|
218
|
+
### Basic SET and GET
|
|
219
|
+
|
|
220
|
+
```python
|
|
221
|
+
# Set a string value
|
|
222
|
+
r.set('key', 'value')
|
|
223
|
+
|
|
224
|
+
# Get a string value
|
|
225
|
+
value = r.get('key')
|
|
226
|
+
print(value) # 'value'
|
|
227
|
+
```
|
|
228
|
+
|
|
229
|
+
### SET with Options
|
|
230
|
+
|
|
231
|
+
```python
|
|
232
|
+
# Set with expiration (seconds)
|
|
233
|
+
r.set('session:123', 'user_data', ex=3600) # Expires in 1 hour
|
|
234
|
+
|
|
235
|
+
# Set with expiration (milliseconds)
|
|
236
|
+
r.set('temp:key', 'value', px=5000) # Expires in 5 seconds
|
|
237
|
+
|
|
238
|
+
# Set only if key doesn't exist (NX)
|
|
239
|
+
result = r.set('key', 'value', nx=True) # Returns True if set, None if exists
|
|
240
|
+
|
|
241
|
+
# Set only if key exists (XX)
|
|
242
|
+
result = r.set('key', 'new_value', xx=True) # Returns True if set, None if not exists
|
|
243
|
+
|
|
244
|
+
# Get old value and set new value
|
|
245
|
+
old_value = r.set('key', 'new_value', get=True) # Returns old value
|
|
246
|
+
|
|
247
|
+
# Set with multiple options
|
|
248
|
+
r.set('key', 'value', ex=60, nx=True)
|
|
249
|
+
```
|
|
250
|
+
|
|
251
|
+
### Multiple Keys
|
|
252
|
+
|
|
253
|
+
```python
|
|
254
|
+
# Set multiple keys at once
|
|
255
|
+
r.mset({
|
|
256
|
+
'key1': 'value1',
|
|
257
|
+
'key2': 'value2',
|
|
258
|
+
'key3': 'value3'
|
|
259
|
+
})
|
|
260
|
+
|
|
261
|
+
# Get multiple keys at once
|
|
262
|
+
values = r.mget(['key1', 'key2', 'key3'])
|
|
263
|
+
print(values) # ['value1', 'value2', 'value3']
|
|
264
|
+
|
|
265
|
+
# Set multiple only if none exist
|
|
266
|
+
result = r.msetnx({
|
|
267
|
+
'key4': 'value4',
|
|
268
|
+
'key5': 'value5'
|
|
269
|
+
}) # Returns True if all set, False if any exist
|
|
270
|
+
```
|
|
271
|
+
|
|
272
|
+
### Increment and Decrement
|
|
273
|
+
|
|
274
|
+
```python
|
|
275
|
+
# Set initial value
|
|
276
|
+
r.set('counter', 0)
|
|
277
|
+
|
|
278
|
+
# Increment by 1
|
|
279
|
+
r.incr('counter') # Returns 1
|
|
280
|
+
|
|
281
|
+
# Increment by specific amount
|
|
282
|
+
r.incrby('counter', 10) # Returns 11
|
|
283
|
+
|
|
284
|
+
# Increment float
|
|
285
|
+
r.incrbyfloat('price', 2.5) # Increment by 2.5
|
|
286
|
+
|
|
287
|
+
# Decrement by 1
|
|
288
|
+
r.decr('counter')
|
|
289
|
+
|
|
290
|
+
# Decrement by specific amount
|
|
291
|
+
r.decrby('counter', 5)
|
|
292
|
+
```
|
|
293
|
+
|
|
294
|
+
### String Manipulation
|
|
295
|
+
|
|
296
|
+
```python
|
|
297
|
+
# Append to string
|
|
298
|
+
r.set('message', 'Hello')
|
|
299
|
+
r.append('message', ' World') # 'Hello World'
|
|
300
|
+
|
|
301
|
+
# Get substring
|
|
302
|
+
substr = r.getrange('message', 0, 4) # 'Hello'
|
|
303
|
+
|
|
304
|
+
# Get length
|
|
305
|
+
length = r.strlen('message') # 11
|
|
306
|
+
|
|
307
|
+
# Set range
|
|
308
|
+
r.setrange('message', 6, 'Redis') # 'Hello Redis'
|
|
309
|
+
|
|
310
|
+
# Get and delete
|
|
311
|
+
value = r.getdel('message') # Returns value and deletes key
|
|
312
|
+
|
|
313
|
+
# Get and set expiration
|
|
314
|
+
value = r.getex('key', ex=60) # Returns value and sets expiration
|
|
315
|
+
```
|
|
316
|
+
|
|
317
|
+
---
|
|
318
|
+
|
|
319
|
+
## Hash Operations
|
|
320
|
+
|
|
321
|
+
### Basic Hash Operations
|
|
322
|
+
|
|
323
|
+
```python
|
|
324
|
+
# Set a single field in a hash
|
|
325
|
+
r.hset('user:1000', 'name', 'John Doe')
|
|
326
|
+
|
|
327
|
+
# Get a single field from a hash
|
|
328
|
+
name = r.hget('user:1000', 'name')
|
|
329
|
+
print(name) # 'John Doe'
|
|
330
|
+
|
|
331
|
+
# Set multiple fields at once
|
|
332
|
+
r.hset('user:1000', mapping={
|
|
333
|
+
'name': 'John Doe',
|
|
334
|
+
'email': '[email protected]',
|
|
335
|
+
'age': 30
|
|
336
|
+
})
|
|
337
|
+
|
|
338
|
+
# Get all fields and values
|
|
339
|
+
user = r.hgetall('user:1000')
|
|
340
|
+
print(user)
|
|
341
|
+
# {'name': 'John Doe', 'email': '[email protected]', 'age': '30'}
|
|
342
|
+
```
|
|
343
|
+
|
|
344
|
+
### Advanced Hash Operations
|
|
345
|
+
|
|
346
|
+
```python
|
|
347
|
+
# Check if field exists
|
|
348
|
+
exists = r.hexists('user:1000', 'email') # True
|
|
349
|
+
|
|
350
|
+
# Get all field names
|
|
351
|
+
fields = r.hkeys('user:1000')
|
|
352
|
+
# ['name', 'email', 'age']
|
|
353
|
+
|
|
354
|
+
# Get all values
|
|
355
|
+
values = r.hvals('user:1000')
|
|
356
|
+
# ['John Doe', '[email protected]', '30']
|
|
357
|
+
|
|
358
|
+
# Get number of fields
|
|
359
|
+
count = r.hlen('user:1000') # 3
|
|
360
|
+
|
|
361
|
+
# Get multiple fields
|
|
362
|
+
user_data = r.hmget('user:1000', ['name', 'email'])
|
|
363
|
+
# ['John Doe', '[email protected]']
|
|
364
|
+
|
|
365
|
+
# Delete fields
|
|
366
|
+
r.hdel('user:1000', 'age')
|
|
367
|
+
r.hdel('user:1000', 'field1', 'field2', 'field3') # Multiple fields
|
|
368
|
+
|
|
369
|
+
# Increment numeric field
|
|
370
|
+
r.hset('user:1000', 'login_count', 0)
|
|
371
|
+
r.hincrby('user:1000', 'login_count', 1) # 1
|
|
372
|
+
|
|
373
|
+
# Increment float field
|
|
374
|
+
r.hincrbyfloat('user:1000', 'balance', 10.50)
|
|
375
|
+
|
|
376
|
+
# Set only if field doesn't exist
|
|
377
|
+
r.hsetnx('user:1000', 'created', '2024-01-01')
|
|
378
|
+
|
|
379
|
+
# Get random field
|
|
380
|
+
field = r.hrandfield('user:1000') # Random field name
|
|
381
|
+
fields_and_values = r.hrandfield('user:1000', 2, withvalues=True)
|
|
382
|
+
# [('name', 'John Doe'), ('email', '[email protected]')]
|
|
383
|
+
```
|
|
384
|
+
|
|
385
|
+
### Scan Hash Fields
|
|
386
|
+
|
|
387
|
+
```python
|
|
388
|
+
# Scan hash fields (for large hashes)
|
|
389
|
+
cursor = 0
|
|
390
|
+
while True:
|
|
391
|
+
cursor, fields = r.hscan('user:1000', cursor, count=10)
|
|
392
|
+
for field, value in fields.items():
|
|
393
|
+
print(f"{field}: {value}")
|
|
394
|
+
if cursor == 0:
|
|
395
|
+
break
|
|
396
|
+
|
|
397
|
+
# Using hscan_iter (simpler)
|
|
398
|
+
for field, value in r.hscan_iter('user:1000', count=10):
|
|
399
|
+
print(f"{field}: {value}")
|
|
400
|
+
```
|
|
401
|
+
|
|
402
|
+
---
|
|
403
|
+
|
|
404
|
+
## List Operations
|
|
405
|
+
|
|
406
|
+
### Basic List Operations
|
|
407
|
+
|
|
408
|
+
```python
|
|
409
|
+
# Push elements to the right (end) of list
|
|
410
|
+
r.rpush('tasks', 'task1')
|
|
411
|
+
r.rpush('tasks', 'task2', 'task3') # Multiple values
|
|
412
|
+
|
|
413
|
+
# Push elements to the left (beginning) of list
|
|
414
|
+
r.lpush('tasks', 'urgent_task')
|
|
415
|
+
r.lpush('tasks', 'task0', 'task-1')
|
|
416
|
+
|
|
417
|
+
# Get list length
|
|
418
|
+
length = r.llen('tasks')
|
|
419
|
+
|
|
420
|
+
# Get range of elements
|
|
421
|
+
tasks = r.lrange('tasks', 0, -1) # Get all
|
|
422
|
+
first_three = r.lrange('tasks', 0, 2) # Get first 3
|
|
423
|
+
|
|
424
|
+
# Get element by index
|
|
425
|
+
task = r.lindex('tasks', 0)
|
|
426
|
+
|
|
427
|
+
# Pop from right (end)
|
|
428
|
+
last_task = r.rpop('tasks')
|
|
429
|
+
|
|
430
|
+
# Pop from left (beginning)
|
|
431
|
+
first_task = r.lpop('tasks')
|
|
432
|
+
|
|
433
|
+
# Pop multiple elements
|
|
434
|
+
tasks = r.lpop('tasks', count=3) # Pop 3 elements
|
|
435
|
+
```
|
|
436
|
+
|
|
437
|
+
### Advanced List Operations
|
|
438
|
+
|
|
439
|
+
```python
|
|
440
|
+
# Blocking pop (wait for element)
|
|
441
|
+
task = r.blpop('tasks', timeout=10) # Wait up to 10 seconds
|
|
442
|
+
# Returns: ('tasks', 'task1') or None if timeout
|
|
443
|
+
|
|
444
|
+
task_right = r.brpop('tasks', timeout=10)
|
|
445
|
+
|
|
446
|
+
# Block on multiple lists
|
|
447
|
+
task = r.blpop(['queue1', 'queue2', 'queue3'], timeout=5)
|
|
448
|
+
|
|
449
|
+
# Set element at index
|
|
450
|
+
r.lset('tasks', 0, 'updated_task')
|
|
451
|
+
|
|
452
|
+
# Insert before/after element
|
|
453
|
+
r.linsert('tasks', 'BEFORE', 'task2', 'new_task')
|
|
454
|
+
r.linsert('tasks', 'AFTER', 'task2', 'another_task')
|
|
455
|
+
|
|
456
|
+
# Remove elements
|
|
457
|
+
r.lrem('tasks', 2, 'task1') # Remove first 2 occurrences
|
|
458
|
+
r.lrem('tasks', -1, 'task2') # Remove last occurrence
|
|
459
|
+
r.lrem('tasks', 0, 'task3') # Remove all occurrences
|
|
460
|
+
|
|
461
|
+
# Trim list to range
|
|
462
|
+
r.ltrim('tasks', 0, 9) # Keep only first 10 elements
|
|
463
|
+
|
|
464
|
+
# Move element between lists
|
|
465
|
+
element = r.rpoplpush('source', 'destination')
|
|
466
|
+
|
|
467
|
+
# Blocking move
|
|
468
|
+
moved = r.brpoplpush('source', 'destination', timeout=5)
|
|
469
|
+
|
|
470
|
+
# Move element (new command)
|
|
471
|
+
moved = r.lmove('source', 'destination', 'LEFT', 'RIGHT')
|
|
472
|
+
moved = r.blmove('source', 'destination', 'RIGHT', 'LEFT', timeout=5)
|
|
473
|
+
```
|
|
474
|
+
|
|
475
|
+
---
|
|
476
|
+
|
|
477
|
+
## Set Operations
|
|
478
|
+
|
|
479
|
+
### Basic Set Operations
|
|
480
|
+
|
|
481
|
+
```python
|
|
482
|
+
# Add members to set
|
|
483
|
+
r.sadd('tags', 'javascript')
|
|
484
|
+
r.sadd('tags', 'nodejs', 'redis', 'database')
|
|
485
|
+
|
|
486
|
+
# Check if member exists
|
|
487
|
+
exists = r.sismember('tags', 'nodejs') # True
|
|
488
|
+
|
|
489
|
+
# Get all members
|
|
490
|
+
all_tags = r.smembers('tags')
|
|
491
|
+
# {'javascript', 'nodejs', 'redis', 'database'}
|
|
492
|
+
|
|
493
|
+
# Get number of members
|
|
494
|
+
count = r.scard('tags') # 4
|
|
495
|
+
|
|
496
|
+
# Remove members
|
|
497
|
+
r.srem('tags', 'database')
|
|
498
|
+
r.srem('tags', 'nodejs', 'redis')
|
|
499
|
+
|
|
500
|
+
# Pop random member
|
|
501
|
+
random_tag = r.spop('tags')
|
|
502
|
+
|
|
503
|
+
# Pop multiple random members
|
|
504
|
+
random_tags = r.spop('tags', count=2)
|
|
505
|
+
|
|
506
|
+
# Get random member without removing
|
|
507
|
+
random = r.srandmember('tags')
|
|
508
|
+
random_three = r.srandmember('tags', 3)
|
|
509
|
+
```
|
|
510
|
+
|
|
511
|
+
### Set Operations Between Multiple Sets
|
|
512
|
+
|
|
513
|
+
```python
|
|
514
|
+
# Create sets
|
|
515
|
+
r.sadd('set1', 'a', 'b', 'c')
|
|
516
|
+
r.sadd('set2', 'b', 'c', 'd')
|
|
517
|
+
r.sadd('set3', 'c', 'd', 'e')
|
|
518
|
+
|
|
519
|
+
# Union (combine all unique members)
|
|
520
|
+
union = r.sunion('set1', 'set2')
|
|
521
|
+
# {'a', 'b', 'c', 'd'}
|
|
522
|
+
|
|
523
|
+
# Store union in new set
|
|
524
|
+
r.sunionstore('result', 'set1', 'set2')
|
|
525
|
+
|
|
526
|
+
# Intersection (common members)
|
|
527
|
+
inter = r.sinter('set1', 'set2')
|
|
528
|
+
# {'b', 'c'}
|
|
529
|
+
|
|
530
|
+
# Store intersection
|
|
531
|
+
r.sinterstore('result', 'set1', 'set2')
|
|
532
|
+
|
|
533
|
+
# Intersection of multiple sets
|
|
534
|
+
inter = r.sinter('set1', 'set2', 'set3') # {'c'}
|
|
535
|
+
|
|
536
|
+
# Difference (members in first set but not in others)
|
|
537
|
+
diff = r.sdiff('set1', 'set2')
|
|
538
|
+
# {'a'}
|
|
539
|
+
|
|
540
|
+
# Store difference
|
|
541
|
+
r.sdiffstore('result', 'set1', 'set2')
|
|
542
|
+
|
|
543
|
+
# Move member between sets
|
|
544
|
+
r.smove('set1', 'set2', 'a')
|
|
545
|
+
|
|
546
|
+
# Check multiple members
|
|
547
|
+
result = r.smismember('tags', 'nodejs', 'python', 'redis')
|
|
548
|
+
# [True, False, True]
|
|
549
|
+
```
|
|
550
|
+
|
|
551
|
+
### Scan Set Members
|
|
552
|
+
|
|
553
|
+
```python
|
|
554
|
+
# Scan set members (for large sets)
|
|
555
|
+
cursor = 0
|
|
556
|
+
while True:
|
|
557
|
+
cursor, members = r.sscan('tags', cursor, count=10)
|
|
558
|
+
for member in members:
|
|
559
|
+
print(member)
|
|
560
|
+
if cursor == 0:
|
|
561
|
+
break
|
|
562
|
+
|
|
563
|
+
# Using sscan_iter (simpler)
|
|
564
|
+
for member in r.sscan_iter('tags', match='node*', count=10):
|
|
565
|
+
print(member)
|
|
566
|
+
```
|
|
567
|
+
|
|
568
|
+
---
|
|
569
|
+
|
|
570
|
+
## Sorted Set Operations
|
|
571
|
+
|
|
572
|
+
### Basic Sorted Set Operations
|
|
573
|
+
|
|
574
|
+
```python
|
|
575
|
+
# Add members with scores
|
|
576
|
+
r.zadd('leaderboard', {'player1': 100})
|
|
577
|
+
r.zadd('leaderboard', {'player2': 200, 'player3': 150})
|
|
578
|
+
|
|
579
|
+
# Add with options
|
|
580
|
+
r.zadd('leaderboard', {'player4': 175}, nx=True) # Only if doesn't exist
|
|
581
|
+
r.zadd('leaderboard', {'player1': 120}, xx=True) # Only if exists
|
|
582
|
+
r.zadd('leaderboard', {'player1': 130}, gt=True) # Only if new score greater
|
|
583
|
+
r.zadd('leaderboard', {'player1': 110}, lt=True) # Only if new score less
|
|
584
|
+
|
|
585
|
+
# Get rank (0-based, lowest to highest)
|
|
586
|
+
rank = r.zrank('leaderboard', 'player1') # 0
|
|
587
|
+
|
|
588
|
+
# Get reverse rank (highest to lowest)
|
|
589
|
+
rev_rank = r.zrevrank('leaderboard', 'player2') # 0
|
|
590
|
+
|
|
591
|
+
# Get score
|
|
592
|
+
score = r.zscore('leaderboard', 'player2') # 200.0
|
|
593
|
+
|
|
594
|
+
# Get number of members
|
|
595
|
+
count = r.zcard('leaderboard') # 3
|
|
596
|
+
|
|
597
|
+
# Increment score
|
|
598
|
+
r.zincrby('leaderboard', 50, 'player1') # 150.0
|
|
599
|
+
```
|
|
600
|
+
|
|
601
|
+
### Range Queries
|
|
602
|
+
|
|
603
|
+
```python
|
|
604
|
+
# Get range by rank (lowest to highest)
|
|
605
|
+
bottom3 = r.zrange('leaderboard', 0, 2)
|
|
606
|
+
# ['player1', 'player3', 'player2']
|
|
607
|
+
|
|
608
|
+
# Get range with scores
|
|
609
|
+
with_scores = r.zrange('leaderboard', 0, 2, withscores=True)
|
|
610
|
+
# [('player1', 150.0), ('player3', 150.0), ('player2', 200.0)]
|
|
611
|
+
|
|
612
|
+
# Get range by rank (highest to lowest)
|
|
613
|
+
top3 = r.zrevrange('leaderboard', 0, 2)
|
|
614
|
+
|
|
615
|
+
# Get range with scores (descending)
|
|
616
|
+
top3_scores = r.zrevrange('leaderboard', 0, 2, withscores=True)
|
|
617
|
+
|
|
618
|
+
# Get range by score
|
|
619
|
+
range_result = r.zrangebyscore('leaderboard', 100, 200)
|
|
620
|
+
|
|
621
|
+
# Get range by score with limit
|
|
622
|
+
limited = r.zrangebyscore('leaderboard', 100, 200, start=0, num=10)
|
|
623
|
+
|
|
624
|
+
# Get range by score with scores
|
|
625
|
+
range_scores = r.zrangebyscore('leaderboard', 100, 200, withscores=True)
|
|
626
|
+
|
|
627
|
+
# Get reverse range by score
|
|
628
|
+
rev_range = r.zrevrangebyscore('leaderboard', 200, 100)
|
|
629
|
+
|
|
630
|
+
# Infinity boundaries
|
|
631
|
+
high_scores = r.zrangebyscore('leaderboard', 150, '+inf')
|
|
632
|
+
low_scores = r.zrangebyscore('leaderboard', '-inf', 150)
|
|
633
|
+
```
|
|
634
|
+
|
|
635
|
+
### Advanced Sorted Set Operations
|
|
636
|
+
|
|
637
|
+
```python
|
|
638
|
+
# Count members in score range
|
|
639
|
+
count = r.zcount('leaderboard', 100, 200)
|
|
640
|
+
|
|
641
|
+
# Count members by lexicographical range
|
|
642
|
+
r.zadd('words', {'apple': 0, 'banana': 0, 'cherry': 0})
|
|
643
|
+
lex_count = r.zlexcount('words', '[a', '[c')
|
|
644
|
+
|
|
645
|
+
# Get lexicographical range
|
|
646
|
+
lex_range = r.zrangebylex('words', '[a', '[c')
|
|
647
|
+
|
|
648
|
+
# Remove members
|
|
649
|
+
r.zrem('leaderboard', 'player1')
|
|
650
|
+
r.zrem('leaderboard', 'player2', 'player3')
|
|
651
|
+
|
|
652
|
+
# Remove by rank range
|
|
653
|
+
r.zremrangebyrank('leaderboard', 0, 1) # Remove bottom 2
|
|
654
|
+
|
|
655
|
+
# Remove by score range
|
|
656
|
+
r.zremrangebyscore('leaderboard', 0, 100)
|
|
657
|
+
|
|
658
|
+
# Remove by lex range
|
|
659
|
+
r.zremrangebylex('words', '[a', '[b')
|
|
660
|
+
|
|
661
|
+
# Pop highest/lowest scoring member
|
|
662
|
+
highest = r.zpopmax('leaderboard') # [('player2', 200.0)]
|
|
663
|
+
lowest = r.zpopmin('leaderboard') # [('player1', 100.0)]
|
|
664
|
+
|
|
665
|
+
# Pop with count
|
|
666
|
+
top3 = r.zpopmax('leaderboard', count=3)
|
|
667
|
+
|
|
668
|
+
# Blocking pop
|
|
669
|
+
member = r.bzpopmax('leaderboard', timeout=5)
|
|
670
|
+
# ('leaderboard', 'player2', 200.0)
|
|
671
|
+
min_member = r.bzpopmin('leaderboard', timeout=5)
|
|
672
|
+
|
|
673
|
+
# Block on multiple sorted sets
|
|
674
|
+
member = r.bzpopmax(['set1', 'set2', 'set3'], timeout=5)
|
|
675
|
+
```
|
|
676
|
+
|
|
677
|
+
### Sorted Set Operations Between Multiple Sets
|
|
678
|
+
|
|
679
|
+
```python
|
|
680
|
+
# Union of sorted sets
|
|
681
|
+
r.zunionstore('result', ['set1', 'set2'])
|
|
682
|
+
|
|
683
|
+
# Union with weights
|
|
684
|
+
r.zunionstore('result', {'set1': 2, 'set2': 3})
|
|
685
|
+
|
|
686
|
+
# Union with aggregate function
|
|
687
|
+
r.zunionstore('result', ['set1', 'set2'], aggregate='MAX') # or 'MIN', 'SUM'
|
|
688
|
+
|
|
689
|
+
# Intersection
|
|
690
|
+
r.zinterstore('result', ['set1', 'set2'])
|
|
691
|
+
|
|
692
|
+
# Intersection with weights and aggregate
|
|
693
|
+
r.zinterstore('result', {'set1': 1, 'set2': 2}, aggregate='SUM')
|
|
694
|
+
|
|
695
|
+
# Get union without storing
|
|
696
|
+
union = r.zunion(['set1', 'set2'], withscores=True)
|
|
697
|
+
|
|
698
|
+
# Get intersection without storing
|
|
699
|
+
inter = r.zinter(['set1', 'set2'], withscores=True)
|
|
700
|
+
|
|
701
|
+
# Difference between sorted sets
|
|
702
|
+
diff = r.zdiff(['set1', 'set2'])
|
|
703
|
+
diff_scores = r.zdiff(['set1', 'set2'], withscores=True)
|
|
704
|
+
|
|
705
|
+
# Store difference
|
|
706
|
+
r.zdiffstore('result', ['set1', 'set2'])
|
|
707
|
+
```
|
|
708
|
+
|
|
709
|
+
### Scan Sorted Set
|
|
710
|
+
|
|
711
|
+
```python
|
|
712
|
+
# Scan sorted set members
|
|
713
|
+
cursor = 0
|
|
714
|
+
while True:
|
|
715
|
+
cursor, members = r.zscan('leaderboard', cursor, count=10)
|
|
716
|
+
for member, score in members:
|
|
717
|
+
print(f"{member}: {score}")
|
|
718
|
+
if cursor == 0:
|
|
719
|
+
break
|
|
720
|
+
|
|
721
|
+
# Using zscan_iter (simpler)
|
|
722
|
+
for member, score in r.zscan_iter('leaderboard', count=10):
|
|
723
|
+
print(f"{member}: {score}")
|
|
724
|
+
```
|
|
725
|
+
|
|
726
|
+
---
|
|
727
|
+
|
|
728
|
+
## Key Management Operations
|
|
729
|
+
|
|
730
|
+
### Key Operations
|
|
731
|
+
|
|
732
|
+
```python
|
|
733
|
+
# Check if key exists
|
|
734
|
+
exists = r.exists('mykey') # 1 if exists, 0 if not
|
|
735
|
+
multi_exists = r.exists('key1', 'key2', 'key3') # Count
|
|
736
|
+
|
|
737
|
+
# Delete keys
|
|
738
|
+
r.delete('mykey')
|
|
739
|
+
r.delete('key1', 'key2', 'key3')
|
|
740
|
+
|
|
741
|
+
# Set expiration in seconds
|
|
742
|
+
r.expire('mykey', 60) # Expire in 60 seconds
|
|
743
|
+
|
|
744
|
+
# Set expiration at specific timestamp
|
|
745
|
+
import time
|
|
746
|
+
timestamp = int(time.time()) + 3600
|
|
747
|
+
r.expireat('mykey', timestamp)
|
|
748
|
+
|
|
749
|
+
# Set expiration in milliseconds
|
|
750
|
+
r.pexpire('mykey', 60000) # 60 seconds
|
|
751
|
+
|
|
752
|
+
# Set expiration at timestamp (milliseconds)
|
|
753
|
+
r.pexpireat('mykey', int(time.time() * 1000) + 60000)
|
|
754
|
+
|
|
755
|
+
# Get time to live in seconds
|
|
756
|
+
ttl = r.ttl('mykey') # -1 if no expiration, -2 if not exists
|
|
757
|
+
|
|
758
|
+
# Get time to live in milliseconds
|
|
759
|
+
pttl = r.pttl('mykey')
|
|
760
|
+
|
|
761
|
+
# Remove expiration
|
|
762
|
+
r.persist('mykey')
|
|
763
|
+
|
|
764
|
+
# Rename key
|
|
765
|
+
r.rename('oldkey', 'newkey')
|
|
766
|
+
|
|
767
|
+
# Rename only if new key doesn't exist
|
|
768
|
+
renamed = r.renamenx('oldkey', 'newkey') # Returns True if renamed
|
|
769
|
+
|
|
770
|
+
# Get key type
|
|
771
|
+
key_type = r.type('mykey') # 'string', 'list', 'set', etc.
|
|
772
|
+
|
|
773
|
+
# Get random key
|
|
774
|
+
random_key = r.randomkey()
|
|
775
|
+
|
|
776
|
+
# Copy key
|
|
777
|
+
r.copy('source', 'destination')
|
|
778
|
+
r.copy('source', 'destination', replace=True) # Overwrite if exists
|
|
779
|
+
|
|
780
|
+
# Touch keys (update last access time)
|
|
781
|
+
r.touch('key1', 'key2', 'key3')
|
|
782
|
+
|
|
783
|
+
# Get object encoding
|
|
784
|
+
encoding = r.object('encoding', 'mykey')
|
|
785
|
+
|
|
786
|
+
# Get object idle time
|
|
787
|
+
idle_time = r.object('idletime', 'mykey')
|
|
788
|
+
```
|
|
789
|
+
|
|
790
|
+
### Scanning Keys
|
|
791
|
+
|
|
792
|
+
```python
|
|
793
|
+
# Scan all keys (use instead of KEYS for production)
|
|
794
|
+
keys = []
|
|
795
|
+
cursor = 0
|
|
796
|
+
while True:
|
|
797
|
+
cursor, partial_keys = r.scan(cursor, count=100)
|
|
798
|
+
keys.extend(partial_keys)
|
|
799
|
+
if cursor == 0:
|
|
800
|
+
break
|
|
801
|
+
|
|
802
|
+
# Using scan_iter (simpler)
|
|
803
|
+
for key in r.scan_iter(count=100):
|
|
804
|
+
print(key)
|
|
805
|
+
|
|
806
|
+
# Scan with pattern
|
|
807
|
+
for key in r.scan_iter(match='user:*', count=100):
|
|
808
|
+
print(key)
|
|
809
|
+
|
|
810
|
+
# Scan specific key type
|
|
811
|
+
for key in r.scan_iter(_type='string', count=100):
|
|
812
|
+
print(key)
|
|
813
|
+
|
|
814
|
+
# Scan with pattern and type
|
|
815
|
+
for key in r.scan_iter(match='session:*', _type='hash', count=100):
|
|
816
|
+
print(key)
|
|
817
|
+
```
|
|
818
|
+
|
|
819
|
+
---
|
|
820
|
+
|
|
821
|
+
## Transactions
|
|
822
|
+
|
|
823
|
+
### Basic Transaction (MULTI/EXEC)
|
|
824
|
+
|
|
825
|
+
```python
|
|
826
|
+
# Execute multiple commands atomically
|
|
827
|
+
r.set('another-key', 'another-value')
|
|
828
|
+
|
|
829
|
+
pipe = r.pipeline()
|
|
830
|
+
pipe.set('key', 'value')
|
|
831
|
+
pipe.get('another-key')
|
|
832
|
+
pipe.incr('counter')
|
|
833
|
+
results = pipe.execute()
|
|
834
|
+
|
|
835
|
+
print(results) # [True, 'another-value', 1]
|
|
836
|
+
```
|
|
837
|
+
|
|
838
|
+
### Pipeline Without Transaction
|
|
839
|
+
|
|
840
|
+
```python
|
|
841
|
+
# Execute commands in pipeline without atomicity
|
|
842
|
+
pipe = r.pipeline(transaction=False)
|
|
843
|
+
pipe.set('key1', 'value1')
|
|
844
|
+
pipe.set('key2', 'value2')
|
|
845
|
+
pipe.get('key1')
|
|
846
|
+
results = pipe.execute()
|
|
847
|
+
```
|
|
848
|
+
|
|
849
|
+
### Transaction with Error Handling
|
|
850
|
+
|
|
851
|
+
```python
|
|
852
|
+
try:
|
|
853
|
+
pipe = r.pipeline()
|
|
854
|
+
pipe.set('key1', 'value1')
|
|
855
|
+
pipe.set('key2', 'value2')
|
|
856
|
+
pipe.get('key1')
|
|
857
|
+
results = pipe.execute()
|
|
858
|
+
|
|
859
|
+
for i, result in enumerate(results):
|
|
860
|
+
print(f"Command {i} result: {result}")
|
|
861
|
+
except redis.exceptions.ResponseError as e:
|
|
862
|
+
print(f"Transaction failed: {e}")
|
|
863
|
+
```
|
|
864
|
+
|
|
865
|
+
### Transaction with WATCH (Optimistic Locking)
|
|
866
|
+
|
|
867
|
+
```python
|
|
868
|
+
# Watch a key for changes
|
|
869
|
+
with r.pipeline() as pipe:
|
|
870
|
+
while True:
|
|
871
|
+
try:
|
|
872
|
+
pipe.watch('balance')
|
|
873
|
+
balance = int(pipe.get('balance') or 0)
|
|
874
|
+
|
|
875
|
+
if balance >= 100:
|
|
876
|
+
pipe.multi()
|
|
877
|
+
pipe.decrby('balance', 100)
|
|
878
|
+
pipe.incrby('purchases', 1)
|
|
879
|
+
pipe.execute()
|
|
880
|
+
print('Purchase successful')
|
|
881
|
+
break
|
|
882
|
+
else:
|
|
883
|
+
pipe.unwatch()
|
|
884
|
+
print('Insufficient balance')
|
|
885
|
+
break
|
|
886
|
+
except redis.WatchError:
|
|
887
|
+
print('Balance was modified, retrying...')
|
|
888
|
+
continue
|
|
889
|
+
```
|
|
890
|
+
|
|
891
|
+
### Complex Transaction Example
|
|
892
|
+
|
|
893
|
+
```python
|
|
894
|
+
def transfer_money(r, from_account, to_account, amount):
|
|
895
|
+
"""Transfer money between accounts with optimistic locking"""
|
|
896
|
+
with r.pipeline() as pipe:
|
|
897
|
+
while True:
|
|
898
|
+
try:
|
|
899
|
+
pipe.watch(from_account, to_account)
|
|
900
|
+
|
|
901
|
+
from_balance = float(pipe.get(from_account) or 0)
|
|
902
|
+
|
|
903
|
+
if from_balance < amount:
|
|
904
|
+
pipe.unwatch()
|
|
905
|
+
raise ValueError('Insufficient funds')
|
|
906
|
+
|
|
907
|
+
pipe.multi()
|
|
908
|
+
pipe.incrbyfloat(from_account, -amount)
|
|
909
|
+
pipe.incrbyfloat(to_account, amount)
|
|
910
|
+
pipe.execute()
|
|
911
|
+
return True
|
|
912
|
+
except redis.WatchError:
|
|
913
|
+
# Another client modified the keys, retry
|
|
914
|
+
continue
|
|
915
|
+
|
|
916
|
+
# Usage
|
|
917
|
+
try:
|
|
918
|
+
transfer_money(r, 'account:1', 'account:2', 50.00)
|
|
919
|
+
print('Transfer successful')
|
|
920
|
+
except ValueError as e:
|
|
921
|
+
print(f'Transfer failed: {e}')
|
|
922
|
+
```
|
|
923
|
+
|
|
924
|
+
---
|
|
925
|
+
|
|
926
|
+
## Pipelining
|
|
927
|
+
|
|
928
|
+
### Basic Pipelining
|
|
929
|
+
|
|
930
|
+
```python
|
|
931
|
+
# Execute multiple commands in one round trip
|
|
932
|
+
pipe = r.pipeline(transaction=False)
|
|
933
|
+
pipe.set('key1', 'value1')
|
|
934
|
+
pipe.set('key2', 'value2')
|
|
935
|
+
pipe.get('key1')
|
|
936
|
+
pipe.mget(['key1', 'key2'])
|
|
937
|
+
results = pipe.execute()
|
|
938
|
+
|
|
939
|
+
print(results) # [True, True, 'value1', ['value1', 'value2']]
|
|
940
|
+
```
|
|
941
|
+
|
|
942
|
+
### Large Batch Operations
|
|
943
|
+
|
|
944
|
+
```python
|
|
945
|
+
def batch_set_keys(r, key_value_pairs):
|
|
946
|
+
"""Set many keys efficiently using pipeline"""
|
|
947
|
+
pipe = r.pipeline(transaction=False)
|
|
948
|
+
|
|
949
|
+
for key, value in key_value_pairs.items():
|
|
950
|
+
pipe.set(key, value)
|
|
951
|
+
|
|
952
|
+
results = pipe.execute()
|
|
953
|
+
return results
|
|
954
|
+
|
|
955
|
+
# Usage
|
|
956
|
+
data = {
|
|
957
|
+
'user:1': 'Alice',
|
|
958
|
+
'user:2': 'Bob',
|
|
959
|
+
'user:3': 'Charlie'
|
|
960
|
+
}
|
|
961
|
+
|
|
962
|
+
batch_set_keys(r, data)
|
|
963
|
+
```
|
|
964
|
+
|
|
965
|
+
### Chunked Pipeline
|
|
966
|
+
|
|
967
|
+
```python
|
|
968
|
+
def batch_operation_chunked(r, operations, chunk_size=1000):
|
|
969
|
+
"""Execute operations in chunks to avoid memory issues"""
|
|
970
|
+
results = []
|
|
971
|
+
|
|
972
|
+
for i in range(0, len(operations), chunk_size):
|
|
973
|
+
chunk = operations[i:i + chunk_size]
|
|
974
|
+
pipe = r.pipeline(transaction=False)
|
|
975
|
+
|
|
976
|
+
for op in chunk:
|
|
977
|
+
getattr(pipe, op['command'])(*op['args'])
|
|
978
|
+
|
|
979
|
+
results.extend(pipe.execute())
|
|
980
|
+
|
|
981
|
+
return results
|
|
982
|
+
|
|
983
|
+
# Usage
|
|
984
|
+
operations = [
|
|
985
|
+
{'command': 'set', 'args': [f'key:{i}', f'value:{i}']}
|
|
986
|
+
for i in range(10000)
|
|
987
|
+
]
|
|
988
|
+
|
|
989
|
+
batch_operation_chunked(r, operations, chunk_size=1000)
|
|
990
|
+
```
|
|
991
|
+
|
|
992
|
+
---
|
|
993
|
+
|
|
994
|
+
## Pub/Sub
|
|
995
|
+
|
|
996
|
+
### Basic Subscriber
|
|
997
|
+
|
|
998
|
+
```python
|
|
999
|
+
import redis
|
|
1000
|
+
|
|
1001
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
1002
|
+
|
|
1003
|
+
# Create PubSub object
|
|
1004
|
+
pubsub = r.pubsub()
|
|
1005
|
+
|
|
1006
|
+
# Subscribe to channel
|
|
1007
|
+
pubsub.subscribe('notifications')
|
|
1008
|
+
|
|
1009
|
+
# Listen for messages
|
|
1010
|
+
for message in pubsub.listen():
|
|
1011
|
+
if message['type'] == 'message':
|
|
1012
|
+
print(f"Received: {message['data']}")
|
|
1013
|
+
```
|
|
1014
|
+
|
|
1015
|
+
### Basic Publisher
|
|
1016
|
+
|
|
1017
|
+
```python
|
|
1018
|
+
import redis
|
|
1019
|
+
|
|
1020
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
1021
|
+
|
|
1022
|
+
# Publish messages
|
|
1023
|
+
r.publish('notifications', 'Hello, World!')
|
|
1024
|
+
r.publish('notifications', 'System update')
|
|
1025
|
+
|
|
1026
|
+
print('Messages published')
|
|
1027
|
+
```
|
|
1028
|
+
|
|
1029
|
+
### Multiple Channels
|
|
1030
|
+
|
|
1031
|
+
```python
|
|
1032
|
+
# Subscribe to multiple channels
|
|
1033
|
+
pubsub = r.pubsub()
|
|
1034
|
+
pubsub.subscribe('channel1', 'channel2', 'channel3')
|
|
1035
|
+
|
|
1036
|
+
for message in pubsub.listen():
|
|
1037
|
+
if message['type'] == 'message':
|
|
1038
|
+
print(f"Channel: {message['channel']}, Message: {message['data']}")
|
|
1039
|
+
```
|
|
1040
|
+
|
|
1041
|
+
### Pattern-Based Subscription
|
|
1042
|
+
|
|
1043
|
+
```python
|
|
1044
|
+
# Subscribe to channels matching pattern
|
|
1045
|
+
pubsub = r.pubsub()
|
|
1046
|
+
pubsub.psubscribe('user:*')
|
|
1047
|
+
|
|
1048
|
+
for message in pubsub.listen():
|
|
1049
|
+
if message['type'] == 'pmessage':
|
|
1050
|
+
print(f"Pattern: {message['pattern']}, Channel: {message['channel']}, Message: {message['data']}")
|
|
1051
|
+
|
|
1052
|
+
# Publish to matching channels
|
|
1053
|
+
r.publish('user:1000', 'User 1000 logged in')
|
|
1054
|
+
r.publish('user:2000', 'User 2000 logged out')
|
|
1055
|
+
```
|
|
1056
|
+
|
|
1057
|
+
### Message Handler Functions
|
|
1058
|
+
|
|
1059
|
+
```python
|
|
1060
|
+
def message_handler(message):
|
|
1061
|
+
"""Handle messages from channel"""
|
|
1062
|
+
print(f"Received: {message['data']}")
|
|
1063
|
+
|
|
1064
|
+
def error_handler(message):
|
|
1065
|
+
"""Handle errors"""
|
|
1066
|
+
print(f"Error: {message}")
|
|
1067
|
+
|
|
1068
|
+
# Subscribe with handler
|
|
1069
|
+
pubsub = r.pubsub()
|
|
1070
|
+
pubsub.subscribe(**{
|
|
1071
|
+
'notifications': message_handler,
|
|
1072
|
+
'errors': error_handler
|
|
1073
|
+
})
|
|
1074
|
+
|
|
1075
|
+
# Run event loop
|
|
1076
|
+
thread = pubsub.run_in_thread(sleep_time=0.01)
|
|
1077
|
+
|
|
1078
|
+
# Do other work...
|
|
1079
|
+
|
|
1080
|
+
# Stop listening
|
|
1081
|
+
thread.stop()
|
|
1082
|
+
```
|
|
1083
|
+
|
|
1084
|
+
### Unsubscribe
|
|
1085
|
+
|
|
1086
|
+
```python
|
|
1087
|
+
# Unsubscribe from specific channel
|
|
1088
|
+
pubsub.unsubscribe('channel1')
|
|
1089
|
+
|
|
1090
|
+
# Unsubscribe from all channels
|
|
1091
|
+
pubsub.unsubscribe()
|
|
1092
|
+
|
|
1093
|
+
# Unsubscribe from pattern
|
|
1094
|
+
pubsub.punsubscribe('user:*')
|
|
1095
|
+
```
|
|
1096
|
+
|
|
1097
|
+
### Complete Pub/Sub Example
|
|
1098
|
+
|
|
1099
|
+
```python
|
|
1100
|
+
import redis
|
|
1101
|
+
import json
|
|
1102
|
+
import threading
|
|
1103
|
+
import time
|
|
1104
|
+
|
|
1105
|
+
def subscriber_worker():
|
|
1106
|
+
"""Subscriber in separate thread"""
|
|
1107
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
1108
|
+
pubsub = r.pubsub()
|
|
1109
|
+
pubsub.subscribe('chat:room1')
|
|
1110
|
+
|
|
1111
|
+
for message in pubsub.listen():
|
|
1112
|
+
if message['type'] == 'message':
|
|
1113
|
+
data = json.loads(message['data'])
|
|
1114
|
+
print(f"{data['user']}: {data['text']}")
|
|
1115
|
+
|
|
1116
|
+
def publisher_worker():
|
|
1117
|
+
"""Publisher in separate thread"""
|
|
1118
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
1119
|
+
|
|
1120
|
+
time.sleep(1) # Wait for subscriber
|
|
1121
|
+
|
|
1122
|
+
# Publish chat messages
|
|
1123
|
+
r.publish('chat:room1', json.dumps({
|
|
1124
|
+
'user': 'Alice',
|
|
1125
|
+
'text': 'Hello everyone!'
|
|
1126
|
+
}))
|
|
1127
|
+
|
|
1128
|
+
r.publish('chat:room1', json.dumps({
|
|
1129
|
+
'user': 'Bob',
|
|
1130
|
+
'text': 'Hi Alice!'
|
|
1131
|
+
}))
|
|
1132
|
+
|
|
1133
|
+
# Start subscriber
|
|
1134
|
+
sub_thread = threading.Thread(target=subscriber_worker, daemon=True)
|
|
1135
|
+
sub_thread.start()
|
|
1136
|
+
|
|
1137
|
+
# Start publisher
|
|
1138
|
+
pub_thread = threading.Thread(target=publisher_worker)
|
|
1139
|
+
pub_thread.start()
|
|
1140
|
+
pub_thread.join()
|
|
1141
|
+
|
|
1142
|
+
time.sleep(2) # Let subscriber process messages
|
|
1143
|
+
```
|
|
1144
|
+
|
|
1145
|
+
---
|
|
1146
|
+
|
|
1147
|
+
## Redis Streams
|
|
1148
|
+
|
|
1149
|
+
### Add to Stream (XADD)
|
|
1150
|
+
|
|
1151
|
+
```python
|
|
1152
|
+
# Add entry to stream
|
|
1153
|
+
entry_id = r.xadd('events', {
|
|
1154
|
+
'user': 'alice',
|
|
1155
|
+
'action': 'login',
|
|
1156
|
+
'timestamp': time.time()
|
|
1157
|
+
})
|
|
1158
|
+
|
|
1159
|
+
print(f'Entry ID: {entry_id}') # '1234567890123-0'
|
|
1160
|
+
|
|
1161
|
+
# Add with specific ID
|
|
1162
|
+
r.xadd('events', {'user': 'bob', 'action': 'logout'}, id='1234567890123-1')
|
|
1163
|
+
|
|
1164
|
+
# Add with maxlen (limit stream size)
|
|
1165
|
+
r.xadd('events', {'user': 'charlie', 'action': 'signup'}, maxlen=1000)
|
|
1166
|
+
|
|
1167
|
+
# Add with approximate trimming
|
|
1168
|
+
r.xadd('events', {'user': 'dave', 'action': 'purchase'}, maxlen=1000, approximate=True)
|
|
1169
|
+
|
|
1170
|
+
# Add with minid trimming
|
|
1171
|
+
r.xadd('events', {'data': 'value'}, minid='1234567890000-0')
|
|
1172
|
+
```
|
|
1173
|
+
|
|
1174
|
+
### Read from Stream (XREAD)
|
|
1175
|
+
|
|
1176
|
+
```python
|
|
1177
|
+
# Read entries from beginning
|
|
1178
|
+
messages = r.xread({'events': '0'}, count=10)
|
|
1179
|
+
|
|
1180
|
+
for stream, entries in messages:
|
|
1181
|
+
for entry_id, data in entries:
|
|
1182
|
+
print(f"ID: {entry_id}, Data: {data}")
|
|
1183
|
+
|
|
1184
|
+
# Read from multiple streams
|
|
1185
|
+
messages = r.xread({'stream1': '0', 'stream2': '0'})
|
|
1186
|
+
|
|
1187
|
+
# Blocking read (wait for new entries)
|
|
1188
|
+
messages = r.xread({'events': '$'}, block=5000) # Wait up to 5 seconds
|
|
1189
|
+
|
|
1190
|
+
# Read only new messages
|
|
1191
|
+
messages = r.xread({'events': '$'}, block=0) # Block indefinitely
|
|
1192
|
+
```
|
|
1193
|
+
|
|
1194
|
+
### Stream Range Queries
|
|
1195
|
+
|
|
1196
|
+
```python
|
|
1197
|
+
# Read all entries
|
|
1198
|
+
all_entries = r.xrange('events', '-', '+')
|
|
1199
|
+
|
|
1200
|
+
# Read range with IDs
|
|
1201
|
+
range_entries = r.xrange('events', '1234567890000', '1234567899999')
|
|
1202
|
+
|
|
1203
|
+
# Read with limit
|
|
1204
|
+
limited = r.xrange('events', '-', '+', count=100)
|
|
1205
|
+
|
|
1206
|
+
# Reverse range
|
|
1207
|
+
reverse = r.xrevrange('events', '+', '-', count=10)
|
|
1208
|
+
|
|
1209
|
+
# Iterate through entries
|
|
1210
|
+
for entry_id, data in r.xrange('events', '-', '+'):
|
|
1211
|
+
print(f"{entry_id}: {data}")
|
|
1212
|
+
```
|
|
1213
|
+
|
|
1214
|
+
### Stream Length and Info
|
|
1215
|
+
|
|
1216
|
+
```python
|
|
1217
|
+
# Get stream length
|
|
1218
|
+
length = r.xlen('events')
|
|
1219
|
+
|
|
1220
|
+
# Get stream info
|
|
1221
|
+
info = r.xinfo_stream('events')
|
|
1222
|
+
print(f"Length: {info['length']}")
|
|
1223
|
+
print(f"First entry: {info['first-entry']}")
|
|
1224
|
+
print(f"Last entry: {info['last-entry']}")
|
|
1225
|
+
```
|
|
1226
|
+
|
|
1227
|
+
### Consumer Groups
|
|
1228
|
+
|
|
1229
|
+
```python
|
|
1230
|
+
# Create consumer group
|
|
1231
|
+
try:
|
|
1232
|
+
r.xgroup_create('events', 'processors', '0', mkstream=True)
|
|
1233
|
+
except redis.exceptions.ResponseError:
|
|
1234
|
+
# Group already exists
|
|
1235
|
+
pass
|
|
1236
|
+
|
|
1237
|
+
# Read as consumer group
|
|
1238
|
+
messages = r.xreadgroup(
|
|
1239
|
+
'processors',
|
|
1240
|
+
'consumer1',
|
|
1241
|
+
{'events': '>'}, # '>' means undelivered messages
|
|
1242
|
+
count=10,
|
|
1243
|
+
block=5000
|
|
1244
|
+
)
|
|
1245
|
+
|
|
1246
|
+
# Process messages and acknowledge
|
|
1247
|
+
for stream, entries in messages:
|
|
1248
|
+
for entry_id, data in entries:
|
|
1249
|
+
# Process message
|
|
1250
|
+
print(f'Processing: {entry_id}, {data}')
|
|
1251
|
+
|
|
1252
|
+
# Acknowledge message
|
|
1253
|
+
r.xack('events', 'processors', entry_id)
|
|
1254
|
+
|
|
1255
|
+
# Get pending messages
|
|
1256
|
+
pending = r.xpending('events', 'processors')
|
|
1257
|
+
print(f"Pending messages: {pending['pending']}")
|
|
1258
|
+
|
|
1259
|
+
# Get detailed pending info
|
|
1260
|
+
pending_details = r.xpending_range('events', 'processors', '-', '+', count=10)
|
|
1261
|
+
|
|
1262
|
+
# Claim pending messages (take over from another consumer)
|
|
1263
|
+
claimed = r.xclaim('events', 'processors', 'consumer2', 3600000, ['1234567890123-0'])
|
|
1264
|
+
```
|
|
1265
|
+
|
|
1266
|
+
### Delete from Stream
|
|
1267
|
+
|
|
1268
|
+
```python
|
|
1269
|
+
# Delete specific entries
|
|
1270
|
+
r.xdel('events', '1234567890123-0', '1234567890123-1')
|
|
1271
|
+
|
|
1272
|
+
# Trim stream
|
|
1273
|
+
r.xtrim('events', maxlen=1000)
|
|
1274
|
+
|
|
1275
|
+
# Approximate trim
|
|
1276
|
+
r.xtrim('events', maxlen=1000, approximate=True)
|
|
1277
|
+
|
|
1278
|
+
# Trim by minid
|
|
1279
|
+
r.xtrim('events', minid='1234567890000-0')
|
|
1280
|
+
```
|
|
1281
|
+
|
|
1282
|
+
### Complete Stream Example
|
|
1283
|
+
|
|
1284
|
+
```python
|
|
1285
|
+
import redis
|
|
1286
|
+
import time
|
|
1287
|
+
import json
|
|
1288
|
+
|
|
1289
|
+
r = redis.Redis(host='localhost', port=6379, decode_responses=True)
|
|
1290
|
+
|
|
1291
|
+
def add_event(event_data):
|
|
1292
|
+
"""Add event to stream"""
|
|
1293
|
+
return r.xadd('events', event_data)
|
|
1294
|
+
|
|
1295
|
+
def process_events():
|
|
1296
|
+
"""Process events using consumer group"""
|
|
1297
|
+
# Create group if not exists
|
|
1298
|
+
try:
|
|
1299
|
+
r.xgroup_create('events', 'workers', '0', mkstream=True)
|
|
1300
|
+
except redis.exceptions.ResponseError:
|
|
1301
|
+
pass
|
|
1302
|
+
|
|
1303
|
+
while True:
|
|
1304
|
+
messages = r.xreadgroup(
|
|
1305
|
+
'workers',
|
|
1306
|
+
'worker1',
|
|
1307
|
+
{'events': '>'},
|
|
1308
|
+
count=10,
|
|
1309
|
+
block=5000
|
|
1310
|
+
)
|
|
1311
|
+
|
|
1312
|
+
if messages:
|
|
1313
|
+
for stream, entries in messages:
|
|
1314
|
+
for entry_id, data in entries:
|
|
1315
|
+
try:
|
|
1316
|
+
# Process event
|
|
1317
|
+
print(f'Processing event: {data}')
|
|
1318
|
+
|
|
1319
|
+
# Acknowledge
|
|
1320
|
+
r.xack('events', 'workers', entry_id)
|
|
1321
|
+
except Exception as e:
|
|
1322
|
+
print(f'Error processing: {e}')
|
|
1323
|
+
|
|
1324
|
+
# Add events
|
|
1325
|
+
add_event({'type': 'user_login', 'user': 'alice'})
|
|
1326
|
+
add_event({'type': 'user_logout', 'user': 'bob'})
|
|
1327
|
+
|
|
1328
|
+
# Process events
|
|
1329
|
+
process_events()
|
|
1330
|
+
```
|
|
1331
|
+
|
|
1332
|
+
---
|
|
1333
|
+
|
|
1334
|
+
## Scripting with Lua
|
|
1335
|
+
|
|
1336
|
+
### Basic Script Execution (EVAL)
|
|
1337
|
+
|
|
1338
|
+
```python
|
|
1339
|
+
# Execute Lua script
|
|
1340
|
+
script = "return redis.call('SET', KEYS[1], ARGV[1])"
|
|
1341
|
+
result = r.eval(script, 1, 'mykey', 'myvalue')
|
|
1342
|
+
print(result) # b'OK' or 'OK' with decode_responses=True
|
|
1343
|
+
```
|
|
1344
|
+
|
|
1345
|
+
### Script with Multiple Operations
|
|
1346
|
+
|
|
1347
|
+
```python
|
|
1348
|
+
script = """
|
|
1349
|
+
local current = redis.call('GET', KEYS[1])
|
|
1350
|
+
if current == false then
|
|
1351
|
+
current = 0
|
|
1352
|
+
end
|
|
1353
|
+
local new = tonumber(current) + tonumber(ARGV[1])
|
|
1354
|
+
redis.call('SET', KEYS[1], new)
|
|
1355
|
+
return new
|
|
1356
|
+
"""
|
|
1357
|
+
|
|
1358
|
+
new_value = r.eval(script, 1, 'counter', 5)
|
|
1359
|
+
print(f'New counter value: {new_value}')
|
|
1360
|
+
```
|
|
1361
|
+
|
|
1362
|
+
### Register Script (SCRIPT LOAD / EVALSHA)
|
|
1363
|
+
|
|
1364
|
+
```python
|
|
1365
|
+
# Register script
|
|
1366
|
+
script = "return redis.call('GET', KEYS[1])"
|
|
1367
|
+
sha = r.script_load(script)
|
|
1368
|
+
print(f'Script SHA: {sha}')
|
|
1369
|
+
|
|
1370
|
+
# Execute by SHA (more efficient for repeated calls)
|
|
1371
|
+
value = r.evalsha(sha, 1, 'mykey')
|
|
1372
|
+
|
|
1373
|
+
# Check if script exists
|
|
1374
|
+
exists = r.script_exists(sha)
|
|
1375
|
+
print(f'Script exists: {exists}') # [True]
|
|
1376
|
+
|
|
1377
|
+
# Flush all scripts
|
|
1378
|
+
r.script_flush()
|
|
1379
|
+
```
|
|
1380
|
+
|
|
1381
|
+
### Rate Limiting with Lua Script
|
|
1382
|
+
|
|
1383
|
+
```python
|
|
1384
|
+
rate_limit_script = """
|
|
1385
|
+
local key = KEYS[1]
|
|
1386
|
+
local limit = tonumber(ARGV[1])
|
|
1387
|
+
local window = tonumber(ARGV[2])
|
|
1388
|
+
local current = redis.call('INCR', key)
|
|
1389
|
+
if current == 1 then
|
|
1390
|
+
redis.call('EXPIRE', key, window)
|
|
1391
|
+
end
|
|
1392
|
+
if current > limit then
|
|
1393
|
+
return 0
|
|
1394
|
+
else
|
|
1395
|
+
return 1
|
|
1396
|
+
end
|
|
1397
|
+
"""
|
|
1398
|
+
|
|
1399
|
+
def check_rate_limit(user_id, limit=10, window=60):
|
|
1400
|
+
"""Check if user is within rate limit"""
|
|
1401
|
+
allowed = r.eval(
|
|
1402
|
+
rate_limit_script,
|
|
1403
|
+
1,
|
|
1404
|
+
f'ratelimit:{user_id}',
|
|
1405
|
+
limit,
|
|
1406
|
+
window
|
|
1407
|
+
)
|
|
1408
|
+
return allowed == 1
|
|
1409
|
+
|
|
1410
|
+
# Usage
|
|
1411
|
+
if check_rate_limit('user:1000', limit=10, window=60):
|
|
1412
|
+
print('Request allowed')
|
|
1413
|
+
else:
|
|
1414
|
+
print('Rate limit exceeded')
|
|
1415
|
+
```
|
|
1416
|
+
|
|
1417
|
+
### Atomic Get and Delete
|
|
1418
|
+
|
|
1419
|
+
```python
|
|
1420
|
+
get_and_delete = """
|
|
1421
|
+
local value = redis.call('GET', KEYS[1])
|
|
1422
|
+
if value then
|
|
1423
|
+
redis.call('DEL', KEYS[1])
|
|
1424
|
+
end
|
|
1425
|
+
return value
|
|
1426
|
+
"""
|
|
1427
|
+
|
|
1428
|
+
value = r.eval(get_and_delete, 1, 'temp:key')
|
|
1429
|
+
print(f'Retrieved and deleted: {value}')
|
|
1430
|
+
```
|
|
1431
|
+
|
|
1432
|
+
### Script Class for Reusability
|
|
1433
|
+
|
|
1434
|
+
```python
|
|
1435
|
+
class RedisScripts:
|
|
1436
|
+
"""Reusable Redis Lua scripts"""
|
|
1437
|
+
|
|
1438
|
+
def __init__(self, redis_client):
|
|
1439
|
+
self.r = redis_client
|
|
1440
|
+
|
|
1441
|
+
# Load scripts on initialization
|
|
1442
|
+
self.rate_limit_sha = self.r.script_load("""
|
|
1443
|
+
local key = KEYS[1]
|
|
1444
|
+
local limit = tonumber(ARGV[1])
|
|
1445
|
+
local window = tonumber(ARGV[2])
|
|
1446
|
+
local current = redis.call('INCR', key)
|
|
1447
|
+
if current == 1 then
|
|
1448
|
+
redis.call('EXPIRE', key, window)
|
|
1449
|
+
end
|
|
1450
|
+
return current <= limit and 1 or 0
|
|
1451
|
+
""")
|
|
1452
|
+
|
|
1453
|
+
self.atomic_increment_sha = self.r.script_load("""
|
|
1454
|
+
local current = redis.call('GET', KEYS[1])
|
|
1455
|
+
if not current then
|
|
1456
|
+
current = 0
|
|
1457
|
+
end
|
|
1458
|
+
local new = tonumber(current) + tonumber(ARGV[1])
|
|
1459
|
+
redis.call('SET', KEYS[1], new)
|
|
1460
|
+
return new
|
|
1461
|
+
""")
|
|
1462
|
+
|
|
1463
|
+
def rate_limit(self, key, limit, window):
|
|
1464
|
+
return self.r.evalsha(self.rate_limit_sha, 1, key, limit, window) == 1
|
|
1465
|
+
|
|
1466
|
+
def atomic_increment(self, key, amount):
|
|
1467
|
+
return self.r.evalsha(self.atomic_increment_sha, 1, key, amount)
|
|
1468
|
+
|
|
1469
|
+
# Usage
|
|
1470
|
+
scripts = RedisScripts(r)
|
|
1471
|
+
allowed = scripts.rate_limit('user:1000', 10, 60)
|
|
1472
|
+
new_value = scripts.atomic_increment('counter', 5)
|
|
1473
|
+
```
|
|
1474
|
+
|
|
1475
|
+
---
|
|
1476
|
+
|
|
1477
|
+
## Geospatial Operations
|
|
1478
|
+
|
|
1479
|
+
### Add Geo Points
|
|
1480
|
+
|
|
1481
|
+
```python
|
|
1482
|
+
# Add location
|
|
1483
|
+
r.geoadd('locations', (-122.4194, 37.7749, 'San Francisco'))  # redis-py 4.x takes a sequence of (lon, lat, member)
|
|
1484
|
+
|
|
1485
|
+
# Add multiple locations
|
|
1486
|
+
r.geoadd('locations', (
    -118.2437, 34.0522, 'Los Angeles',
    -73.9352, 40.7306, 'New York',
))
|
|
1490
|
+
```
|
|
1491
|
+
|
|
1492
|
+
### Query Geo Points
|
|
1493
|
+
|
|
1494
|
+
```python
|
|
1495
|
+
# Get position
|
|
1496
|
+
position = r.geopos('locations', 'San Francisco')
|
|
1497
|
+
print(position) # [(-122.4194, 37.7749)]
|
|
1498
|
+
|
|
1499
|
+
# Get multiple positions
|
|
1500
|
+
positions = r.geopos('locations', 'San Francisco', 'Los Angeles')
|
|
1501
|
+
|
|
1502
|
+
# Get distance between points
|
|
1503
|
+
distance = r.geodist('locations', 'San Francisco', 'Los Angeles', unit='mi')
|
|
1504
|
+
print(f'Distance: {distance} miles')
|
|
1505
|
+
|
|
1506
|
+
# Available units: 'm', 'km', 'mi', 'ft'
|
|
1507
|
+
|
|
1508
|
+
# Search by radius (from coordinates)
|
|
1509
|
+
nearby = r.georadius('locations', -122.4194, 37.7749, 500, unit='mi')
|
|
1510
|
+
|
|
1511
|
+
# Search with additional info
|
|
1512
|
+
detailed = r.georadius(
|
|
1513
|
+
'locations', -122.4194, 37.7749, 500, unit='mi',
|
|
1514
|
+
withdist=True,
|
|
1515
|
+
withcoord=True,
|
|
1516
|
+
withhash=True,
|
|
1517
|
+
count=10,
|
|
1518
|
+
sort='ASC'
|
|
1519
|
+
)
|
|
1520
|
+
|
|
1521
|
+
# Search by member
|
|
1522
|
+
near_sf = r.georadiusbymember('locations', 'San Francisco', 600, unit='mi')
|
|
1523
|
+
|
|
1524
|
+
# Search by member with details
|
|
1525
|
+
detailed = r.georadiusbymember(
|
|
1526
|
+
'locations', 'San Francisco', 600, unit='mi',
|
|
1527
|
+
withdist=True,
|
|
1528
|
+
withcoord=True
|
|
1529
|
+
)
|
|
1530
|
+
|
|
1531
|
+
# Get geohash
|
|
1532
|
+
geohash = r.geohash('locations', 'San Francisco')
|
|
1533
|
+
```
|
|
1534
|
+
|
|
1535
|
+
---
|
|
1536
|
+
|
|
1537
|
+
## HyperLogLog (Cardinality Estimation)
|
|
1538
|
+
|
|
1539
|
+
```python
|
|
1540
|
+
# Add elements
|
|
1541
|
+
r.pfadd('unique:visitors', 'user1', 'user2', 'user3')
|
|
1542
|
+
r.pfadd('unique:visitors', 'user2', 'user4') # user2 counted once
|
|
1543
|
+
|
|
1544
|
+
# Get count
|
|
1545
|
+
count = r.pfcount('unique:visitors') # ~4
|
|
1546
|
+
print(f'Unique visitors: {count}')
|
|
1547
|
+
|
|
1548
|
+
# Count multiple HyperLogLogs
|
|
1549
|
+
total = r.pfcount('unique:day1', 'unique:day2', 'unique:day3')
|
|
1550
|
+
|
|
1551
|
+
# Merge multiple HyperLogLogs
|
|
1552
|
+
r.pfmerge('unique:combined', 'unique:day1', 'unique:day2')
|
|
1553
|
+
```
|
|
1554
|
+
|
|
1555
|
+
---
|
|
1556
|
+
|
|
1557
|
+
## Bitmap Operations
|
|
1558
|
+
|
|
1559
|
+
```python
|
|
1560
|
+
# Set bit
|
|
1561
|
+
r.setbit('login:2024-01-15', 100, 1) # User 100 logged in
|
|
1562
|
+
|
|
1563
|
+
# Get bit
|
|
1564
|
+
bit = r.getbit('login:2024-01-15', 100) # 1
|
|
1565
|
+
|
|
1566
|
+
# Count set bits
|
|
1567
|
+
count = r.bitcount('login:2024-01-15')
|
|
1568
|
+
|
|
1569
|
+
# Count bits in range (bytes)
|
|
1570
|
+
count = r.bitcount('login:2024-01-15', 0, 10)
|
|
1571
|
+
|
|
1572
|
+
# Bitwise operations
|
|
1573
|
+
r.bitop('AND', 'result', 'bitmap1', 'bitmap2')
|
|
1574
|
+
r.bitop('OR', 'result', 'bitmap1', 'bitmap2')
|
|
1575
|
+
r.bitop('XOR', 'result', 'bitmap1', 'bitmap2')
|
|
1576
|
+
r.bitop('NOT', 'result', 'bitmap1')
|
|
1577
|
+
|
|
1578
|
+
# Find first bit
|
|
1579
|
+
pos = r.bitpos('login:2024-01-15', 1) # First set bit
|
|
1580
|
+
pos = r.bitpos('login:2024-01-15', 0) # First unset bit
|
|
1581
|
+
|
|
1582
|
+
# Find bit in range
|
|
1583
|
+
pos = r.bitpos('login:2024-01-15', 1, 0, 10)
|
|
1584
|
+
```
|
|
1585
|
+
|
|
1586
|
+
---
|
|
1587
|
+
|
|
1588
|
+
## Server and Connection Management
|
|
1589
|
+
|
|
1590
|
+
### Server Information
|
|
1591
|
+
|
|
1592
|
+
```python
|
|
1593
|
+
# Get server info
|
|
1594
|
+
info = r.info()
|
|
1595
|
+
print(info)
|
|
1596
|
+
|
|
1597
|
+
# Get specific section
|
|
1598
|
+
stats = r.info('stats')
|
|
1599
|
+
memory = r.info('memory')
|
|
1600
|
+
replication = r.info('replication')
|
|
1601
|
+
|
|
1602
|
+
# Ping server
|
|
1603
|
+
pong = r.ping() # True
|
|
1604
|
+
|
|
1605
|
+
# Echo message
|
|
1606
|
+
echo = r.echo('Hello Redis')
|
|
1607
|
+
|
|
1608
|
+
# Get server time
|
|
1609
|
+
server_time = r.time() # (seconds, microseconds)
|
|
1610
|
+
```
|
|
1611
|
+
|
|
1612
|
+
### Database Management
|
|
1613
|
+
|
|
1614
|
+
```python
|
|
1615
|
+
# Select database (0-15 by default)
|
|
1616
|
+
r.execute_command('SELECT', 1)
|
|
1617
|
+
|
|
1618
|
+
# Get database size
|
|
1619
|
+
size = r.dbsize()
|
|
1620
|
+
|
|
1621
|
+
# Flush current database
|
|
1622
|
+
r.flushdb()
|
|
1623
|
+
|
|
1624
|
+
# Flush all databases
|
|
1625
|
+
r.flushall()
|
|
1626
|
+
|
|
1627
|
+
# Save database to disk
|
|
1628
|
+
r.save()
|
|
1629
|
+
|
|
1630
|
+
# Background save
|
|
1631
|
+
r.bgsave()
|
|
1632
|
+
|
|
1633
|
+
# Get last save time
|
|
1634
|
+
last_save = r.lastsave() # Unix timestamp
|
|
1635
|
+
|
|
1636
|
+
# Background rewrite AOF
|
|
1637
|
+
r.bgrewriteaof()
|
|
1638
|
+
```
|
|
1639
|
+
|
|
1640
|
+
### Client Management
|
|
1641
|
+
|
|
1642
|
+
```python
|
|
1643
|
+
# Get client list
|
|
1644
|
+
clients = r.client_list()
|
|
1645
|
+
|
|
1646
|
+
# Get client ID
|
|
1647
|
+
client_id = r.client_id()
|
|
1648
|
+
|
|
1649
|
+
# Set client name
|
|
1650
|
+
r.client_setname('my-app')
|
|
1651
|
+
|
|
1652
|
+
# Get client name
|
|
1653
|
+
name = r.client_getname()
|
|
1654
|
+
|
|
1655
|
+
# Get client info
|
|
1656
|
+
info = r.client_info()
|
|
1657
|
+
|
|
1658
|
+
# Kill client
|
|
1659
|
+
r.client_kill('127.0.0.1:6379')
|
|
1660
|
+
|
|
1661
|
+
# Pause all clients
|
|
1662
|
+
r.client_pause(1000) # Pause for 1000ms
|
|
1663
|
+
```
|
|
1664
|
+
|
|
1665
|
+
### Memory Management
|
|
1666
|
+
|
|
1667
|
+
```python
|
|
1668
|
+
# Get memory stats
|
|
1669
|
+
memory_stats = r.memory_stats()
|
|
1670
|
+
|
|
1671
|
+
# Get memory usage of key
|
|
1672
|
+
usage = r.memory_usage('mykey')
|
|
1673
|
+
|
|
1674
|
+
# Purge memory
|
|
1675
|
+
r.memory_purge()
|
|
1676
|
+
```
|
|
1677
|
+
|
|
1678
|
+
---
|
|
1679
|
+
|
|
1680
|
+
## Async/Await Support
|
|
1681
|
+
|
|
1682
|
+
### Async Redis Client
|
|
1683
|
+
|
|
1684
|
+
```python
|
|
1685
|
+
import asyncio
|
|
1686
|
+
import redis.asyncio as aioredis
|
|
1687
|
+
|
|
1688
|
+
async def main():
|
|
1689
|
+
# Create async client
|
|
1690
|
+
r = aioredis.from_url('redis://localhost:6379', decode_responses=True)  # from_url is synchronous; do not await it
|
|
1691
|
+
|
|
1692
|
+
# Set value
|
|
1693
|
+
await r.set('key', 'value')
|
|
1694
|
+
|
|
1695
|
+
# Get value
|
|
1696
|
+
value = await r.get('key')
|
|
1697
|
+
print(value)
|
|
1698
|
+
|
|
1699
|
+
# Close connection
|
|
1700
|
+
await r.close()
|
|
1701
|
+
|
|
1702
|
+
# Run async code
|
|
1703
|
+
asyncio.run(main())
|
|
1704
|
+
```
|
|
1705
|
+
|
|
1706
|
+
### Async Pipeline
|
|
1707
|
+
|
|
1708
|
+
```python
|
|
1709
|
+
async def async_pipeline():
|
|
1710
|
+
r = aioredis.from_url('redis://localhost:6379', decode_responses=True)  # from_url is synchronous; do not await it
|
|
1711
|
+
|
|
1712
|
+
async with r.pipeline(transaction=True) as pipe:
|
|
1713
|
+
await pipe.set('key1', 'value1')
|
|
1714
|
+
await pipe.set('key2', 'value2')
|
|
1715
|
+
await pipe.get('key1')
|
|
1716
|
+
results = await pipe.execute()
|
|
1717
|
+
|
|
1718
|
+
print(results)
|
|
1719
|
+
await r.close()
|
|
1720
|
+
|
|
1721
|
+
asyncio.run(async_pipeline())
|
|
1722
|
+
```
|
|
1723
|
+
|
|
1724
|
+
### Async Pub/Sub
|
|
1725
|
+
|
|
1726
|
+
```python
|
|
1727
|
+
async def reader(channel: aioredis.client.PubSub):
|
|
1728
|
+
while True:
|
|
1729
|
+
message = await channel.get_message(ignore_subscribe_messages=True)
|
|
1730
|
+
if message is not None:
|
|
1731
|
+
print(f"Received: {message['data']}")
|
|
1732
|
+
|
|
1733
|
+
async def async_pubsub():
|
|
1734
|
+
    r = aioredis.from_url('redis://localhost:6379', decode_responses=True)
|
|
1735
|
+
|
|
1736
|
+
async with r.pubsub() as pubsub:
|
|
1737
|
+
await pubsub.subscribe('notifications')
|
|
1738
|
+
|
|
1739
|
+
# Create task to read messages
|
|
1740
|
+
future = asyncio.create_task(reader(pubsub))
|
|
1741
|
+
|
|
1742
|
+
# Publish messages
|
|
1743
|
+
await r.publish('notifications', 'Hello Async!')
|
|
1744
|
+
|
|
1745
|
+
await asyncio.sleep(1)
|
|
1746
|
+
future.cancel()
|
|
1747
|
+
|
|
1748
|
+
await r.close()
|
|
1749
|
+
|
|
1750
|
+
asyncio.run(async_pubsub())
|
|
1751
|
+
```
|
|
1752
|
+
|
|
1753
|
+
---
|
|
1754
|
+
|
|
1755
|
+
## Error Handling
|
|
1756
|
+
|
|
1757
|
+
### Connection Errors
|
|
1758
|
+
|
|
1759
|
+
```python
|
|
1760
|
+
import redis
|
|
1761
|
+
from redis.exceptions import ConnectionError, TimeoutError, RedisError
|
|
1762
|
+
|
|
1763
|
+
try:
|
|
1764
|
+
r = redis.Redis(
|
|
1765
|
+
host='localhost',
|
|
1766
|
+
port=6379,
|
|
1767
|
+
socket_connect_timeout=5,
|
|
1768
|
+
socket_timeout=5,
|
|
1769
|
+
retry_on_timeout=True
|
|
1770
|
+
)
|
|
1771
|
+
r.ping()
|
|
1772
|
+
except ConnectionError as e:
|
|
1773
|
+
print(f'Connection error: {e}')
|
|
1774
|
+
except TimeoutError as e:
|
|
1775
|
+
print(f'Timeout error: {e}')
|
|
1776
|
+
except RedisError as e:
|
|
1777
|
+
print(f'Redis error: {e}')
|
|
1778
|
+
```
|
|
1779
|
+
|
|
1780
|
+
### Command Errors
|
|
1781
|
+
|
|
1782
|
+
```python
|
|
1783
|
+
from redis.exceptions import ResponseError, DataError
|
|
1784
|
+
|
|
1785
|
+
try:
|
|
1786
|
+
    r.set('mykey', 'not-a-number')
    r.incr('mykey')  # Raises ResponseError: value is not an integer
|
|
1787
|
+
except ResponseError as e:
|
|
1788
|
+
print(f'Command error: {e}')
|
|
1789
|
+
except DataError as e:
|
|
1790
|
+
print(f'Data error: {e}')
|
|
1791
|
+
```
|
|
1792
|
+
|
|
1793
|
+
### Pipeline Errors
|
|
1794
|
+
|
|
1795
|
+
```python
|
|
1796
|
+
pipe = r.pipeline()
|
|
1797
|
+
pipe.set('key1', 'value1')
|
|
1798
|
+
pipe.incr('not-a-number') # Will error
|
|
1799
|
+
pipe.get('key1')
|
|
1800
|
+
|
|
1801
|
+
try:
|
|
1802
|
+
results = pipe.execute()
|
|
1803
|
+
except ResponseError as e:
|
|
1804
|
+
print(f'Pipeline error: {e}')
|
|
1805
|
+
```
|
|
1806
|
+
|
|
1807
|
+
### Retry Logic
|
|
1808
|
+
|
|
1809
|
+
```python
|
|
1810
|
+
import time
|
|
1811
|
+
from redis.exceptions import ConnectionError
|
|
1812
|
+
|
|
1813
|
+
def redis_operation_with_retry(operation, max_retries=3):
|
|
1814
|
+
"""Execute Redis operation with retry logic"""
|
|
1815
|
+
for attempt in range(max_retries):
|
|
1816
|
+
try:
|
|
1817
|
+
return operation()
|
|
1818
|
+
except ConnectionError:
|
|
1819
|
+
if attempt == max_retries - 1:
|
|
1820
|
+
raise
|
|
1821
|
+
time.sleep(2 ** attempt) # Exponential backoff
|
|
1822
|
+
|
|
1823
|
+
# Usage
|
|
1824
|
+
result = redis_operation_with_retry(lambda: r.get('mykey'))
|
|
1825
|
+
```
|
|
1826
|
+
|
|
1827
|
+
---
|
|
1828
|
+
|
|
1829
|
+
## Connection Pooling
|
|
1830
|
+
|
|
1831
|
+
### Configure Connection Pool
|
|
1832
|
+
|
|
1833
|
+
```python
|
|
1834
|
+
pool = redis.ConnectionPool(
|
|
1835
|
+
host='localhost',
|
|
1836
|
+
port=6379,
|
|
1837
|
+
password='your_password',
|
|
1838
|
+
db=0,
|
|
1839
|
+
max_connections=50,
|
|
1840
|
+
socket_timeout=5,
|
|
1841
|
+
socket_connect_timeout=5,
|
|
1842
|
+
socket_keepalive=True,
|
|
1843
|
+
socket_keepalive_options={
|
|
1844
|
+
1: 1, # TCP_KEEPIDLE
|
|
1845
|
+
2: 1, # TCP_KEEPINTVL
|
|
1846
|
+
3: 3 # TCP_KEEPCNT
|
|
1847
|
+
},
|
|
1848
|
+
decode_responses=True
|
|
1849
|
+
)
|
|
1850
|
+
|
|
1851
|
+
r = redis.Redis(connection_pool=pool)
|
|
1852
|
+
```
|
|
1853
|
+
|
|
1854
|
+
### Multiple Clients Sharing Pool
|
|
1855
|
+
|
|
1856
|
+
```python
|
|
1857
|
+
pool = redis.ConnectionPool(
|
|
1858
|
+
host='localhost',
|
|
1859
|
+
port=6379,
|
|
1860
|
+
max_connections=20,
|
|
1861
|
+
decode_responses=True
|
|
1862
|
+
)
|
|
1863
|
+
|
|
1864
|
+
# Multiple clients share the pool
|
|
1865
|
+
client1 = redis.Redis(connection_pool=pool)
|
|
1866
|
+
client2 = redis.Redis(connection_pool=pool)
|
|
1867
|
+
client3 = redis.Redis(connection_pool=pool)
|
|
1868
|
+
```
|
|
1869
|
+
|
|
1870
|
+
---
|
|
1871
|
+
|
|
1872
|
+
## Cluster Support
|
|
1873
|
+
|
|
1874
|
+
### Connect to Cluster
|
|
1875
|
+
|
|
1876
|
+
```python
|
|
1877
|
+
from redis.cluster import RedisCluster
|
|
1878
|
+
|
|
1879
|
+
# Connect to cluster
|
|
1880
|
+
cluster = RedisCluster(
|
|
1881
|
+
host='localhost',
|
|
1882
|
+
port=7000,
|
|
1883
|
+
decode_responses=True
|
|
1884
|
+
)
|
|
1885
|
+
|
|
1886
|
+
# Use cluster like regular client
|
|
1887
|
+
cluster.set('key', 'value')
|
|
1888
|
+
value = cluster.get('key')
|
|
1889
|
+
|
|
1890
|
+
cluster.close()
|
|
1891
|
+
```
|
|
1892
|
+
|
|
1893
|
+
### Cluster with Multiple Nodes
|
|
1894
|
+
|
|
1895
|
+
```python
|
|
1896
|
+
from redis.cluster import RedisCluster, ClusterNode
|
|
1897
|
+
|
|
1898
|
+
startup_nodes = [
|
|
1899
|
+
ClusterNode('localhost', 7000),
|
|
1900
|
+
ClusterNode('localhost', 7001),
|
|
1901
|
+
ClusterNode('localhost', 7002)
|
|
1902
|
+
]
|
|
1903
|
+
|
|
1904
|
+
cluster = RedisCluster(
|
|
1905
|
+
startup_nodes=startup_nodes,
|
|
1906
|
+
decode_responses=True
|
|
1907
|
+
)
|
|
1908
|
+
```
|
|
1909
|
+
|
|
1910
|
+
---
|
|
1911
|
+
|
|
1912
|
+
## Complete Application Example
|
|
1913
|
+
|
|
1914
|
+
```python
|
|
1915
|
+
import redis
|
|
1916
|
+
import json
|
|
1917
|
+
import time
|
|
1918
|
+
import os
|
|
1919
|
+
from typing import Optional, List, Dict, Any
|
|
1920
|
+
from dotenv import load_dotenv
|
|
1921
|
+
|
|
1922
|
+
load_dotenv()
|
|
1923
|
+
|
|
1924
|
+
class RedisManager:
|
|
1925
|
+
"""Redis manager with common operations"""
|
|
1926
|
+
|
|
1927
|
+
def __init__(self):
|
|
1928
|
+
self.pool = redis.ConnectionPool(
|
|
1929
|
+
host=os.getenv('REDIS_HOST', 'localhost'),
|
|
1930
|
+
port=int(os.getenv('REDIS_PORT', 6379)),
|
|
1931
|
+
password=os.getenv('REDIS_PASSWORD'),
|
|
1932
|
+
db=int(os.getenv('REDIS_DB', 0)),
|
|
1933
|
+
max_connections=10,
|
|
1934
|
+
decode_responses=True
|
|
1935
|
+
)
|
|
1936
|
+
self.client = redis.Redis(connection_pool=self.pool)
|
|
1937
|
+
|
|
1938
|
+
def cache_get(self, key: str) -> Optional[Any]:
|
|
1939
|
+
"""Get value from cache"""
|
|
1940
|
+
value = self.client.get(key)
|
|
1941
|
+
return json.loads(value) if value else None
|
|
1942
|
+
|
|
1943
|
+
def cache_set(self, key: str, value: Any, ttl: int = 3600):
|
|
1944
|
+
"""Set value in cache with TTL"""
|
|
1945
|
+
self.client.set(key, json.dumps(value), ex=ttl)
|
|
1946
|
+
|
|
1947
|
+
def invalidate_cache(self, pattern: str):
|
|
1948
|
+
"""Invalidate cache keys matching pattern"""
|
|
1949
|
+
keys = list(self.client.scan_iter(match=pattern))
|
|
1950
|
+
if keys:
|
|
1951
|
+
self.client.delete(*keys)
|
|
1952
|
+
|
|
1953
|
+
def track_user_activity(self, user_id: str, action: str):
|
|
1954
|
+
"""Track user activity"""
|
|
1955
|
+
today = time.strftime('%Y-%m-%d')
|
|
1956
|
+
key = f'activity:{user_id}:{today}'
|
|
1957
|
+
|
|
1958
|
+
self.client.rpush(key, json.dumps({
|
|
1959
|
+
'action': action,
|
|
1960
|
+
'timestamp': time.time()
|
|
1961
|
+
}))
|
|
1962
|
+
self.client.expire(key, 86400 * 7) # Keep for 7 days
|
|
1963
|
+
|
|
1964
|
+
def get_user_activity(self, user_id: str, date: str) -> List[Dict]:
|
|
1965
|
+
"""Get user activity for date"""
|
|
1966
|
+
key = f'activity:{user_id}:{date}'
|
|
1967
|
+
activities = self.client.lrange(key, 0, -1)
|
|
1968
|
+
return [json.loads(a) for a in activities]
|
|
1969
|
+
|
|
1970
|
+
def increment_page_view(self, page_id: str) -> int:
|
|
1971
|
+
"""Increment page view counter"""
|
|
1972
|
+
return self.client.incr(f'pageviews:{page_id}')
|
|
1973
|
+
|
|
1974
|
+
def get_page_views(self, page_id: str) -> int:
|
|
1975
|
+
"""Get page view count"""
|
|
1976
|
+
views = self.client.get(f'pageviews:{page_id}')
|
|
1977
|
+
return int(views) if views else 0
|
|
1978
|
+
|
|
1979
|
+
def add_to_leaderboard(self, user_id: str, score: float):
|
|
1980
|
+
"""Add or update user score in leaderboard"""
|
|
1981
|
+
self.client.zadd('leaderboard', {user_id: score})
|
|
1982
|
+
|
|
1983
|
+
def get_leaderboard(self, count: int = 10) -> List[Dict]:
|
|
1984
|
+
"""Get top users from leaderboard"""
|
|
1985
|
+
results = self.client.zrevrange('leaderboard', 0, count - 1, withscores=True)
|
|
1986
|
+
return [{'user_id': user, 'score': score} for user, score in results]
|
|
1987
|
+
|
|
1988
|
+
def get_user_rank(self, user_id: str) -> Optional[int]:
|
|
1989
|
+
"""Get user rank in leaderboard (1-based)"""
|
|
1990
|
+
rank = self.client.zrevrank('leaderboard', user_id)
|
|
1991
|
+
return rank + 1 if rank is not None else None
|
|
1992
|
+
|
|
1993
|
+
def rate_limit(self, key: str, limit: int = 10, window: int = 60) -> bool:
|
|
1994
|
+
"""Check rate limit"""
|
|
1995
|
+
pipe = self.client.pipeline()
|
|
1996
|
+
now = int(time.time())
|
|
1997
|
+
window_start = now - window
|
|
1998
|
+
|
|
1999
|
+
pipe.zremrangebyscore(key, 0, window_start)
|
|
2000
|
+
pipe.zcard(key)
|
|
2001
|
+
pipe.zadd(key, {str(now): now})
|
|
2002
|
+
pipe.expire(key, window)
|
|
2003
|
+
|
|
2004
|
+
results = pipe.execute()
|
|
2005
|
+
return results[1] < limit
|
|
2006
|
+
|
|
2007
|
+
def close(self):
|
|
2008
|
+
"""Close connection"""
|
|
2009
|
+
self.client.close()
|
|
2010
|
+
|
|
2011
|
+
# Usage example
|
|
2012
|
+
if __name__ == '__main__':
|
|
2013
|
+
redis_mgr = RedisManager()
|
|
2014
|
+
|
|
2015
|
+
# Cache operations
|
|
2016
|
+
redis_mgr.cache_set('user:1000', {
|
|
2017
|
+
'name': 'John',
|
|
2018
|
+
'email': '[email protected]'
|
|
2019
|
+
}, ttl=3600)
|
|
2020
|
+
|
|
2021
|
+
user = redis_mgr.cache_get('user:1000')
|
|
2022
|
+
print(f'Cached user: {user}')
|
|
2023
|
+
|
|
2024
|
+
# Track activity
|
|
2025
|
+
redis_mgr.track_user_activity('user:1000', 'login')
|
|
2026
|
+
redis_mgr.track_user_activity('user:1000', 'view_profile')
|
|
2027
|
+
|
|
2028
|
+
# Get activity
|
|
2029
|
+
today = time.strftime('%Y-%m-%d')
|
|
2030
|
+
activities = redis_mgr.get_user_activity('user:1000', today)
|
|
2031
|
+
print(f'Activities: {activities}')
|
|
2032
|
+
|
|
2033
|
+
# Page views
|
|
2034
|
+
views = redis_mgr.increment_page_view('page:home')
|
|
2035
|
+
print(f'Page views: {views}')
|
|
2036
|
+
|
|
2037
|
+
# Leaderboard
|
|
2038
|
+
redis_mgr.add_to_leaderboard('user:1000', 500)
|
|
2039
|
+
redis_mgr.add_to_leaderboard('user:2000', 750)
|
|
2040
|
+
redis_mgr.add_to_leaderboard('user:3000', 250)
|
|
2041
|
+
|
|
2042
|
+
top_users = redis_mgr.get_leaderboard(3)
|
|
2043
|
+
print(f'Top users: {top_users}')
|
|
2044
|
+
|
|
2045
|
+
rank = redis_mgr.get_user_rank('user:1000')
|
|
2046
|
+
print(f'User rank: {rank}')
|
|
2047
|
+
|
|
2048
|
+
# Rate limiting
|
|
2049
|
+
for i in range(12):
|
|
2050
|
+
allowed = redis_mgr.rate_limit('api:user:1000', limit=10, window=60)
|
|
2051
|
+
print(f'Request {i + 1}: {"Allowed" if allowed else "Blocked"}')
|
|
2052
|
+
|
|
2053
|
+
redis_mgr.close()
|
|
2054
|
+
```
|