redenv 0.3.0__tar.gz → 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,4 +6,6 @@ build
  dist
  .eggs
  __pycache__
- .pytest_cache
+ .pytest_cache
+ .ruff_cache
+ .venv
@@ -2,6 +2,13 @@

  All notable changes to this project will be documented in this file.

+ ## [Unreleased]
+
+ ### Changed
+
+ - **Atomic Secret Updates:** Refactored `client.set()` (async and sync) to use a Lua script for atomic "read-modify-write" operations in Redis. This prevents race conditions and data loss during concurrent secret updates.
+ - **Cluster-Safe Architecture:** Optimized the update flow to be Redis Cluster compatible by separating metadata retrieval from the atomic write operation, avoiding CROSSSLOT errors.
+
  ## [0.3.0] - 2026-01-25

  ### Added
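The changelog entry above is the core of this release: previously the read-modify-write of a secret's version history happened client-side, so two concurrent set() calls could read the same history and one write would silently win. The sketch below is illustrative only — it is not code from the package, and the key names and script are made up — but it shows the general pattern of moving that cycle into a single EVAL so Redis serializes it.

# Illustrative sketch, not package code: contrasts a racy client-side update with
# an EVAL-based atomic one. Assumes UPSTASH_URL / UPSTASH_TOKEN point at a
# reachable Upstash Redis instance; key/field names are placeholders.
import json
import os

from upstash_redis import Redis  # same client library the package uses

redis = Redis(url=os.environ["UPSTASH_URL"], token=os.environ["UPSTASH_TOKEN"])

HASH_KEY = "demo:project"   # placeholder, not the package's key schema
FIELD = "DEMO_SECRET"

def racy_append(value: str) -> None:
    # Read-modify-write in the client: two concurrent callers can read the same
    # history, both prepend, and one of the writes is lost.
    raw = redis.hget(HASH_KEY, FIELD)
    history = json.loads(raw) if raw else []
    next_version = history[0]["version"] + 1 if history else 1
    history.insert(0, {"version": next_version, "value": value})
    redis.hset(HASH_KEY, FIELD, json.dumps(history))

ATOMIC_APPEND = """
local raw = redis.call('HGET', KEYS[1], ARGV[1])
local history = {}
if raw then history = cjson.decode(raw) end
local last = 0
if #history > 0 then last = history[1]['version'] end
table.insert(history, 1, { version = last + 1, value = ARGV[2] })
redis.call('HSET', KEYS[1], ARGV[1], cjson.encode(history))
return last + 1
"""

def atomic_append(value: str) -> int:
    # The whole read-modify-write runs inside Redis, so concurrent callers are
    # serialized and every call gets a distinct version number.
    return redis.eval(ATOMIC_APPEND, [HASH_KEY], [FIELD, value])

The script the package actually ships (shown in the hunks below) additionally records the author and timestamp and trims the history to the project's historyLimit.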
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: redenv
- Version: 0.3.0
+ Version: 0.4.0
  Summary: A zero-knowledge, end-to-end encrypted secret management SDK for Python.
  Project-URL: Homepage, https://github.com/redenv-labs/redenv
  Project-URL: Documentation, https://github.com/redenv-labs/redenv/tree/main/packages/python-client
@@ -0,0 +1,147 @@
+ import asyncio
+ import json
+ import time
+ import os
+ from upstash_redis.asyncio import Redis
+ from redenv.utils import set_secret
+ from redenv.crypto import derive_key, generate_salt, random_bytes, encrypt, buffer_to_hex, decrypt
+ from redenv.types import RedenvOptions, UpstashConfig
+
+ # Credentials from client/example.ts
+ UPSTASH_URL = os.getenv("UPSTASH_URL")
+ UPSTASH_TOKEN = os.getenv("UPSTASH_TOKEN")
+
+ async def main():
+     if not UPSTASH_URL or not UPSTASH_TOKEN:
+         raise ValueError("UPSTASH_URL and UPSTASH_TOKEN must be set")
+
+     project_name = f"test-atomic-py-{int(time.time())}"
+     environment = "dev"
+     key = "ATOMIC_KEY_PY"
+     value1 = "value-1"
+     value2 = "value-2"
+     user = "python-integration-test"
+
+     # Direct Redis access for verification
+     redis = Redis(url=UPSTASH_URL, token=UPSTASH_TOKEN)
+
+     # Setup Options object manually since we are testing set_secret util directly
+     options = RedenvOptions(
+         project=project_name,
+         token_id="stk_test",
+         token="redenv_sk_test",
+         upstash=UpstashConfig(url=UPSTASH_URL, token=UPSTASH_TOKEN),
+         environment=environment,
+         log="none"
+     )
+
+     print(f"\n--- Starting Python Real Integration Test ---")
+     print(f"Project: {project_name}")
+
+     try:
+         # 1. Setup Metadata & Keys
+         print("1. Creating project metadata...")
+
+         # We need a valid PEK encrypted in metadata for get_pek to work
+         # Generate real PEK
+         salt = generate_salt()
+         pek = random_bytes(32)  # PEK is 32 bytes (256 bits)
+
+         # Wrap PEK with our mock service token
+         token_key = derive_key("redenv_sk_test", salt)
+         # Encrypt the HEX representation of PEK
+         encrypted_pek = encrypt(buffer_to_hex(pek), token_key)
+
+         await redis.hset(f"meta@{project_name}", values={
+             "historyLimit": 5,
+             "serviceTokens": json.dumps({
+                 "stk_test": {
+                     "salt": buffer_to_hex(salt),
+                     "encryptedPEK": encrypted_pek,
+                     "name": "Test Token"
+                 }
+             })
+         })
+
+         # 2. First Write
+         print(f"2. Writing first version: '{value1}'...")
+         await set_secret(redis, options, key, value1)
+         print("   ✓ Write successful")
+
+         # 3. Second Write
+         print(f"3. Writing second version: '{value2}'...")
+         await set_secret(redis, options, key, value2)
+         print("   ✓ Update successful")
+
+         # 4. Verification
+         print("4. Verifying data in Redis...")
+         raw_data = await redis.hget(f"{environment}:{project_name}", key)
+         history = json.loads(raw_data) if isinstance(raw_data, str) else raw_data
+
+         print(f"   History length: {len(history)} (Expected: 2)")
+         if len(history) != 2:
+             raise Exception(f"History length mismatch! Got {len(history)}")
+
+         # Check V2 (Latest)
+         v2 = history[0]
+         decrypted_v2 = decrypt(v2["value"], pek)
+         print(f"   v{v2['version']} Value: '{decrypted_v2}' (Expected: '{value2}')")
+         if decrypted_v2 != value2:
+             raise Exception("Latest value mismatch!")
+
+         # Check V1
+         v1 = history[1]
+         decrypted_v1 = decrypt(v1["value"], pek)
+         print(f"   v{v1['version']} Value: '{decrypted_v1}' (Expected: '{value1}')")
+         if decrypted_v1 != value1:
+             raise Exception("Previous value mismatch!")
+
+         # 5. Concurrency Test
+         print("\n5. Testing Concurrency (Race Conditions)...")
+         parallel_writes = 5
+         print(f"   Firing {parallel_writes} writes in parallel...")
+
+         tasks = []
+         for i in range(parallel_writes):
+             tasks.append(set_secret(redis, options, key, f"concurrent-{i}"))
+
+         await asyncio.gather(*tasks)
+         print("   ✓ Parallel writes completed")
+
+         # 6. Verify Concurrency
+         print("6. Verifying concurrency results...")
+         raw_data_concurrent = await redis.hget(f"{environment}:{project_name}", key)
+         history = json.loads(raw_data_concurrent) if isinstance(raw_data_concurrent, str) else raw_data_concurrent
+
+         # Initial 2 + 5 = 7 total versions created.
+         # But historyLimit is 5.
+         print(f"   History length: {len(history)} (Expected Cap: 5)")
+         if len(history) != 5:
+             raise Exception(f"History should be capped at 5! Got {len(history)}")
+
+         latest_version = history[0]['version']
+         expected_version = 2 + parallel_writes  # 7
+         print(f"   Latest Version: {latest_version} (Expected: {expected_version})")
+
+         if latest_version != expected_version:
+             raise Exception(f"Race condition detected! Expected version {expected_version}, got {latest_version}. Updates were lost.")
+
+         # Ensure all versions are unique
+         versions = [h['version'] for h in history]
+         unique_versions = set(versions)
+         if len(versions) != len(unique_versions):
+             raise Exception("Duplicate version numbers detected!")
+
+         print("\n✅ SUCCESS: Python Atomic set_secret is working correctly on real Redis.")
+
+     except Exception as e:
+         print(f"\n❌ FAILED: {e}")
+         import traceback
+         traceback.print_exc()
+     finally:
+         print("\nCleaning up...")
+         await redis.delete(f"meta@{project_name}")
+         await redis.delete(f"{environment}:{project_name}")
+
+ if __name__ == "__main__":
+     asyncio.run(main())
@@ -3,5 +3,5 @@ from .errors import RedenvError
  from .secrets import Secrets
  from .sync import Redenv as RedenvSync

- __version__ = "0.3.0"
+ __version__ = "0.4.0"
  __all__ = ["Redenv", "RedenvSync", "RedenvError", "Secrets"]
@@ -110,14 +110,12 @@ def populate_env(secrets: Union[Dict[str, str], Secrets], options: RedenvOptions

  def set_secret(redis: SyncRedis, options: RedenvOptions, key: str, value: str):
      """
-     Sets a secret in Redis with versioning and history.
+     Sets a secret in Redis.
      """
      env_key = f"{options.environment}:{options.project}"
      meta_key = f"meta@{options.project}"

-     # Sequential fetch (Simpler for sync, parallel requires threads)
      metadata = redis.hgetall(meta_key)
-     current_history_str = redis.hget(env_key, key)

      if not metadata:
          raise RedenvError(f'Project "{options.project}" not found.', "PROJECT_NOT_FOUND")
@@ -126,32 +124,66 @@ def set_secret(redis: SyncRedis, options: RedenvOptions, key: str, value: str):

      history_limit = int(metadata.get("historyLimit", 10))

-     history = []
-     if current_history_str:
-         history = json.loads(current_history_str) if isinstance(current_history_str, str) else current_history_str
-
-     if not isinstance(history, list):
-         history = []
-
-     last_version = history[0]["version"] if len(history) > 0 else 0
-
      encrypted_value = encrypt(value, pek)

      from datetime import datetime, timezone
-
-     new_version = {
-         "version": last_version + 1,
-         "value": encrypted_value,
-         "user": options.token_id,
-         "createdAt": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+     created_at = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+
+     script = """
+     local env_key = KEYS[1]
+     local field = ARGV[1]
+     local encrypted_value = ARGV[2]
+     local user = ARGV[3]
+     local created_at = ARGV[4]
+     local history_limit = tonumber(ARGV[5])
+
+     -- Fetch Current History
+     local current_data = redis.call('HGET', env_key, field)
+     local history = {}
+
+     if current_data then
+         local status, res = pcall(cjson.decode, current_data)
+         if status then
+             history = res
+         end
+     end
+
+     -- Determine Next Version
+     local last_version = 0
+     if #history > 0 and history[1] and history[1]['version'] then
+         last_version = history[1]['version']
+     end
+
+     -- Create New Record
+     local new_version = {
+         version = last_version + 1,
+         value = encrypted_value,
+         user = user,
+         createdAt = created_at
      }
+
+     -- Prepend (Newest First)
+     table.insert(history, 1, new_version)
+
+     -- Trim History
+     if history_limit > 0 then
+         while #history > history_limit do
+             table.remove(history)
+         end
+     end
+
+     -- Save and Return
+     local encoded = cjson.encode(history)
+     redis.call('HSET', env_key, field, encoded)
+
+     return encoded
+     """

-     history.insert(0, new_version)
-
-     if history_limit > 0:
-         history = history[:history_limit]
-
-     return redis.hset(env_key, key, json.dumps(history))
+     return redis.eval(
+         script,
+         [env_key],
+         [key, encrypted_value, options.token_id, created_at, str(history_limit)]
+     )

  def get_secret_version(redis: SyncRedis, options: RedenvOptions, cache: LRUCache, key: str, version: int, mode: Literal["id", "index"] = "id") -> Optional[str]:
      """
@@ -4,7 +4,6 @@ from .errors import RedenvError
  from .expand import expand_secrets
  from upstash_redis import AsyncRedis
  from .secrets import Secrets
- import asyncio
  import json
  import os
  import time
@@ -145,56 +144,85 @@ async def populate_env(secrets: Union[Dict[str, str], Secrets], options: RedenvO

  async def set_secret(redis: AsyncRedis, options: RedenvOptions, key: str, value: str):
      """
-     Sets a secret in Redis with versioning and history.
+     Sets a secret in Redis.
      """
      env_key = f"{options.environment}:{options.project}"
      meta_key = f"meta@{options.project}"

-     # Fetch metadata (for PEK & historyLimit) and current history in parallel
-     metadata, current_history = await asyncio.gather(
-         redis.hgetall(meta_key),
-         redis.hget(env_key, key)
-     )
+     # We do this outside Lua to avoid CROSSSLOT errors in Redis Cluster/Upstash
+     metadata = await redis.hgetall(meta_key)

      if not metadata:
          raise RedenvError(f'Project "{options.project}" not found.', "PROJECT_NOT_FOUND")

-     # Reuse metadata to get PEK without extra fetch
+     # Reuse metadata to get PEK
      pek = await get_pek(redis, options, metadata)

      history_limit = int(metadata.get("historyLimit", 10))

-     # Fetch current history for the key
-     history = []
-     if current_history:
-         history = json.loads(current_history) if isinstance(current_history, str) else current_history
-
-     if not isinstance(history, list):
-         history = []
-
-     last_version = history[0]["version"] if len(history) > 0 else 0
-
      # Encrypt new value
      encrypted_value = encrypt(value, pek)

      from datetime import datetime, timezone
+     created_at = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")

-     new_version = {
-         "version": last_version + 1,
-         "value": encrypted_value,
-         "user": options.token_id, # Using token_id as the user/auditor
-         "createdAt": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+     # KEYS[1] = env_key
+     # ARGV[1] = field (key), ARGV[2] = encrypted_value, ARGV[3] = user, ARGV[4] = created_at, ARGV[5] = history_limit
+     script = """
+     local env_key = KEYS[1]
+     local field = ARGV[1]
+     local encrypted_value = ARGV[2]
+     local user = ARGV[3]
+     local created_at = ARGV[4]
+     local history_limit = tonumber(ARGV[5])
+
+     -- Fetch Current History
+     local current_data = redis.call('HGET', env_key, field)
+     local history = {}
+
+     if current_data then
+         local status, res = pcall(cjson.decode, current_data)
+         if status then
+             history = res
+         end
+     end
+
+     -- Determine Next Version
+     local last_version = 0
+     if #history > 0 and history[1] and history[1]['version'] then
+         last_version = history[1]['version']
+     end
+
+     -- Create New Record
+     local new_version = {
+         version = last_version + 1,
+         value = encrypted_value,
+         user = user,
+         createdAt = created_at
      }
-
-     # Prepend new version
-     history.insert(0, new_version)
-
-     # Trim history
-     if history_limit > 0:
-         history = history[:history_limit]
-
-     # Write back
-     return await redis.hset(env_key, key, json.dumps(history))
+
+     -- Prepend (Newest First)
+     table.insert(history, 1, new_version)
+
+     -- Trim History
+     if history_limit > 0 then
+         while #history > history_limit do
+             table.remove(history)
+         end
+     end
+
+     -- Save and Return
+     local encoded = cjson.encode(history)
+     redis.call('HSET', env_key, field, encoded)
+
+     return encoded
+     """
+
+     return await redis.eval(
+         script,
+         [env_key],
+         [key, encrypted_value, options.token_id, created_at, str(history_limit)]
+     )

  async def get_secret_version(redis: AsyncRedis, options: RedenvOptions, cache: LRUCache, key: str, version: int, mode: Literal["id", "index"] = "id") -> Optional[str]:
      """
@@ -110,19 +110,28 @@ async def test_write_secret(client, mock_redis):
      new_val = "new-value"
      await client.set(SECRET_KEY, new_val)

-     # Verify hset was called
-     args = mock_redis.hset.call_args
-     # hset(key, field, value)
-     assert args[0][0] == "dev:test-project"
-     assert args[0][1] == SECRET_KEY
+     # Verify eval was called (for Lua script)
+     assert mock_redis.eval.called

-     written_json = args[0][2]
-     history = json.loads(written_json)
+     args = mock_redis.eval.call_args
+     script = args[0][0]
+     keys = args[0][1]
+     argv = args[0][2]

-     assert len(history) == 2 # Prepend new version
-     assert history[0]["version"] == 2
-     # We can't verify encrypted value easily without decrypting,
-     # but we assume encrypt() works (tested in unit tests)
+     # Check script content basics
+     assert "local env_key = KEYS[1]" in script
+     assert "redis.call('HSET', env_key, field, encoded)" in script
+
+     # Check keys
+     assert keys[0] == "dev:test-project"
+
+     # Check args: [key, encrypted_value, user, created_at, history_limit]
+     assert argv[0] == SECRET_KEY
+     # We can't verify encrypted value exactly without decrypting, but it should be a string
+     assert isinstance(argv[1], str)
+     assert argv[2] == TOKEN_ID
+     # History Limit
+     assert int(argv[4]) == 10

  @pytest.mark.asyncio
  async def test_get_version(client, mock_redis):
@@ -104,15 +104,27 @@ def test_sync_set_secret(sync_client, mock_redis_sync):
      new_val = "new-sync-val"
      sync_client.set(SECRET_KEY, new_val)

-     # Verify hset was called
-     assert mock_redis_sync.hset.called
-     args = mock_redis_sync.hset.call_args
-     assert args[0][0] == "prod:sync-project"
-     assert args[0][1] == SECRET_KEY
+     # Verify eval was called (for Lua script)
+     assert mock_redis_sync.eval.called

-     written_json = args[0][2]
-     history = json.loads(written_json)
-     assert history[0]["version"] == 2
+     args = mock_redis_sync.eval.call_args
+     script = args[0][0]
+     keys = args[0][1]
+     argv = args[0][2]
+
+     # Check script content basics
+     assert "local env_key = KEYS[1]" in script
+     assert "redis.call('HSET', env_key, field, encoded)" in script
+
+     # Check keys
+     assert keys[0] == "prod:sync-project"
+
+     # Check args: [key, encrypted_value, user, created_at, history_limit]
+     assert argv[0] == SECRET_KEY
+     assert isinstance(argv[1], str)
+     assert argv[2] == TOKEN_ID
+     # History Limit
+     assert int(argv[4]) == 10

  def test_sync_get_version(sync_client, mock_redis_sync):
      # Valid version
15 files without changes