redis-benchmarks-specification 0.1.281-py3-none-any.whl → 0.1.283-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.



@@ -19,10 +19,10 @@ import requests
 def extract_redis_pid(redis_conn) -> Optional[int]:
     """
     Extract Redis process ID from Redis INFO command.
-
+
     Args:
         redis_conn: Redis connection object
-
+
     Returns:
         Redis process ID as integer, or None if not found
     """
@@ -43,10 +43,10 @@ def extract_redis_pid(redis_conn) -> Optional[int]:
 def extract_redis_metadata(redis_conn) -> Dict[str, Any]:
     """
     Extract Redis metadata for profile comments.
-
+
     Args:
         redis_conn: Redis connection object
-
+
     Returns:
         Dictionary containing Redis metadata
     """
@@ -60,18 +60,20 @@ def extract_redis_metadata(redis_conn) -> Dict[str, Any]:
             "process_id": redis_info.get("process_id", "unknown"),
             "tcp_port": redis_info.get("tcp_port", "unknown"),
         }
-
+
         # Use build_id if git_sha1 is empty or 0
         if metadata["redis_git_sha1"] in ("", 0, "0"):
             metadata["redis_git_sha1"] = metadata["redis_build_id"]
-
-        logging.info(f"Extracted Redis metadata: version={metadata['redis_version']}, sha={metadata['redis_git_sha1']}, pid={metadata['process_id']}")
+
+        logging.info(
+            f"Extracted Redis metadata: version={metadata['redis_version']}, sha={metadata['redis_git_sha1']}, pid={metadata['process_id']}"
+        )
         return metadata
     except Exception as e:
         logging.error(f"Failed to extract Redis metadata: {e}")
         return {
             "redis_version": "unknown",
-            "redis_git_sha1": "unknown",
+            "redis_git_sha1": "unknown",
             "redis_git_dirty": "unknown",
             "redis_build_id": "unknown",
             "process_id": "unknown",
@@ -79,19 +81,172 @@ def extract_redis_metadata(redis_conn) -> Dict[str, Any]:
         }


+def extract_server_info_for_args(redis_conn) -> Dict[str, str]:
+    """
+    Extract server information from Redis INFO SERVER to auto-detect
+    github_org, github_repo, and github_version when not explicitly provided.
+
+    Args:
+        redis_conn: Redis connection object
+
+    Returns:
+        Dictionary containing detected server information:
+        - github_org: Detected organization (e.g., 'redis', 'valkey-io')
+        - github_repo: Detected repository (e.g., 'redis', 'valkey')
+        - github_version: Detected version
+        - server_name: Server name from INFO
+    """
+    try:
+        server_info = redis_conn.info("server")
+
+        # Extract server name and version info
+        server_name = server_info.get("server_name", "").lower()
+        redis_version = server_info.get("redis_version", "unknown")
+
+        # Default values
+        github_org = "redis"
+        github_repo = "redis"
+        github_version = redis_version
+
+        # Check for Valkey
+        if "valkey" in server_name:
+            github_org = "valkey-io"
+            github_repo = "valkey"
+            # Use valkey_version if available, fallback to redis_version
+            valkey_version = server_info.get("valkey_version")
+            if valkey_version:
+                github_version = valkey_version
+
+        logging.info(
+            f"Auto-detected server info: org={github_org}, repo={github_repo}, "
+            f"version={github_version}, server_name={server_name}"
+        )
+
+        return {
+            "github_org": github_org,
+            "github_repo": github_repo,
+            "github_version": github_version,
+            "server_name": server_name,
+        }
+
+    except Exception as e:
+        logging.error(f"Failed to extract server info: {e}")
+        return {
+            "github_org": "redis",
+            "github_repo": "redis",
+            "github_version": "unknown",
+            "server_name": "unknown",
+        }
+
+
+def extract_server_metadata_for_timeseries(redis_conn) -> Dict[str, str]:
+    """
+    Extract comprehensive server metadata from Redis INFO SERVER for use as
+    timeseries metadata tags.
+
+    Args:
+        redis_conn: Redis connection object
+
+    Returns:
+        Dictionary containing server metadata for timeseries tags:
+        - os: Operating system information
+        - arch_bits: Architecture bits (32/64)
+        - gcc_version: GCC compiler version
+        - server_mode: Server mode (standalone/cluster/sentinel)
+        - multiplexing_api: Multiplexing API used (epoll/kqueue/etc)
+        - atomicvar_api: Atomic variable API
+        - redis_build_id: Build ID
+        - redis_git_dirty: Git dirty flag
+        - process_supervised: Process supervision status
+        - availability_zone: Availability zone (if available)
+        - And other interesting metadata fields
+    """
+    try:
+        server_info = redis_conn.info("server")
+
+        # Extract interesting metadata fields for timeseries tags
+        metadata = {}
+
+        # Core system information
+        if "os" in server_info:
+            metadata["os"] = str(server_info["os"])
+        if "arch_bits" in server_info:
+            metadata["arch_bits"] = str(server_info["arch_bits"])
+        if "gcc_version" in server_info:
+            metadata["gcc_version"] = str(server_info["gcc_version"])
+
+        # Server configuration
+        if "server_mode" in server_info:
+            metadata["server_mode"] = str(server_info["server_mode"])
+        elif "redis_mode" in server_info:  # Fallback for older versions
+            metadata["server_mode"] = str(server_info["redis_mode"])
+
+        # Performance-related APIs
+        if "multiplexing_api" in server_info:
+            metadata["multiplexing_api"] = str(server_info["multiplexing_api"])
+        if "atomicvar_api" in server_info:
+            metadata["atomicvar_api"] = str(server_info["atomicvar_api"])
+        if "monotonic_clock" in server_info:
+            metadata["monotonic_clock"] = str(server_info["monotonic_clock"])
+
+        # Build information
+        if "redis_build_id" in server_info:
+            metadata["redis_build_id"] = str(server_info["redis_build_id"])
+        if "redis_git_dirty" in server_info:
+            metadata["redis_git_dirty"] = str(server_info["redis_git_dirty"])
+
+        # Process information
+        if "process_supervised" in server_info:
+            metadata["process_supervised"] = str(server_info["process_supervised"])
+
+        # Cloud/deployment information
+        if "availability_zone" in server_info and server_info["availability_zone"]:
+            metadata["availability_zone"] = str(server_info["availability_zone"])
+
+        # IO threads (performance relevant)
+        if "io_threads_active" in server_info:
+            metadata["io_threads_active"] = str(server_info["io_threads_active"])
+
+        # Server name and version info
+        if "server_name" in server_info and server_info["server_name"]:
+            metadata["server_name"] = str(server_info["server_name"])
+        if "redis_version" in server_info:
+            metadata["redis_version"] = str(server_info["redis_version"])
+        if "valkey_version" in server_info:
+            metadata["valkey_version"] = str(server_info["valkey_version"])
+        if "valkey_release_stage" in server_info:
+            metadata["valkey_release_stage"] = str(server_info["valkey_release_stage"])
+
+        # Configuration file info
+        if "config_file" in server_info and server_info["config_file"]:
+            metadata["config_file"] = str(server_info["config_file"])
+        else:
+            metadata["config_file"] = "none"
+
+        logging.info(f"Extracted {len(metadata)} server metadata fields for timeseries: {list(metadata.keys())}")
+
+        return metadata
+
+    except Exception as e:
+        logging.error(f"Failed to extract server metadata: {e}")
+        return {}
+
+
 def calculate_profile_duration(benchmark_duration_seconds: int) -> int:
     """
     Calculate profiling duration based on benchmark duration.
-
+
     Args:
         benchmark_duration_seconds: Expected benchmark duration in seconds
-
+
     Returns:
         Profiling duration in seconds (minimum: benchmark duration, maximum: 30)
     """
     # Minimum duration is the benchmark duration, maximum is 30 seconds
     duration = min(max(benchmark_duration_seconds, 10), 30)
-    logging.info(f"Calculated profile duration: {duration}s (benchmark: {benchmark_duration_seconds}s)")
+    logging.info(
+        f"Calculated profile duration: {duration}s (benchmark: {benchmark_duration_seconds}s)"
+    )
     return duration


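The two functions added above feed argument auto-detection (github_org/github_repo/github_version from INFO SERVER, with a Valkey branch) and timeseries tagging, while calculate_profile_duration clamps the capture window to the 10-30 second range, so a 5 s benchmark still gets a 10 s profile and a 120 s benchmark is capped at 30 s. A minimal usage sketch (not part of the diff), assuming the import path and a local connection:

# Standalone usage sketch (not part of the diff); import path and host/port
# are assumptions, not taken from the package.
import redis

from redis_benchmarks_specification.__runner__.remote_profiling import (  # assumed path
    calculate_profile_duration,
    extract_server_info_for_args,
    extract_server_metadata_for_timeseries,
)

conn = redis.Redis(host="localhost", port=6379)

# Auto-detect github_org/github_repo/github_version; a server_name containing
# "valkey" flips the defaults from redis/redis to valkey-io/valkey.
args_info = extract_server_info_for_args(conn)

# INFO SERVER fields (os, arch_bits, multiplexing_api, io_threads_active, ...)
# collected as string tags for timeseries metadata.
ts_tags = extract_server_metadata_for_timeseries(conn)

# The profile duration is clamped to the 10-30 second range.
assert calculate_profile_duration(5) == 10
assert calculate_profile_duration(20) == 20
assert calculate_profile_duration(120) == 30
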
@@ -102,7 +257,7 @@ def trigger_remote_profile(
     duration: int,
     timeout: int = 60,
     username: Optional[str] = None,
-    password: Optional[str] = None
+    password: Optional[str] = None,
 ) -> Optional[bytes]:
     """
     Trigger remote profiling via HTTP GET request using pprof endpoint.
@@ -120,10 +275,7 @@ def trigger_remote_profile(
         Profile content in pprof binary format, or None if failed
     """
     url = f"http://{host}:{port}/debug/pprof/profile"
-    params = {
-        "pid": pid,
-        "seconds": duration
-    }
+    params = {"pid": pid, "seconds": duration}

     # Prepare authentication if provided
     auth = None
@@ -132,7 +284,9 @@ def trigger_remote_profile(
         logging.info(f"Using HTTP basic authentication with username: {username}")

     try:
-        logging.info(f"Triggering remote profile: {url} with PID={pid}, duration={duration}s")
+        logging.info(
+            f"Triggering remote profile: {url} with PID={pid}, duration={duration}s"
+        )
         response = requests.get(url, params=params, timeout=timeout, auth=auth)
         response.raise_for_status()

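trigger_remote_profile boils down to a single HTTP GET against a pprof-style endpoint, with the target PID and capture window passed as query parameters and optional HTTP basic auth. A standalone sketch of the equivalent request (not part of the diff); host, port, credentials, and the output filename are placeholders:

# Standalone sketch of the request issued by trigger_remote_profile (not part
# of the diff). Host, port, credentials, and output filename are placeholders.
import requests

host, port = "10.0.0.5", 8080      # profiling endpoint serving /debug/pprof/profile
pid, duration = 12345, 30          # Redis server PID and capture window in seconds

response = requests.get(
    f"http://{host}:{port}/debug/pprof/profile",
    params={"pid": pid, "seconds": duration},
    timeout=60,
    auth=("user", "secret"),       # only when username/password are configured
)
response.raise_for_status()

# response.content is the binary pprof profile; it can be inspected with
# e.g. `go tool pprof profile.pb.gz`.
with open("profile.pb.gz", "wb") as f:
    f.write(response.content)
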
@@ -161,7 +315,7 @@ def save_profile_with_metadata(
     benchmark_name: str,
     output_dir: str,
     redis_metadata: Dict[str, Any],
-    duration: int
+    duration: int,
 ) -> Optional[str]:
     """
     Save profile content to file in pprof binary format.
@@ -188,7 +342,7 @@ def save_profile_with_metadata(
     timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

     # Write binary profile content directly
-    with open(filepath, 'wb') as f:
+    with open(filepath, "wb") as f:
         f.write(profile_content)

     # Create a separate metadata file
@@ -209,7 +363,7 @@ def save_profile_with_metadata(
         f"duration_seconds={duration}\n"
     )

-    with open(metadata_filepath, 'w') as f:
+    with open(metadata_filepath, "w") as f:
         f.write(metadata_content)

     logging.info(f"Saved profile to: {filepath}")
@@ -226,7 +380,14 @@ class RemoteProfiler:
     Remote profiler class to handle threaded profiling execution.
     """

-    def __init__(self, host: str, port: int, output_dir: str, username: Optional[str] = None, password: Optional[str] = None):
+    def __init__(
+        self,
+        host: str,
+        port: int,
+        output_dir: str,
+        username: Optional[str] = None,
+        password: Optional[str] = None,
+    ):
         self.host = host
         self.port = port
         self.output_dir = output_dir
@@ -235,21 +396,18 @@ class RemoteProfiler:
         self.profile_thread = None
         self.profile_result = None
         self.profile_error = None
-
+
     def start_profiling(
-        self,
-        redis_conn,
-        benchmark_name: str,
-        benchmark_duration_seconds: int
+        self, redis_conn, benchmark_name: str, benchmark_duration_seconds: int
     ) -> bool:
         """
         Start profiling in a separate thread.
-
+
         Args:
             redis_conn: Redis connection object
            benchmark_name: Name of the benchmark
            benchmark_duration_seconds: Expected benchmark duration
-
+
         Returns:
             True if profiling thread started successfully, False otherwise
         """
@@ -257,44 +415,60 @@ class RemoteProfiler:
             # Extract Redis metadata and PID
             redis_metadata = extract_redis_metadata(redis_conn)
             pid = redis_metadata.get("process_id")
-
+
             if pid == "unknown" or pid is None:
                 logging.error("Cannot start remote profiling: Redis PID not available")
                 return False
-
+
             # Calculate profiling duration
             duration = calculate_profile_duration(benchmark_duration_seconds)
-
+
             # Start profiling thread
             self.profile_thread = threading.Thread(
                 target=self._profile_worker,
                 args=(pid, duration, benchmark_name, redis_metadata),
-                daemon=True
+                daemon=True,
             )
             self.profile_thread.start()
-
-            logging.info(f"Started remote profiling thread for benchmark: {benchmark_name}")
+
+            logging.info(
+                f"Started remote profiling thread for benchmark: {benchmark_name}"
+            )
             return True
-
+
         except Exception as e:
             logging.error(f"Failed to start remote profiling: {e}")
             return False
-
-    def _profile_worker(self, pid: int, duration: int, benchmark_name: str, redis_metadata: Dict[str, Any]):
+
+    def _profile_worker(
+        self,
+        pid: int,
+        duration: int,
+        benchmark_name: str,
+        redis_metadata: Dict[str, Any],
+    ):
         """
         Worker function for profiling thread.
         """
         try:
             # Trigger remote profiling
             profile_content = trigger_remote_profile(
-                self.host, self.port, pid, duration,
-                username=self.username, password=self.password
+                self.host,
+                self.port,
+                pid,
+                duration,
+                username=self.username,
+                password=self.password,
             )

             if profile_content is not None:
                 # Save profile with metadata
                 filepath = save_profile_with_metadata(
-                    profile_content, benchmark_name, self.output_dir, redis_metadata, duration
+                    profile_content,
+                    benchmark_name,
+                    self.output_dir,
+                    redis_metadata,
+                    duration,
                 )
                 self.profile_result = filepath
             else:
@@ -303,38 +477,42 @@ class RemoteProfiler:
         except Exception as e:
             self.profile_error = f"Profile worker error: {e}"
             logging.error(self.profile_error)
-
+
     def wait_for_completion(self, timeout: int = 60) -> bool:
         """
         Wait for profiling thread to complete.
-
+
         Args:
             timeout: Maximum time to wait in seconds
-
+
         Returns:
             True if completed successfully, False if timed out or failed
         """
         if self.profile_thread is None:
             return False
-
+
         try:
             self.profile_thread.join(timeout=timeout)
-
+
             if self.profile_thread.is_alive():
-                logging.warning(f"Remote profiling thread did not complete within {timeout}s")
+                logging.warning(
+                    f"Remote profiling thread did not complete within {timeout}s"
+                )
                 return False
-
+
             if self.profile_error:
                 logging.error(f"Remote profiling failed: {self.profile_error}")
                 return False
-
+
             if self.profile_result:
-                logging.info(f"Remote profiling completed successfully: {self.profile_result}")
+                logging.info(
+                    f"Remote profiling completed successfully: {self.profile_result}"
+                )
                 return True
             else:
                 logging.warning("Remote profiling completed but no result available")
                 return False
-
+
         except Exception as e:
             logging.error(f"Error waiting for remote profiling completion: {e}")
             return False
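
Taken together, RemoteProfiler wraps the module-level helpers in a background thread: start_profiling resolves the Redis PID, picks the clamped duration, and spawns a daemon thread, while wait_for_completion joins it and exposes the saved profile path via profile_result. An end-to-end sketch (not part of the diff), with the import path, endpoint host/port, output directory, and benchmark name as placeholders:

# Standalone end-to-end sketch (not part of the diff). Import path, endpoint
# host/port, output directory, and benchmark name are placeholders.
import redis

from redis_benchmarks_specification.__runner__.remote_profiling import (  # assumed path
    RemoteProfiler,
)


def run_benchmark():
    # Placeholder for the actual benchmark run (e.g. driving memtier_benchmark).
    pass


profiler = RemoteProfiler(
    host="10.0.0.5",          # host serving /debug/pprof/profile
    port=8080,                # port of the profiling endpoint
    output_dir="/tmp/profiles",
    username=None,            # optional HTTP basic auth
    password=None,
)

conn = redis.Redis(host="localhost", port=6379)

# Spawns a daemon thread that triggers the remote profile for the Redis PID.
if profiler.start_profiling(conn, "example-benchmark", benchmark_duration_seconds=60):
    run_benchmark()
    if profiler.wait_for_completion(timeout=60):
        print("profile saved to:", profiler.profile_result)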