provide-foundation 0.0.0.dev0__py3-none-any.whl → 0.0.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. provide/foundation/__init__.py +12 -20
  2. provide/foundation/archive/__init__.py +23 -0
  3. provide/foundation/archive/base.py +70 -0
  4. provide/foundation/archive/bzip2.py +157 -0
  5. provide/foundation/archive/gzip.py +159 -0
  6. provide/foundation/archive/operations.py +336 -0
  7. provide/foundation/archive/tar.py +164 -0
  8. provide/foundation/archive/zip.py +203 -0
  9. provide/foundation/config/base.py +2 -2
  10. provide/foundation/config/sync.py +19 -4
  11. provide/foundation/core.py +1 -2
  12. provide/foundation/crypto/__init__.py +2 -0
  13. provide/foundation/crypto/certificates/__init__.py +34 -0
  14. provide/foundation/crypto/certificates/base.py +173 -0
  15. provide/foundation/crypto/certificates/certificate.py +290 -0
  16. provide/foundation/crypto/certificates/factory.py +213 -0
  17. provide/foundation/crypto/certificates/generator.py +138 -0
  18. provide/foundation/crypto/certificates/loader.py +130 -0
  19. provide/foundation/crypto/certificates/operations.py +198 -0
  20. provide/foundation/crypto/certificates/trust.py +107 -0
  21. provide/foundation/eventsets/__init__.py +0 -0
  22. provide/foundation/eventsets/display.py +84 -0
  23. provide/foundation/eventsets/registry.py +160 -0
  24. provide/foundation/eventsets/resolver.py +192 -0
  25. provide/foundation/eventsets/sets/das.py +128 -0
  26. provide/foundation/eventsets/sets/database.py +125 -0
  27. provide/foundation/eventsets/sets/http.py +153 -0
  28. provide/foundation/eventsets/sets/llm.py +139 -0
  29. provide/foundation/eventsets/sets/task_queue.py +107 -0
  30. provide/foundation/eventsets/types.py +70 -0
  31. provide/foundation/hub/components.py +7 -133
  32. provide/foundation/logger/__init__.py +3 -10
  33. provide/foundation/logger/config/logging.py +6 -6
  34. provide/foundation/logger/core.py +0 -2
  35. provide/foundation/logger/custom_processors.py +1 -0
  36. provide/foundation/logger/factories.py +11 -2
  37. provide/foundation/logger/processors/main.py +20 -84
  38. provide/foundation/logger/setup/__init__.py +5 -1
  39. provide/foundation/logger/setup/coordinator.py +75 -23
  40. provide/foundation/logger/setup/processors.py +2 -9
  41. provide/foundation/logger/trace.py +27 -0
  42. provide/foundation/metrics/otel.py +10 -10
  43. provide/foundation/process/lifecycle.py +82 -26
  44. provide/foundation/testing/__init__.py +77 -0
  45. provide/foundation/testing/archive/__init__.py +24 -0
  46. provide/foundation/testing/archive/fixtures.py +217 -0
  47. provide/foundation/testing/common/__init__.py +34 -0
  48. provide/foundation/testing/common/fixtures.py +263 -0
  49. provide/foundation/testing/file/__init__.py +40 -0
  50. provide/foundation/testing/file/fixtures.py +523 -0
  51. provide/foundation/testing/logger.py +41 -11
  52. provide/foundation/testing/mocking/__init__.py +46 -0
  53. provide/foundation/testing/mocking/fixtures.py +331 -0
  54. provide/foundation/testing/process/__init__.py +48 -0
  55. provide/foundation/testing/process/fixtures.py +577 -0
  56. provide/foundation/testing/threading/__init__.py +38 -0
  57. provide/foundation/testing/threading/fixtures.py +520 -0
  58. provide/foundation/testing/time/__init__.py +32 -0
  59. provide/foundation/testing/time/fixtures.py +409 -0
  60. provide/foundation/testing/transport/__init__.py +30 -0
  61. provide/foundation/testing/transport/fixtures.py +280 -0
  62. provide/foundation/tools/__init__.py +58 -0
  63. provide/foundation/tools/base.py +348 -0
  64. provide/foundation/tools/cache.py +266 -0
  65. provide/foundation/tools/downloader.py +213 -0
  66. provide/foundation/tools/installer.py +254 -0
  67. provide/foundation/tools/registry.py +223 -0
  68. provide/foundation/tools/resolver.py +321 -0
  69. provide/foundation/tools/verifier.py +186 -0
  70. provide/foundation/tracer/otel.py +7 -11
  71. provide/foundation/transport/__init__.py +155 -0
  72. provide/foundation/transport/base.py +171 -0
  73. provide/foundation/transport/client.py +266 -0
  74. provide/foundation/transport/config.py +209 -0
  75. provide/foundation/transport/errors.py +79 -0
  76. provide/foundation/transport/http.py +232 -0
  77. provide/foundation/transport/middleware.py +366 -0
  78. provide/foundation/transport/registry.py +167 -0
  79. provide/foundation/transport/types.py +45 -0
  80. {provide_foundation-0.0.0.dev0.dist-info → provide_foundation-0.0.0.dev1.dist-info}/METADATA +5 -28
  81. {provide_foundation-0.0.0.dev0.dist-info → provide_foundation-0.0.0.dev1.dist-info}/RECORD +85 -34
  82. provide/foundation/cli/commands/logs/generate_old.py +0 -569
  83. provide/foundation/crypto/certificates.py +0 -896
  84. provide/foundation/logger/emoji/__init__.py +0 -44
  85. provide/foundation/logger/emoji/matrix.py +0 -209
  86. provide/foundation/logger/emoji/sets.py +0 -458
  87. provide/foundation/logger/emoji/types.py +0 -56
  88. provide/foundation/logger/setup/emoji_resolver.py +0 -64
  89. {provide_foundation-0.0.0.dev0.dist-info → provide_foundation-0.0.0.dev1.dist-info}/WHEEL +0 -0
  90. {provide_foundation-0.0.0.dev0.dist-info → provide_foundation-0.0.0.dev1.dist-info}/entry_points.txt +0 -0
  91. {provide_foundation-0.0.0.dev0.dist-info → provide_foundation-0.0.0.dev1.dist-info}/licenses/LICENSE +0 -0
  92. {provide_foundation-0.0.0.dev0.dist-info → provide_foundation-0.0.0.dev1.dist-info}/top_level.txt +0 -0
provide/foundation/logger/trace.py
@@ -36,3 +36,30 @@ if not hasattr(stdlib_logging, TRACE_LEVEL_NAME):  # pragma: no cover
     (cast(Any, stdlib_logging.root)).trace = trace.__get__(
         stdlib_logging.root, stdlib_logging.Logger
     )
+
+# Also patch PrintLogger from structlog to support the trace method
+try:
+    import structlog
+    from structlog import PrintLogger
+
+    if not hasattr(PrintLogger, "trace"):  # pragma: no cover
+        def trace_for_print_logger(
+            self: PrintLogger, msg: object, *args: object, **kwargs: object
+        ) -> None:  # pragma: no cover
+            # PrintLogger doesn't have level checking, so just format and print like other methods
+            if args:
+                try:
+                    formatted_msg = str(msg) % args
+                except (TypeError, ValueError):
+                    formatted_msg = f"{msg} {args}"
+            else:
+                formatted_msg = str(msg)
+
+            # Use the same output mechanism as other PrintLogger methods
+            self._file.write(formatted_msg + "\n")
+            self._file.flush()
+
+        PrintLogger.trace = trace_for_print_logger  # type: ignore[attr-defined]
+
+except ImportError:  # pragma: no cover
+    pass
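Once this module has been imported, a structlog PrintLogger gains a printf-style trace method. A small demonstration of the added behavior; it assumes structlog is installed and that importing provide.foundation.logger.trace is what applies the patch, as the module-level code above suggests:

# Demonstration only: the trace method exists on PrintLogger solely
# because the patch above has run at import time.
import sys

import provide.foundation.logger.trace  # noqa: F401  (applies the patch)
from structlog import PrintLogger

logger = PrintLogger(file=sys.stdout)
logger.trace("user %s connected", 42)  # prints: user 42 connected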
provide/foundation/metrics/otel.py
@@ -1,9 +1,9 @@
 """OpenTelemetry metrics integration."""
 
-from provide.foundation.logger import get_logger
 from provide.foundation.logger.config.telemetry import TelemetryConfig
+from provide.foundation.logger.setup import get_vanilla_logger
 
-log = get_logger(__name__)
+slog = get_vanilla_logger(__name__)
 
 # Feature detection
 try:
@@ -47,15 +47,15 @@ def setup_opentelemetry_metrics(config: TelemetryConfig) -> None:
     """
     # Check if metrics are disabled first, before checking dependencies
     if not config.metrics_enabled or config.globally_disabled:
-        log.debug("📊 OpenTelemetry metrics disabled")
+        slog.debug("📊 OpenTelemetry metrics disabled")
         return
 
     # Check if OpenTelemetry metrics are available
     if not _HAS_OTEL_METRICS:
-        log.debug("📊 OpenTelemetry metrics not available (dependencies not installed)")
+        slog.debug("📊 OpenTelemetry metrics not available (dependencies not installed)")
         return
 
-    log.debug("📊🚀 Setting up OpenTelemetry metrics")
+    slog.debug("📊🚀 Setting up OpenTelemetry metrics")
 
     # Create resource with service information
     resource_attrs = {}
@@ -73,7 +73,7 @@ def setup_opentelemetry_metrics(config: TelemetryConfig) -> None:
         endpoint = config.otlp_endpoint
         headers = config.get_otlp_headers_dict()
 
-        log.debug(f"📊📤 Configuring OTLP metrics exporter: {endpoint}")
+        slog.debug(f"📊📤 Configuring OTLP metrics exporter: {endpoint}")
 
         # Choose exporter based on protocol
        if config.otlp_protocol == "grpc":
@@ -91,7 +91,7 @@ def setup_opentelemetry_metrics(config: TelemetryConfig) -> None:
         reader = PeriodicExportingMetricReader(exporter, export_interval_millis=60000)
         readers.append(reader)
 
-        log.debug(f"✅ OTLP metrics exporter configured: {config.otlp_protocol}")
+        slog.debug(f"✅ OTLP metrics exporter configured: {config.otlp_protocol}")
 
     # Create meter provider
     meter_provider = MeterProvider(resource=resource, metric_readers=readers)
@@ -105,7 +105,7 @@ def setup_opentelemetry_metrics(config: TelemetryConfig) -> None:
     meter = otel_metrics.get_meter(__name__)
     _set_meter(meter)
 
-    log.info("📊✅ OpenTelemetry metrics setup complete")
+    slog.info("📊✅ OpenTelemetry metrics setup complete")
 
 
 def shutdown_opentelemetry_metrics() -> None:
@@ -117,6 +117,6 @@ def shutdown_opentelemetry_metrics() -> None:
         meter_provider = otel_metrics.get_meter_provider()
         if hasattr(meter_provider, "shutdown"):
             meter_provider.shutdown()
-            log.debug("📊🛑 OpenTelemetry meter provider shutdown")
+            slog.debug("📊🛑 OpenTelemetry meter provider shutdown")
     except Exception as e:
-        log.warning(f"⚠️ Error shutting down OpenTelemetry metrics: {e}")
+        slog.warning(f"⚠️ Error shutting down OpenTelemetry metrics: {e}")
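The switch from get_logger to get_vanilla_logger means the metrics module logs through a plain logger rather than foundation's structlog pipeline, avoiding re-entrancy while telemetry itself is being configured. The implementation of get_vanilla_logger is not shown in this diff; a minimal sketch of the idea, assuming it simply wraps the stdlib:

# Hypothetical sketch only -- the real get_vanilla_logger lives in
# provide.foundation.logger.setup and its body is not part of this diff.
import logging
import sys

def get_vanilla_logger(name: str, level: int = logging.DEBUG) -> logging.Logger:
    # A plain stdlib logger writing to stderr, safe to call while the
    # structlog-based pipeline is still being set up.
    logger = logging.getLogger(name)
    if not logger.handlers:
        handler = logging.StreamHandler(sys.stderr)
        handler.setFormatter(logging.Formatter("%(name)s %(levelname)s %(message)s"))
        logger.addHandler(handler)
    logger.setLevel(level)
    return logger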
provide/foundation/process/lifecycle.py
@@ -67,8 +67,16 @@ class ManagedProcess:
         self.stderr_relay = stderr_relay
         self.kwargs = kwargs
 
-        # Build environment
+        # Build environment - always start with the current environment
         self._env = os.environ.copy()
+
+        # Clean coverage-related environment variables from the subprocess
+        # to prevent interference with output capture during testing
+        for key in list(self._env.keys()):
+            if key.startswith(('COVERAGE', 'COV_CORE')):
+                self._env.pop(key, None)
+
+        # Merge in any provided environment variables
         if env:
             self._env.update(env)
 
provide/foundation/process/lifecycle.py
@@ -357,6 +365,7 @@ async def wait_for_process_output(
     loop = asyncio.get_event_loop()
     start_time = loop.time()
     buffer = ""
+    last_exit_code = None
 
     plog.debug(
         "⏳ Waiting for process output pattern",
@@ -365,17 +374,62 @@ async def wait_for_process_output(
     )
 
     while (loop.time() - start_time) < timeout:
-        # Check if process is still running
+        # Check if process has exited
         if not process.is_running():
-            returncode = process.returncode
-            plog.error("Process exited unexpectedly", returncode=returncode)
-            raise ProcessError(f"Process exited with code {returncode}")
-
+            last_exit_code = process.returncode
+            plog.debug("Process exited", returncode=last_exit_code)
+
+            # Try to drain any remaining output from the pipes
+            if process._process and process._process.stdout:
+                try:
+                    # Non-blocking read of any remaining data
+                    remaining = process._process.stdout.read()
+                    if remaining:
+                        if isinstance(remaining, bytes):
+                            buffer += remaining.decode("utf-8", errors="replace")
+                        else:
+                            buffer += str(remaining)
+                        plog.debug("Read remaining output from exited process", size=len(remaining))
+                except Exception:
+                    pass
+
+            # Check buffer after draining
+            if all(part in buffer for part in expected_parts):
+                plog.debug("Found expected pattern after process exit")
+                return buffer
+
+            # If the process exited and we don't have the pattern, fail
+            if last_exit_code is not None:
+                if last_exit_code != 0:
+                    plog.error("Process exited with error", returncode=last_exit_code, buffer=buffer[:200])
+                    raise ProcessError(f"Process exited with code {last_exit_code}")
+                else:
+                    # For exit code 0, give it a small window to collect buffered output
+                    await asyncio.sleep(0.1)
+                    # Try one more time to drain output
+                    if process._process and process._process.stdout:
+                        try:
+                            remaining = process._process.stdout.read()
+                            if remaining:
+                                if isinstance(remaining, bytes):
+                                    buffer += remaining.decode("utf-8", errors="replace")
+                                else:
+                                    buffer += str(remaining)
+                        except Exception:
+                            pass
+                    # Final check
+                    if all(part in buffer for part in expected_parts):
+                        plog.debug("Found expected pattern after final drain")
+                        return buffer
+                    # Process exited cleanly but the pattern never arrived
+                    plog.error("Process exited without expected output", returncode=0, buffer=buffer[:200])
+                    raise ProcessError(f"Process exited with code {last_exit_code} before expected output found")
+
         try:
-            # Try to read a line first
-            line = await process.read_line_async(timeout=2.0)
+            # Try to read a line with a short timeout
+            line = await process.read_line_async(timeout=0.1)
             if line:
-                buffer += line
+                buffer += line + "\n"  # Add the newline back since readline strips it
                 plog.debug("Read line from process", line=line[:100])
 
             # Check if we have all expected parts
@@ -384,23 +438,25 @@ async def wait_for_process_output(
                 return buffer
 
         except TimeoutError:
-            plog.debug("Line read timeout, trying character-by-character")
-
-        try:
-            # Fall back to character-by-character reading
-            char = await process.read_char_async(timeout=1.0)
-            if char:
-                buffer += char
-                plog.debug("Read character", char=repr(char), buffer_size=len(buffer))
-
-            # Check pattern again
-            if all(part in buffer for part in expected_parts):
-                plog.debug("Found expected pattern in buffer (char mode)")
-                return buffer
-
-        except TimeoutError:
-            await asyncio.sleep(0.25)
-
+            pass
+        except Exception:
+            # Process might have exited; continue polling
+            pass
+
+        # Short sleep to avoid a busy loop
+        await asyncio.sleep(0.01)
+
+    # Final check of the buffer before the timeout error
+    if all(part in buffer for part in expected_parts):
+        return buffer
+
+    # If the process exited with 0 but we didn't get output, that's still a timeout
+    plog.error(
+        "Timeout waiting for pattern",
+        expected_parts=expected_parts,
+        buffer=buffer[:200],
+        last_exit_code=last_exit_code,
+    )
     raise TimeoutError(
         f"Expected pattern {expected_parts} not found within {timeout}s timeout"
     )
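The rewritten loop gives three distinct outcomes: ProcessError on a nonzero exit, ProcessError when a clean exit never produced the pattern even after draining the pipes, and TimeoutError when the deadline passes. A rough caller-side sketch, with argument names inferred from the hunks above (ManagedProcess construction is elided since its full signature is not shown here):

# Hedged usage sketch; only `process`, `expected_parts`, and `timeout`
# are visible in the hunks above, the rest is assumed.
from provide.foundation.process.lifecycle import wait_for_process_output

async def wait_until_ready(process) -> str:
    try:
        return await wait_for_process_output(
            process,
            expected_parts=["listening on", "8080"],  # hypothetical markers
            timeout=10.0,
        )
    except TimeoutError:
        # Deadline passed without the pattern; buffer contents were logged above.
        raise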
provide/foundation/testing/__init__.py
@@ -93,6 +93,83 @@ def __getattr__(name: str) -> Any:
         import provide.foundation.testing.fixtures as fixtures_module
 
         return getattr(fixtures_module, name)
+
+    # Import submodules directly
+    elif name in ["archive", "common", "file", "process", "transport", "mocking", "time", "threading"]:
+        import importlib
+        return importlib.import_module(f"provide.foundation.testing.{name}")
+
+    # File testing utilities (backward compatibility)
+    elif name in [
+        "temp_directory",
+        "test_files_structure",
+        "temp_file",
+        "binary_file",
+        "nested_directory_structure",
+        "empty_directory",
+        "readonly_file",
+    ]:
+        import provide.foundation.testing.file.fixtures as file_module
+        return getattr(file_module, name)
+
+    # Process/async testing utilities (backward compatibility)
+    elif name in [
+        "clean_event_loop",
+        "async_timeout",
+        "mock_async_process",
+        "async_stream_reader",
+        "event_loop_policy",
+        "async_context_manager",
+        "async_iterator",
+        "async_queue",
+        "async_lock",
+        "mock_async_sleep",
+    ]:
+        import provide.foundation.testing.process.fixtures as process_module
+        return getattr(process_module, name)
+
+    # Common mock utilities (backward compatibility)
+    elif name in [
+        "mock_http_config",
+        "mock_telemetry_config",
+        "mock_config_source",
+        "mock_event_emitter",
+        "mock_transport",
+        "mock_metrics_collector",
+        "mock_cache",
+        "mock_database",
+        "mock_file_system",
+        "mock_subprocess",
+    ]:
+        import provide.foundation.testing.common.fixtures as common_module
+        return getattr(common_module, name)
+
+    # Transport/network testing utilities (backward compatibility)
+    elif name in [
+        "free_port",
+        "mock_server",
+        "httpx_mock_responses",
+        "mock_websocket",
+        "mock_dns_resolver",
+        "tcp_client_server",
+        "mock_ssl_context",
+        "network_timeout",
+        "mock_http_headers",
+    ]:
+        import provide.foundation.testing.transport.fixtures as transport_module
+        return getattr(transport_module, name)
+
+    # Archive testing utilities
+    elif name in [
+        "archive_test_content",
+        "large_file_for_compression",
+        "multi_format_archives",
+        "archive_with_permissions",
+        "corrupted_archives",
+        "archive_stress_test_files",
+    ]:
+        import provide.foundation.testing.archive.fixtures as archive_module
+        return getattr(archive_module, name)
 
     # Crypto fixtures (many fixtures)
     elif name in [
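This dispatch lives in a module-level __getattr__ (PEP 562), so an access like provide.foundation.testing.temp_directory only imports the defining submodule on first use. The mechanism, reduced to a self-contained sketch with hypothetical module names:

# mypkg/__init__.py -- minimal PEP 562 lazy-export sketch (names are hypothetical).
import importlib
from typing import Any

_LAZY_EXPORTS = {
    "temp_directory": "mypkg.file.fixtures",  # attribute -> defining module
}

def __getattr__(name: str) -> Any:
    if name in _LAZY_EXPORTS:
        # Import happens only when the attribute is first accessed.
        module = importlib.import_module(_LAZY_EXPORTS[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")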
provide/foundation/testing/archive/__init__.py (new file)
@@ -0,0 +1,24 @@
+"""
+Archive testing fixtures for the provide-io ecosystem.
+
+Standard fixtures for testing archive operations (tar, zip, gzip, bzip2)
+across any project that depends on provide.foundation.
+"""
+
+from provide.foundation.testing.archive.fixtures import (
+    archive_test_content,
+    large_file_for_compression,
+    multi_format_archives,
+    archive_with_permissions,
+    corrupted_archives,
+    archive_stress_test_files,
+)
+
+__all__ = [
+    "archive_test_content",
+    "large_file_for_compression",
+    "multi_format_archives",
+    "archive_with_permissions",
+    "corrupted_archives",
+    "archive_stress_test_files",
+]
provide/foundation/testing/archive/fixtures.py (new file)
@@ -0,0 +1,217 @@
+"""
+Archive Testing Fixtures.
+
+Fixtures specific to testing archive operations like tar, zip, gzip, and bzip2.
+Builds on top of the file fixtures for archive-specific test scenarios.
+"""
+
+from pathlib import Path
+from collections.abc import Generator
+
+import pytest
+
+from provide.foundation.testing.file.fixtures import temp_directory
+
+
+@pytest.fixture
+def archive_test_content() -> Generator[tuple[Path, dict[str, str]], None, None]:
+    """
+    Create a standard set of files for archive testing.
+
+    Creates multiple files with different types of content to ensure
+    proper compression and extraction testing.
+
+    Yields:
+        Tuple of (source_dir, content_map) where content_map maps
+        relative paths to their expected content.
+    """
+    with temp_directory() as temp_dir:
+        source = temp_dir / "archive_source"
+        source.mkdir()
+
+        content_map = {
+            "text_file.txt": "This is a text file for archive testing.\n" * 10,
+            "data.json": '{"test": "data", "array": [1, 2, 3]}',
+            "script.py": "#!/usr/bin/env python\nprint('Hello from archive')\n",
+            "nested/dir/file.md": "# Nested File\nContent in nested directory",
+            "binary.dat": "Binary\x00\x01\x02\x03\xFF\xFE data",
+            "empty.txt": "",
+        }
+
+        # Create all files
+        for rel_path, content in content_map.items():
+            file_path = source / rel_path
+            file_path.parent.mkdir(parents=True, exist_ok=True)
+
+            if isinstance(content, str):
+                file_path.write_text(content)
+            else:
+                file_path.write_bytes(content.encode() if isinstance(content, str) else content)
+
+        yield source, content_map
+
+
+@pytest.fixture
+def large_file_for_compression() -> Generator[Path, None, None]:
+    """
+    Create a large file suitable for compression testing.
+
+    The file contains repetitive content that compresses well.
+
+    Yields:
+        Path to a large file with compressible content.
+    """
+    with temp_directory() as temp_dir:
+        large_file = temp_dir / "large_compressible.txt"
+
+        # Create several megabytes of highly compressible content
+        content = "This is a line of text that will be repeated many times.\n" * 100
+        large_content = content * 1000  # ~6MB of repetitive text
+
+        large_file.write_text(large_content)
+        yield large_file
+
+
+@pytest.fixture
+def multi_format_archives() -> Generator[dict[str, Path], None, None]:
+    """
+    Create sample archives in different formats for format detection testing.
+
+    Yields:
+        Dict mapping format names to paths of sample archives.
+    """
+    with temp_directory() as temp_dir:
+        archives = {}
+
+        # Create minimal archives in different formats
+        # Note: these are minimal headers, not fully valid archives
+
+        # GZIP file (magic: 1f 8b)
+        gzip_file = temp_dir / "sample.gz"
+        gzip_file.write_bytes(b'\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03' + b'compressed data')
+        archives["gzip"] = gzip_file
+
+        # BZIP2 file (magic: BZh)
+        bzip2_file = temp_dir / "sample.bz2"
+        bzip2_file.write_bytes(b'BZh91AY&SY' + b'compressed data')
+        archives["bzip2"] = bzip2_file
+
+        # ZIP file (magic: PK\x03\x04)
+        zip_file = temp_dir / "sample.zip"
+        zip_file.write_bytes(b'PK\x03\x04' + b'\x00' * 16 + b'zipfile')
+        archives["zip"] = zip_file
+
+        # TAR file (has a specific 512-byte header structure)
+        tar_file = temp_dir / "sample.tar"
+        # Minimal tar header (512 bytes)
+        tar_header = b'testfile.txt' + b'\x00' * 88  # name (100 bytes)
+        tar_header += b'0000644\x00'  # mode
+        tar_header += b'0000000\x00'  # uid
+        tar_header += b'0000000\x00'  # gid
+        tar_header += b'00000000000\x00'  # size
+        tar_header += b'00000000000\x00'  # mtime
+        tar_header += b'        '  # checksum placeholder (8 bytes)
+        tar_header += b'0'  # typeflag
+        tar_header += b'\x00' * 355  # padding to 512 bytes
+        tar_file.write_bytes(tar_header[:512])
+        archives["tar"] = tar_file
+
+        yield archives
+
+
+@pytest.fixture
+def archive_with_permissions() -> Generator[Path, None, None]:
+    """
+    Create files with specific permissions for archive permission testing.
+
+    Yields:
+        Path to a directory containing files with various permission modes.
+    """
+    with temp_directory() as temp_dir:
+        source = temp_dir / "permissions_test"
+        source.mkdir()
+
+        # Regular file
+        regular = source / "regular.txt"
+        regular.write_text("Regular file")
+        regular.chmod(0o644)
+
+        # Executable file
+        executable = source / "script.sh"
+        executable.write_text("#!/bin/bash\necho 'Hello'")
+        executable.chmod(0o755)
+
+        # Read-only file
+        readonly = source / "readonly.txt"
+        readonly.write_text("Read only content")
+        readonly.chmod(0o444)
+
+        # Directory with specific permissions
+        special_dir = source / "special"
+        special_dir.mkdir()
+        special_dir.chmod(0o700)
+
+        yield source
+
+
+@pytest.fixture
+def corrupted_archives() -> Generator[dict[str, Path], None, None]:
+    """
+    Create corrupted archive files for error handling testing.
+
+    Yields:
+        Dict mapping format names to paths of corrupted archives.
+    """
+    with temp_directory() as temp_dir:
+        corrupted = {}
+
+        # Corrupted GZIP (invalid header)
+        bad_gzip = temp_dir / "corrupted.gz"
+        bad_gzip.write_bytes(b'\x1f\x8c' + b'not really gzip data')
+        corrupted["gzip"] = bad_gzip
+
+        # Corrupted ZIP (incomplete header)
+        bad_zip = temp_dir / "corrupted.zip"
+        bad_zip.write_bytes(b'PK\x03')  # Incomplete magic
+        corrupted["zip"] = bad_zip
+
+        # Corrupted BZIP2 (wrong magic)
+        bad_bzip2 = temp_dir / "corrupted.bz2"
+        bad_bzip2.write_bytes(b'BZX' + b'not bzip2')
+        corrupted["bzip2"] = bad_bzip2
+
+        # Empty file claiming to be an archive
+        empty_archive = temp_dir / "empty.tar.gz"
+        empty_archive.write_bytes(b'')
+        corrupted["empty"] = empty_archive
+
+        yield corrupted
+
+
+@pytest.fixture
+def archive_stress_test_files() -> Generator[Path, None, None]:
+    """
+    Create a large number of files for stress testing archive operations.
+
+    Yields:
+        Path to a directory with many files for stress testing.
+    """
+    with temp_directory() as temp_dir:
+        stress_dir = temp_dir / "stress_test"
+        stress_dir.mkdir()
+
+        # Create 100 files across 10 subdirectories
+        for i in range(10):
+            subdir = stress_dir / f"subdir_{i}"
+            subdir.mkdir()
+
+            for j in range(10):
+                file_path = subdir / f"file_{j}.txt"
+                file_path.write_text(f"Content of file {i}_{j}\n" * 10)
+
+        # Add some binary files
+        for i in range(5):
+            bin_file = stress_dir / f"binary_{i}.dat"
+            bin_file.write_bytes(bytes(range(256)) * 10)
+
+        yield stress_dir
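Because these are ordinary pytest fixtures, a test consumes them by parameter name once the fixture module is registered with pytest (see the conftest sketch at the end of this section). A hypothetical test exercising archive_test_content; only the fixture's (source, content_map) contract comes from the code above:

# Hypothetical test built on the fixture contract shown above.
def test_archive_source_matches_content_map(archive_test_content) -> None:
    source, content_map = archive_test_content
    for rel_path, expected in content_map.items():
        # Each mapped path exists under the source tree with its expected content.
        assert (source / rel_path).read_text() == expected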
provide/foundation/testing/common/__init__.py (new file)
@@ -0,0 +1,34 @@
+"""
+Common testing fixtures for the provide-io ecosystem.
+
+Standard mock objects and fixtures that are used across multiple modules
+in any project that depends on provide.foundation.
+"""
+
+from provide.foundation.testing.common.fixtures import (
+    mock_http_config,
+    mock_logger,
+    mock_telemetry_config,
+    mock_config_source,
+    mock_event_emitter,
+    mock_transport,
+    mock_metrics_collector,
+    mock_cache,
+    mock_database,
+    mock_file_system,
+    mock_subprocess,
+)
+
+__all__ = [
+    "mock_http_config",
+    "mock_logger",
+    "mock_telemetry_config",
+    "mock_config_source",
+    "mock_event_emitter",
+    "mock_transport",
+    "mock_metrics_collector",
+    "mock_cache",
+    "mock_database",
+    "mock_file_system",
+    "mock_subprocess",
+]
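To make these fixtures visible to a downstream test suite, the consuming project still has to register the fixture modules with pytest. One plausible wiring, assumed rather than shown in this diff, is pytest_plugins in the project's top-level conftest.py:

# conftest.py in a downstream project -- assumed wiring, not part of this diff.
pytest_plugins = [
    "provide.foundation.testing.common.fixtures",
    "provide.foundation.testing.archive.fixtures",
    "provide.foundation.testing.file.fixtures",
]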