fraiseql_confiture-0.3.4-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. confiture/__init__.py +48 -0
  2. confiture/_core.cp311-win_amd64.pyd +0 -0
  3. confiture/cli/__init__.py +0 -0
  4. confiture/cli/dry_run.py +116 -0
  5. confiture/cli/lint_formatter.py +193 -0
  6. confiture/cli/main.py +1656 -0
  7. confiture/config/__init__.py +0 -0
  8. confiture/config/environment.py +263 -0
  9. confiture/core/__init__.py +51 -0
  10. confiture/core/anonymization/__init__.py +0 -0
  11. confiture/core/anonymization/audit.py +485 -0
  12. confiture/core/anonymization/benchmarking.py +372 -0
  13. confiture/core/anonymization/breach_notification.py +652 -0
  14. confiture/core/anonymization/compliance.py +617 -0
  15. confiture/core/anonymization/composer.py +298 -0
  16. confiture/core/anonymization/data_subject_rights.py +669 -0
  17. confiture/core/anonymization/factory.py +319 -0
  18. confiture/core/anonymization/governance.py +737 -0
  19. confiture/core/anonymization/performance.py +1092 -0
  20. confiture/core/anonymization/profile.py +284 -0
  21. confiture/core/anonymization/registry.py +195 -0
  22. confiture/core/anonymization/security/kms_manager.py +547 -0
  23. confiture/core/anonymization/security/lineage.py +888 -0
  24. confiture/core/anonymization/security/token_store.py +686 -0
  25. confiture/core/anonymization/strategies/__init__.py +41 -0
  26. confiture/core/anonymization/strategies/address.py +359 -0
  27. confiture/core/anonymization/strategies/credit_card.py +374 -0
  28. confiture/core/anonymization/strategies/custom.py +161 -0
  29. confiture/core/anonymization/strategies/date.py +218 -0
  30. confiture/core/anonymization/strategies/differential_privacy.py +398 -0
  31. confiture/core/anonymization/strategies/email.py +141 -0
  32. confiture/core/anonymization/strategies/format_preserving_encryption.py +310 -0
  33. confiture/core/anonymization/strategies/hash.py +150 -0
  34. confiture/core/anonymization/strategies/ip_address.py +235 -0
  35. confiture/core/anonymization/strategies/masking_retention.py +252 -0
  36. confiture/core/anonymization/strategies/name.py +298 -0
  37. confiture/core/anonymization/strategies/phone.py +119 -0
  38. confiture/core/anonymization/strategies/preserve.py +85 -0
  39. confiture/core/anonymization/strategies/redact.py +101 -0
  40. confiture/core/anonymization/strategies/salted_hashing.py +322 -0
  41. confiture/core/anonymization/strategies/text_redaction.py +183 -0
  42. confiture/core/anonymization/strategies/tokenization.py +334 -0
  43. confiture/core/anonymization/strategy.py +241 -0
  44. confiture/core/anonymization/syncer_audit.py +357 -0
  45. confiture/core/blue_green.py +683 -0
  46. confiture/core/builder.py +500 -0
  47. confiture/core/checksum.py +358 -0
  48. confiture/core/connection.py +132 -0
  49. confiture/core/differ.py +522 -0
  50. confiture/core/drift.py +564 -0
  51. confiture/core/dry_run.py +182 -0
  52. confiture/core/health.py +313 -0
  53. confiture/core/hooks/__init__.py +87 -0
  54. confiture/core/hooks/base.py +232 -0
  55. confiture/core/hooks/context.py +146 -0
  56. confiture/core/hooks/execution_strategies.py +57 -0
  57. confiture/core/hooks/observability.py +220 -0
  58. confiture/core/hooks/phases.py +53 -0
  59. confiture/core/hooks/registry.py +295 -0
  60. confiture/core/large_tables.py +775 -0
  61. confiture/core/linting/__init__.py +70 -0
  62. confiture/core/linting/composer.py +192 -0
  63. confiture/core/linting/libraries/__init__.py +17 -0
  64. confiture/core/linting/libraries/gdpr.py +168 -0
  65. confiture/core/linting/libraries/general.py +184 -0
  66. confiture/core/linting/libraries/hipaa.py +144 -0
  67. confiture/core/linting/libraries/pci_dss.py +104 -0
  68. confiture/core/linting/libraries/sox.py +120 -0
  69. confiture/core/linting/schema_linter.py +491 -0
  70. confiture/core/linting/versioning.py +151 -0
  71. confiture/core/locking.py +389 -0
  72. confiture/core/migration_generator.py +298 -0
  73. confiture/core/migrator.py +793 -0
  74. confiture/core/observability/__init__.py +44 -0
  75. confiture/core/observability/audit.py +323 -0
  76. confiture/core/observability/logging.py +187 -0
  77. confiture/core/observability/metrics.py +174 -0
  78. confiture/core/observability/tracing.py +192 -0
  79. confiture/core/pg_version.py +418 -0
  80. confiture/core/pool.py +406 -0
  81. confiture/core/risk/__init__.py +39 -0
  82. confiture/core/risk/predictor.py +188 -0
  83. confiture/core/risk/scoring.py +248 -0
  84. confiture/core/rollback_generator.py +388 -0
  85. confiture/core/schema_analyzer.py +769 -0
  86. confiture/core/schema_to_schema.py +590 -0
  87. confiture/core/security/__init__.py +32 -0
  88. confiture/core/security/logging.py +201 -0
  89. confiture/core/security/validation.py +416 -0
  90. confiture/core/signals.py +371 -0
  91. confiture/core/syncer.py +540 -0
  92. confiture/exceptions.py +192 -0
  93. confiture/integrations/__init__.py +0 -0
  94. confiture/models/__init__.py +0 -0
  95. confiture/models/lint.py +193 -0
  96. confiture/models/migration.py +180 -0
  97. confiture/models/schema.py +203 -0
  98. confiture/scenarios/__init__.py +36 -0
  99. confiture/scenarios/compliance.py +586 -0
  100. confiture/scenarios/ecommerce.py +199 -0
  101. confiture/scenarios/financial.py +253 -0
  102. confiture/scenarios/healthcare.py +315 -0
  103. confiture/scenarios/multi_tenant.py +340 -0
  104. confiture/scenarios/saas.py +295 -0
  105. confiture/testing/FRAMEWORK_API.md +722 -0
  106. confiture/testing/__init__.py +38 -0
  107. confiture/testing/fixtures/__init__.py +11 -0
  108. confiture/testing/fixtures/data_validator.py +229 -0
  109. confiture/testing/fixtures/migration_runner.py +167 -0
  110. confiture/testing/fixtures/schema_snapshotter.py +352 -0
  111. confiture/testing/frameworks/__init__.py +10 -0
  112. confiture/testing/frameworks/mutation.py +587 -0
  113. confiture/testing/frameworks/performance.py +479 -0
  114. confiture/testing/utils/__init__.py +0 -0
  115. fraiseql_confiture-0.3.4.dist-info/METADATA +438 -0
  116. fraiseql_confiture-0.3.4.dist-info/RECORD +119 -0
  117. fraiseql_confiture-0.3.4.dist-info/WHEEL +4 -0
  118. fraiseql_confiture-0.3.4.dist-info/entry_points.txt +2 -0
  119. fraiseql_confiture-0.3.4.dist-info/licenses/LICENSE +21 -0
confiture/core/builder.py
@@ -0,0 +1,500 @@
+ """Schema builder - builds PostgreSQL schemas from DDL files
+
+ The SchemaBuilder concatenates SQL files from db/schema/ in deterministic order
+ to create a complete schema file. This implements "Medium 1: Build from Source DDL".
+
+ Performance: Uses Rust extension (_core) when available for 10-50x speedup.
+ """
+
+ import hashlib
+ from datetime import datetime
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any
+
+ from confiture.config.environment import Environment
+ from confiture.exceptions import SchemaError
+
+ # Try to import Rust extension for 10-50x performance boost
+ _core: Any = None
+ HAS_RUST = False
+
+ if not TYPE_CHECKING:
+     try:
+         from confiture import _core  # type: ignore
+
+         HAS_RUST = True
+     except ImportError:
+         pass
+
+
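A minimal sketch, not shipped in this wheel, of checking whether the accelerated path is active before benchmarking or debugging; it only relies on the module-level HAS_RUST flag set above:

    from confiture.core import builder

    # True when the compiled _core extension imported successfully,
    # False when SchemaBuilder will use its pure-Python fallbacks.
    print("Rust acceleration available:", builder.HAS_RUST)
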
+ class SchemaBuilder:
+     """Build PostgreSQL schema from DDL source files
+
+     The SchemaBuilder discovers SQL files in the schema directory, concatenates
+     them in deterministic order, and generates a complete schema file.
+
+     Attributes:
+         env_config: Environment configuration
+         schema_dir: Base directory for schema files
+
+     Example:
+         >>> builder = SchemaBuilder(env="local")
+         >>> schema = builder.build()
+         >>> print(len(schema))
+         15234
+     """
+
+     def __init__(self, env: str, project_dir: Path | None = None):
+         """Initialize SchemaBuilder with recursive directory support
+
+         Args:
+             env: Environment name (e.g., "local", "production")
+             project_dir: Project root directory. If None, uses current directory.
+         """
+         self.env_config = Environment.load(env, project_dir=project_dir)
+
+         # Validate include_dirs
+         if not self.env_config.include_dirs:
+             raise SchemaError("No include_dirs specified in environment config")
+
+         # Parse include_dirs (support string, dict, and DirectoryConfig formats)
+         self.include_configs = []
+         for include in self.env_config.include_dirs:
+             if isinstance(include, str):
+                 self.include_configs.append(
+                     {
+                         "path": Path(include),
+                         "recursive": True,  # Default recursive for backward compatibility
+                         "include": ["**/*.sql"],
+                         "exclude": [],
+                         "auto_discover": True,
+                         "order": 0,
+                     }
+                 )
+             elif isinstance(include, dict):
+                 recursive = include.get("recursive", True)
+                 default_include = ["**/*.sql"] if recursive else ["*.sql"]
+                 self.include_configs.append(
+                     {
+                         "path": Path(include["path"]),
+                         "recursive": recursive,
+                         "include": include.get("include", default_include),
+                         "exclude": include.get("exclude", []),
+                         "auto_discover": include.get("auto_discover", True),
+                         "order": include.get("order", 0),
+                     }
+                 )
+             elif hasattr(include, "path"):  # DirectoryConfig object
+                 recursive = include.recursive
+                 # If using default include pattern and recursive=False, adjust to non-recursive pattern
+                 include_patterns = include.include
+                 if include_patterns == ["**/*.sql"] and not recursive:
+                     include_patterns = ["*.sql"]
+                 self.include_configs.append(
+                     {
+                         "path": Path(include.path),
+                         "recursive": recursive,
+                         "include": include_patterns,
+                         "exclude": include.exclude,
+                         "auto_discover": include.auto_discover,
+                         "order": include.order,
+                     }
+                 )
+             elif isinstance(include, dict):
+                 self.include_configs.append(
+                     {
+                         "path": Path(include["path"]),
+                         "recursive": include.get("recursive", True),
+                         "include": include.get("include", ["**/*.sql"]),
+                         "exclude": include.get("exclude", []),
+                         "auto_discover": include.get("auto_discover", True),
+                         "order": include.get("order", 0),
+                     }
+                 )
+             elif hasattr(include, "path"):  # DirectoryConfig object
+                 self.include_configs.append(
+                     {
+                         "path": Path(include.path),
+                         "recursive": include.recursive,
+                         "include": include.include,
+                         "exclude": include.exclude,
+                         "auto_discover": include.auto_discover,
+                         "order": include.order,
+                     }
+                 )
+             elif isinstance(include, dict):
+                 self.include_configs.append(
+                     {
+                         "path": Path(include["path"]),
+                         "recursive": include.get("recursive", True),
+                         "order": include.get("order", 0),
+                     }
+                 )
+             elif hasattr(include, "path"):  # DirectoryConfig object
+                 self.include_configs.append(
+                     {
+                         "path": Path(include.path),
+                         "recursive": include.recursive,
+                         "order": include.order,
+                     }
+                 )
+
+         # Sort by order
+         self.include_configs.sort(key=lambda x: int(x["order"]))  # type: ignore
+
+         # Extract paths for backward compatibility
+         self.include_dirs: list[Path] = [cfg["path"] for cfg in self.include_configs]  # type: ignore
+
+         # Base directory for relative path calculation
+         # Find the common parent of all include directories
+         self.base_dir = self._find_common_parent(self.include_dirs)
+
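A minimal sketch, not shipped in this wheel, of the three include_dirs shapes the loop above accepts and the dict each one is normalized to; the paths and the way values reach Environment.load (YAML, TOML, or otherwise) are illustrative assumptions:

    # Illustrative include_dirs entries, not from the wheel.
    include_dirs = [
        "db/schema",  # plain string: recursive, include ["**/*.sql"], order 0
        {
            "path": "db/seeds",       # dict form with explicit options
            "recursive": False,       # default include pattern becomes ["*.sql"]
            "exclude": ["99_dev_only.sql"],
            "order": 10,
        },
        # Objects exposing .path, .recursive, .include, .exclude,
        # .auto_discover and .order (DirectoryConfig) are handled by the
        # hasattr(include, "path") branch.
    ]

    # Each entry is normalized to a dict like:
    #   {"path": Path("db/schema"), "recursive": True, "include": ["**/*.sql"],
    #    "exclude": [], "auto_discover": True, "order": 0}
    # and the resulting list is sorted by "order".
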
+     def _find_common_parent(self, paths: list[Path]) -> Path:
+         """Find common parent directory of all paths.
+
+         Args:
+             paths: List of paths to find common parent
+
+         Returns:
+             Common parent directory
+
+         Example:
+             >>> paths = [Path("db/schema/00_common"), Path("db/seeds/common")]
+             >>> _find_common_parent(paths)
+             Path("db")
+         """
+         if len(paths) == 1:
+             return paths[0]
+
+         # Convert to absolute paths for comparison
+         abs_paths = [p.resolve() for p in paths]
+
+         # Get all parent parts for each path (including the path itself)
+         all_parts = [p.parts for p in abs_paths]
+
+         # Find common prefix
+         common_parts = []
+         for parts_at_level in zip(*all_parts, strict=False):
+             if len(set(parts_at_level)) == 1:
+                 common_parts.append(parts_at_level[0])
+             else:
+                 break
+
+         if not common_parts:
+             # No common parent, use current directory
+             return Path(".")
+
+         # Reconstruct path from common parts
+         return Path(*common_parts)
+
+     def _is_hex_prefix(self, filename: str) -> bool:
+         """Check if filename starts with hexadecimal prefix.
+
+         Hex prefixes must consist of valid hexadecimal characters where
+         all letters are uppercase, followed by an underscore.
+
+         Args:
+             filename: Filename to check
+
+         Returns:
+             True if filename starts with valid hex prefix
+         """
+         parts = filename.split("_", 1)
+         if len(parts) != 2:
+             return False
+         prefix = parts[0]
+
+         # Check that all letters are uppercase
+         if not all(c.isupper() or c.isdigit() for c in prefix):
+             return False
+
+         try:
+             int(prefix, 16)
+             return True
+         except ValueError:
+             return False
+
+     def _hex_sort_key(self, path: Path) -> tuple[float | int, str]:
+         """Generate sort key for hexadecimal-prefixed files.
+
+         Args:
+             path: File path to generate sort key for
+
+         Returns:
+             Tuple for sorting: (hex_value, rest_of_filename) or (inf, filename)
+         """
+         filename = path.stem
+         if self._is_hex_prefix(filename):
+             parts = filename.split("_", 1)
+             hex_value = int(parts[0], 16)
+             rest = parts[1] if len(parts) > 1 else ""
+             return (hex_value, rest)
+         return (float("inf"), filename)
+
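A minimal standalone sketch, not shipped in this wheel, of how the hex-prefix ordering above behaves: stems with an uppercase hexadecimal prefix before an underscore sort by the numeric value of that prefix, and everything else sorts after them by stem. The file names are invented for illustration:

    from pathlib import Path

    def hex_sort_key(path: Path) -> tuple[float | int, str]:
        # Follows the same rule as _is_hex_prefix/_hex_sort_key above.
        stem = path.stem
        prefix, sep, rest = stem.partition("_")
        if sep and prefix and all(c.isupper() or c.isdigit() for c in prefix):
            try:
                return (int(prefix, 16), rest)
            except ValueError:
                pass
        return (float("inf"), stem)

    files = [Path("10_views.sql"), Path("0A_tables.sql"),
             Path("02_roles.sql"), Path("readme.sql")]
    for f in sorted(files, key=hex_sort_key):
        print(f.name)
    # 02_roles.sql   (0x02)
    # 0A_tables.sql  (0x0A)
    # 10_views.sql   (0x10)
    # readme.sql     (no hex prefix, sorts last)

Files without a recognized prefix always land at the end because their key starts with float("inf").
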
+     def find_sql_files(self) -> list[Path]:
+         """Discover SQL files with pattern matching
+
+         Files are returned in deterministic order based on configuration.
+         Supports glob patterns for include/exclude and auto-discovery.
+
+         Returns:
+             Sorted list of SQL file paths
+
+         Raises:
+             SchemaError: If include directories don't exist or no SQL files found
+
+         Example:
+             >>> builder = SchemaBuilder(env="local")
+             >>> files = builder.find_sql_files()
+             >>> print(files[0])
+             /path/to/db/schema/00_common/extensions.sql
+         """
+         all_sql_files = []
+
+         for config in self.include_configs:
+             include_dir: Path = config["path"]  # type: ignore
+             recursive = config["recursive"]
+             include_patterns = config["include"]
+             exclude_patterns = config["exclude"]
+             auto_discover = config["auto_discover"]
+
+             if not include_dir.exists():
+                 if auto_discover:
+                     # Skip non-existent directories in auto-discover mode
+                     continue
+                 else:
+                     raise SchemaError(f"Include directory does not exist: {include_dir}")
+
+             # Find files matching include patterns
+             for pattern in include_patterns:  # type: ignore
+                 if recursive:
+                     sql_files = list(include_dir.rglob(pattern))
+                 else:
+                     sql_files = list(include_dir.glob(pattern))
+
+                 # Filter out excluded patterns
+                 for file in sql_files:
+                     rel_path = file.relative_to(include_dir)
+                     is_excluded = any(
+                         rel_path.match(exclude_pattern)
+                         for exclude_pattern in exclude_patterns  # type: ignore
+                     )
+
+                     if not is_excluded:
+                         all_sql_files.append(file)
+
+         # Filter out excluded directories (legacy support)
+         filtered_files = []
+         exclude_paths = [Path(d) for d in self.env_config.exclude_dirs]
+
+         for file in all_sql_files:
+             # Check if file is in any excluded directory
+             is_excluded = any(file.is_relative_to(exclude_dir) for exclude_dir in exclude_paths)
+             if not is_excluded:
+                 filtered_files.append(file)
+
+         if not filtered_files:
+             include_dirs_str = ", ".join(str(d) for d in self.include_dirs)
+             raise SchemaError(
+                 f"No SQL files found in include directories: {include_dirs_str}\n"
+                 f"Expected files in subdirectories like 00_common/, 10_tables/, etc."
+             )
+
+         # Sort files based on configuration
+         if self.env_config.build.sort_mode == "hex":
+             # Check if any file has hex prefix
+             has_hex = any(self._is_hex_prefix(f.stem) for f in filtered_files)
+
+             if has_hex:
+                 # Sort by hex value
+                 return sorted(filtered_files, key=self._hex_sort_key)
+             else:
+                 # Default alphabetical sort
+                 return sorted(filtered_files)
+         else:
+             # Default alphabetical sort
+             return sorted(filtered_files)
+
+     def build(self, output_path: Path | None = None) -> str:
+         """Build schema by concatenating DDL files
+
+         Generates a complete schema file by concatenating all SQL files in
+         deterministic order, with headers and file separators.
+
+         Performance: Uses Rust extension when available for 10-50x speedup.
+         Falls back gracefully to Python implementation if Rust unavailable.
+
+         Args:
+             output_path: Optional path to write schema file. If None, only returns content.
+
+         Returns:
+             Generated schema content as string
+
+         Raises:
+             SchemaError: If schema build fails
+
+         Example:
+             >>> builder = SchemaBuilder(env="local")
+             >>> schema = builder.build(output_path=Path("schema.sql"))
+             >>> print(f"Generated {len(schema)} bytes")
+         """
+         files = self.find_sql_files()
+
+         # Generate header
+         header = self._generate_header(len(files))
+
+         # Use Rust extension if available (10-50x faster)
+         if HAS_RUST:
+             try:
+                 # Build file content using Rust
+                 file_paths = [str(f) for f in files]
+                 content: str = _core.build_schema(file_paths)
+
+                 # Add headers and separators (Python side for flexibility)
+                 schema = self._add_headers_and_separators(header, files, content)
+             except Exception:
+                 # Fallback to Python if Rust fails
+                 schema = self._build_python(header, files)
+         else:
+             # Pure Python implementation (fallback)
+             schema = self._build_python(header, files)
+
+         # Write to file if requested
+         if output_path:
+             try:
+                 output_path.parent.mkdir(parents=True, exist_ok=True)
+                 output_path.write_text(schema, encoding="utf-8")
+             except Exception as e:
+                 raise SchemaError(f"Error writing schema to {output_path}: {e}") from e
+
+         return schema
+
+     def _build_python(self, header: str, files: list[Path]) -> str:
+         """Pure Python implementation of schema building (fallback)
+
+         Args:
+             header: Schema header
+             files: List of SQL files to concatenate
+
+         Returns:
+             Complete schema content
+         """
+         parts = [header]
+
+         # Concatenate all files
+         for file in files:
+             try:
+                 # Relative path for header
+                 rel_path = file.relative_to(self.base_dir)
+
+                 # Add file separator
+                 parts.append("\n-- ============================================\n")
+                 parts.append(f"-- File: {rel_path}\n")
+                 parts.append("-- ============================================\n\n")
+
+                 # Add file content
+                 content = file.read_text(encoding="utf-8")
+                 parts.append(content)
+
+                 # Ensure newline at end
+                 if not content.endswith("\n"):
+                     parts.append("\n")
+
+             except Exception as e:
+                 raise SchemaError(f"Error reading {file}: {e}") from e
+
+         return "".join(parts)
+
+     def _add_headers_and_separators(self, header: str, _files: list[Path], content: str) -> str:
+         """Add main header to Rust-built content
+
+         The Rust layer now includes file separators, so this function
+         only needs to prepend the main schema header.
+
+         Args:
+             header: Schema header
+             _files: List of SQL files (unused, kept for API compatibility)
+             content: Concatenated content from Rust (includes file separators)
+
+         Returns:
+             Content with main header
+         """
+         # Rust layer now includes file separators, just prepend main header
+         return header + content
+
+     def compute_hash(self) -> str:
+         """Compute deterministic SHA256 hash of schema
+
+         The hash includes both file paths and content, ensuring that any change
+         to the schema (content or structure) is detected.
+
+         Performance: Uses Rust extension when available for 30-60x speedup.
+
+         Returns:
+             SHA256 hexadecimal digest
+
+         Example:
+             >>> builder = SchemaBuilder(env="local")
+             >>> hash1 = builder.compute_hash()
+             >>> # Modify a file...
+             >>> hash2 = builder.compute_hash()
+             >>> assert hash1 != hash2  # Change detected
+         """
+         files = self.find_sql_files()
+
+         # Use Rust extension if available (30-60x faster)
+         if HAS_RUST:
+             try:
+                 file_paths = [str(f) for f in files]
+                 hash_result: str = _core.hash_files(file_paths)
+                 return hash_result
+             except Exception:
+                 # Fallback to Python if Rust fails
+                 pass
+
+         # Pure Python implementation (fallback)
+         hasher = hashlib.sha256()
+
+         for file in files:
+             # Include relative path in hash (detects file renames)
+             rel_path = file.relative_to(self.base_dir)
+             hasher.update(str(rel_path).encode("utf-8"))
+             hasher.update(b"\x00")  # Separator
+
+             # Include file content
+             try:
+                 content = file.read_bytes()
+                 hasher.update(content)
+                 hasher.update(b"\x00")  # Separator
+             except Exception as e:
+                 raise SchemaError(f"Error reading {file} for hash: {e}") from e
+
+         return hasher.hexdigest()
+
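The Python fallback above fixes the hash format: for each discovered file, the path relative to the common base plus a NUL byte, then the raw file bytes plus a NUL byte, all fed to SHA-256. A minimal sketch, not shipped in this wheel, of recomputing that digest externally, e.g. to pin a schema checksum in CI; it assumes the default alphabetical file ordering, and the Rust hash_files path is assumed here to produce the same digest:

    import hashlib
    from pathlib import Path

    def schema_digest(files: list[Path], base_dir: Path) -> str:
        # Same scheme as the Python fallback in compute_hash():
        # relative path, NUL, file bytes, NUL - for every file, in order.
        hasher = hashlib.sha256()
        for file in sorted(files):
            hasher.update(str(file.relative_to(base_dir)).encode("utf-8"))
            hasher.update(b"\x00")
            hasher.update(file.read_bytes())
            hasher.update(b"\x00")
        return hasher.hexdigest()

    # Illustrative usage: compare against a digest stored in version control.
    # base = Path("db")
    # print(schema_digest(list(base.rglob("*.sql")), base))
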
+     def _generate_header(self, file_count: int) -> str:
+         """Generate schema file header
+
+         Args:
+             file_count: Number of SQL files included
+
+         Returns:
+             Header string
+         """
+         timestamp = datetime.now().isoformat()
+         schema_hash = self.compute_hash()
+
+         return f"""-- ============================================
+ -- PostgreSQL Schema for Confiture
+ -- ============================================
+ --
+ -- Environment: {self.env_config.name}
+ -- Generated: {timestamp}
+ -- Schema Hash: {schema_hash}
+ -- Files Included: {file_count}
+ --
+ -- This file was generated by Confiture (confiture build)
+ -- DO NOT EDIT MANUALLY - Edit source files in db/schema/
+ --
+ -- ============================================
+
+ """