iris-devtester 1.8.1 → 1.9.1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iris_devtester/__init__.py +3 -2
- iris_devtester/cli/__init__.py +4 -2
- iris_devtester/cli/__main__.py +1 -1
- iris_devtester/cli/connection_commands.py +31 -51
- iris_devtester/cli/container.py +42 -113
- iris_devtester/cli/container_commands.py +6 -4
- iris_devtester/cli/fixture_commands.py +97 -73
- iris_devtester/config/auto_discovery.py +8 -20
- iris_devtester/config/container_config.py +24 -35
- iris_devtester/config/container_state.py +19 -43
- iris_devtester/config/discovery.py +10 -10
- iris_devtester/config/presets.py +3 -10
- iris_devtester/config/yaml_loader.py +3 -2
- iris_devtester/connections/__init__.py +25 -30
- iris_devtester/connections/connection.py +4 -3
- iris_devtester/connections/dbapi.py +5 -1
- iris_devtester/connections/jdbc.py +2 -6
- iris_devtester/connections/manager.py +1 -1
- iris_devtester/connections/retry.py +2 -5
- iris_devtester/containers/__init__.py +6 -6
- iris_devtester/containers/cpf_manager.py +13 -12
- iris_devtester/containers/iris_container.py +268 -436
- iris_devtester/containers/models.py +18 -43
- iris_devtester/containers/monitor_utils.py +1 -3
- iris_devtester/containers/monitoring.py +31 -46
- iris_devtester/containers/performance.py +5 -5
- iris_devtester/containers/validation.py +27 -60
- iris_devtester/containers/wait_strategies.py +13 -4
- iris_devtester/fixtures/__init__.py +14 -13
- iris_devtester/fixtures/creator.py +127 -555
- iris_devtester/fixtures/loader.py +221 -78
- iris_devtester/fixtures/manifest.py +8 -6
- iris_devtester/fixtures/obj_export.py +45 -35
- iris_devtester/fixtures/validator.py +4 -7
- iris_devtester/integrations/langchain.py +2 -6
- iris_devtester/ports/registry.py +5 -4
- iris_devtester/testing/__init__.py +3 -0
- iris_devtester/testing/fixtures.py +10 -1
- iris_devtester/testing/helpers.py +5 -12
- iris_devtester/testing/models.py +3 -2
- iris_devtester/testing/schema_reset.py +1 -3
- iris_devtester/utils/__init__.py +20 -5
- iris_devtester/utils/container_port.py +2 -6
- iris_devtester/utils/container_status.py +2 -6
- iris_devtester/utils/dbapi_compat.py +29 -14
- iris_devtester/utils/enable_callin.py +5 -7
- iris_devtester/utils/health_checks.py +18 -33
- iris_devtester/utils/iris_container_adapter.py +27 -26
- iris_devtester/utils/password.py +673 -0
- iris_devtester/utils/progress.py +1 -1
- iris_devtester/utils/test_connection.py +4 -6
- {iris_devtester-1.8.1.dist-info → iris_devtester-1.9.1.dist-info}/METADATA +7 -7
- iris_devtester-1.9.1.dist-info/RECORD +66 -0
- {iris_devtester-1.8.1.dist-info → iris_devtester-1.9.1.dist-info}/WHEEL +1 -1
- iris_devtester/utils/password_reset.py +0 -594
- iris_devtester/utils/password_verification.py +0 -350
- iris_devtester/utils/unexpire_passwords.py +0 -168
- iris_devtester-1.8.1.dist-info/RECORD +0 -68
- {iris_devtester-1.8.1.dist-info → iris_devtester-1.9.1.dist-info}/entry_points.txt +0 -0
- {iris_devtester-1.8.1.dist-info → iris_devtester-1.9.1.dist-info}/licenses/LICENSE +0 -0
- {iris_devtester-1.8.1.dist-info → iris_devtester-1.9.1.dist-info}/top_level.txt +0 -0
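Outside the fixtures package, the most visible change in the file list above is the password tooling: `password_reset.py`, `password_verification.py`, and `unexpire_passwords.py` are deleted and a single `iris_devtester/utils/password.py` (+673 lines) takes their place. The diff does not show that module's API, so the sketch below only probes which layout is installed; the helper functions are illustrative, not part of iris-devtester.

```python
# Illustrative compatibility probe (not part of iris-devtester): detect whether
# the installed version uses the consolidated 1.9.x password module or the
# three separate 1.8.x modules.  Module paths come from the file list above;
# no functions inside those modules are assumed.
import importlib.util


def _module_present(name: str) -> bool:
    try:
        return importlib.util.find_spec(name) is not None
    except ModuleNotFoundError:
        # Raised when a parent package (e.g. iris_devtester) is not installed.
        return False


def has_consolidated_password_module() -> bool:
    return _module_present("iris_devtester.utils.password")


def has_legacy_password_modules() -> bool:
    return all(
        _module_present(name)
        for name in (
            "iris_devtester.utils.password_reset",
            "iris_devtester.utils.password_verification",
            "iris_devtester.utils.unexpire_passwords",
        )
    )


if __name__ == "__main__":
    print("1.9.x layout:", has_consolidated_password_module())
    print("1.8.x layout:", has_legacy_password_modules())
```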
iris_devtester/fixtures/creator.py

```diff
@@ -1,21 +1,16 @@
-"""IRIS .DAT Fixture Creator.
-
-This module provides the FixtureCreator class for creating IRIS database
-fixtures by exporting namespaces using BACKUP^DBACK routine.
-"""
-
 import datetime
 import subprocess
+import time
 from pathlib import Path
-from typing import
+from typing import Any, Dict, List, Optional
 
-from iris_devtester.connections import get_connection
 from iris_devtester.config import IRISConfig
+from iris_devtester.connections import get_connection
 
 from .manifest import (
+    FixtureCreateError,
     FixtureManifest,
     TableInfo,
-    FixtureCreateError,
 )
 from .validator import FixtureValidator
 
```
```diff
@@ -23,53 +18,11 @@ from .validator import FixtureValidator
 class FixtureCreator:
     """
     Creates .DAT fixtures by exporting IRIS namespaces.
-
-    This class creates database fixtures by:
-    1. Exporting entire namespace to IRIS.DAT via BACKUP^DBACK
-    2. Querying table list with row counts
-    3. Calculating SHA256 checksum
-    4. Generating manifest.json
-
-    Example:
-        >>> from iris_devtester.fixtures import FixtureCreator
-        >>> creator = FixtureCreator()
-        >>> manifest = creator.create_fixture(
-        ...     fixture_id="test-data",
-        ...     namespace="USER",
-        ...     output_dir="./fixtures/test-data",
-        ...     description="Test fixture with sample data"
-        ... )
-        >>> print(f"Created fixture with {len(manifest.tables)} tables")
-
-    Constitutional Principle #2: DBAPI First
-    Constitutional Principle #5: Fail Fast with Guidance
-    Constitutional Principle #7: Medical-Grade Reliability
     """
 
-    def __init__(
-
-
-
-        Args:
-            connection_config: Optional IRIS connection configuration.
-                If None, auto-discovers from environment.
-            container: Optional IRISContainer for docker exec operations.
-                Required for BACKUP/RESTORE operations.
-
-        Example:
-            >>> # Auto-discover connection
-            >>> creator = FixtureCreator()
-
-            >>> # With container (for docker exec)
-            >>> from iris_devtester.containers import IRISContainer
-            >>> with IRISContainer.community() as container:
-            ...     creator = FixtureCreator(container=container)
-
-            >>> # Explicit config
-            >>> from iris_devtester.config import IRISConfig
-            >>> config = IRISConfig(host="localhost", port=1972)
-            >>> creator = FixtureCreator(config)
-        """
+    def __init__(
+        self, connection_config: Optional[IRISConfig] = None, container: Optional[Any] = None
+    ):
         self.connection_config = connection_config
         self.container = container
         self.validator = FixtureValidator()
```
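The constructor keeps the same two parameters that the removed 1.8.1 docstring documented, so the old call patterns still apply. A minimal sketch based on that removed docstring (in particular, `IRISContainer.community()` is assumed to be unchanged in 1.9.1):

```python
# Constructor usage under the new signature above.  The three patterns are
# lifted from the 1.8.1 docstring that this hunk deletes; treat them as a
# sketch, not verified 1.9.1 examples.
from iris_devtester.config import IRISConfig
from iris_devtester.containers import IRISContainer
from iris_devtester.fixtures import FixtureCreator

# 1. Auto-discover connection settings from the environment.
creator = FixtureCreator()

# 2. Attach a running container so the docker-exec based export can work.
with IRISContainer.community() as container:
    creator = FixtureCreator(container=container)

# 3. Explicit connection configuration.
config = IRISConfig(host="localhost", port=1972)
creator = FixtureCreator(connection_config=config)
```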
```diff
@@ -84,80 +37,18 @@
         version: str = "1.0.0",
         features: Optional[Dict[str, Any]] = None,
     ) -> FixtureManifest:
-        """
-        Create fixture by exporting IRIS namespace.
-
-        Steps:
-        1. Verify namespace exists
-        2. Create output directory
-        3. Export namespace to IRIS.DAT via BACKUP^DBACK
-        4. Query table list with row counts
-        5. Calculate SHA256 checksum
-        6. Generate and save manifest.json
-
-        Args:
-            fixture_id: Unique identifier (e.g., "test-entities-100")
-            namespace: Source namespace to export (e.g., "USER", "USER_TEST_100")
-            output_dir: Output directory path (will be created if doesn't exist)
-            description: Human-readable description
-            version: Semantic version (default: "1.0.0")
-            features: Optional custom metadata
-
-        Returns:
-            FixtureManifest with complete fixture metadata
-
-        Raises:
-            FileExistsError: If output directory already exists
-            FixtureCreateError: If creation fails (with remediation guidance)
-
-        Example:
-            >>> creator = FixtureCreator()
-            >>> manifest = creator.create_fixture(
-            ...     fixture_id="test-entities-100",
-            ...     namespace="USER_TEST_100",
-            ...     output_dir="./fixtures/test-entities-100",
-            ...     description="Test data with 100 RAG entities"
-            ... )
-        """
         output_path = Path(output_dir)
 
-        # Check if output directory already exists
         if output_path.exists():
-            raise FileExistsError(
-                f"Fixture directory already exists: {output_dir}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Cannot overwrite existing fixture for safety.\n"
-                "\n"
-                "How to fix it:\n"
-                f"  1. Delete existing fixture: rm -rf {output_dir}\n"
-                "  2. Choose different output path\n"
-                "  3. Use refresh_fixture() to update existing fixture\n"
-            )
+            raise FileExistsError(f"Fixture directory already exists: {output_dir}")
 
-
-        try:
-            output_path.mkdir(parents=True, exist_ok=False)
-        except Exception as e:
-            raise FixtureCreateError(
-                f"Failed to create output directory: {output_dir}\n"
-                f"Error: {e}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Could not create fixture directory.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Check directory permissions\n"
-                "  2. Verify parent directory exists\n"
-                "  3. Check disk space: df -h\n"
-            )
+        output_path.mkdir(parents=True, exist_ok=False)
 
         # Export namespace to IRIS.DAT
         dat_file_path = output_path / "IRIS.DAT"
         try:
             self.export_namespace_to_dat(namespace, str(dat_file_path))
         except Exception as e:
-            # Cleanup on failure
             try:
                 output_path.rmdir()
             except:
```
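With the guided error strings gone, `create_fixture()` now surfaces a plain `FileExistsError` for an existing output directory and lets `mkdir()` failures propagate as ordinary `OSError`s, so remediation moves to the call site. A sketch of a caller under 1.9.1, with argument values taken from the removed docstring example (and a container attached, since the new export path requires one):

```python
# Caller-side handling of the slimmed-down error behaviour shown above.
# Values mirror the removed 1.8.1 docstring example; IRISContainer.community()
# is assumed to still exist in 1.9.1.
import shutil
from pathlib import Path

from iris_devtester.containers import IRISContainer
from iris_devtester.fixtures import FixtureCreator

output_dir = Path("./fixtures/test-entities-100")
if output_dir.exists():
    # 1.9.1 raises a bare FileExistsError instead of printing remediation steps.
    shutil.rmtree(output_dir)

with IRISContainer.community() as container:
    creator = FixtureCreator(container=container)
    manifest = creator.create_fixture(
        fixture_id="test-entities-100",
        namespace="USER_TEST_100",
        output_dir=str(output_dir),
        description="Test data with 100 RAG entities",
    )

print(f"Created fixture with {len(manifest.tables)} tables")
```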
```diff
@@ -168,7 +59,20 @@
         iris_version = self._get_iris_version()
 
         # Get table list with row counts
-
+        import dataclasses
+
+        from iris_devtester.config import discover_config
+        from iris_devtester.connections import get_connection as get_conn_factory
+
+        base_config = self.connection_config or discover_config()
+        ns_config = dataclasses.replace(base_config, namespace=namespace)
+
+        ns_connection = get_conn_factory(ns_config)
+
+        try:
+            tables = self.get_namespace_tables(ns_connection, namespace)
+        finally:
+            ns_connection.close()
 
         # Calculate checksum
         checksum = self.calculate_checksum(str(dat_file_path))
```
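The table inventory is now read over a connection bound to the exported namespace: instead of mutating a shared config, the code derives a copy with `dataclasses.replace()` and closes the scoped connection in a `finally` block. The pattern in isolation (ExampleConfig is a stand-in; the hunk applies the same call to `IRISConfig`, which therefore exposes a `namespace` field):

```python
# dataclasses.replace() builds a copy of a dataclass with selected fields
# overridden, leaving the original untouched -- the same pattern the hunk above
# uses to get a namespace-scoped IRISConfig.  ExampleConfig is a stand-in so the
# snippet runs without iris-devtester installed.
import dataclasses


@dataclasses.dataclass(frozen=True)
class ExampleConfig:
    host: str = "localhost"
    port: int = 1972
    namespace: str = "USER"


base = ExampleConfig()
scoped = dataclasses.replace(base, namespace="USER_TEST_100")

assert base.namespace == "USER"             # shared base config is not mutated
assert scoped.namespace == "USER_TEST_100"  # copy carries the override
assert scoped.host == base.host             # every other field is copied as-is
```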
```diff
@@ -179,7 +83,7 @@
             version=version,
             schema_version="1.0",
             description=description,
-            created_at=datetime.datetime.
+            created_at=datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z",
             iris_version=iris_version,
             namespace=namespace,
             dat_file="IRIS.DAT",
```
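The manifest timestamp switches from the naive (and, since Python 3.12, deprecated) `utcnow()` to the timezone-aware `now(datetime.timezone.utc)`. Note that the new line still appends a literal `"Z"` after `isoformat()`, which for an aware datetime already ends in `+00:00`, so 1.9.1 manifests carry a `...+00:00Z` suffix:

```python
# Behaviour of the old and new timestamp expressions, for comparison.
import datetime

old_style = datetime.datetime.utcnow().isoformat() + "Z"
new_style = datetime.datetime.now(datetime.timezone.utc).isoformat() + "Z"

print(old_style)  # e.g. 2025-06-01T12:34:56.789012Z
print(new_style)  # e.g. 2025-06-01T12:34:56.789012+00:00Z  (doubled UTC suffix)
```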
```diff
@@ -196,471 +100,139 @@
 
     def export_namespace_to_dat(self, namespace: str, dat_file_path: str) -> str:
         """
-        Export
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        Export namespace to a fixture package (classes + globals).
+
+        We export:
+        1. Class definitions (XML) - for SQL schema
+        2. Globals (GOF) - for data
+        """
+        if self.container is None:
+            raise FixtureCreateError("Export operations require container parameter")
+
+        container_name = self.container.get_container_name()
+        container_gof_path = f"/tmp/GLOBALS_{namespace}.gof"
+        container_cls_path = f"/tmp/CLASSES_{namespace}.xml"
+
+        # Step 1: Export class definitions (for SQL schema)
+        export_classes_script = f"""
+Set clsFile = "{container_cls_path}"
+Set sc = $SYSTEM.OBJ.ExportAllClasses(clsFile)
+If 'sc Write "WARN_CLS:",$System.Status.GetErrorText(sc),!
+Write "CLASSES_DONE"
+Halt
+"""
+        result = subprocess.run(
+            ["docker", "exec", "-i", container_name, "iris", "session", "IRIS", "-U", namespace],
+            input=export_classes_script.encode("utf-8"),
+            capture_output=True,
+            timeout=120,
+        )
+        stdout = result.stdout.decode("utf-8", errors="replace")
+        if "CLASSES_DONE" not in stdout:
+            # Non-fatal - some namespaces may have no user classes
+            pass
+
+        # Step 2: Export globals (for data)
+        export_globals_script = f"""
+Set file = "{container_gof_path}"
+Set glist = ""
+Set g = "" For Set g = $Order(^$Global(g)) Quit:g="" If $Extract(g,1)'="%" Set glist = glist_$Select(glist="":"",1:",")_g
+If glist = "" Write "WARN:No user globals found",! Write "SUCCESS" Halt
+Write "Exporting globals: ",glist,!
+Set sc = ##class(%Library.Global).Export($Namespace, glist, file, 7)
+If 'sc Write "ERR:",$System.Status.GetErrorText(sc) Halt
+If '##class(%File).Exists(file) Write "ERR_FILE_NOT_FOUND" Halt
+Write "SUCCESS"
+Halt
+"""
+        result = subprocess.run(
+            ["docker", "exec", "-i", container_name, "iris", "session", "IRIS", "-U", namespace],
+            input=export_globals_script.encode("utf-8"),
+            capture_output=True,
+            timeout=120,
+        )
 
-
-
-
-        # BACKUP to /tmp inside container, then docker cp to host
-        # This avoids volume mounting complexity
-        container_path = f"/tmp/IRIS_{namespace}.DAT"
-
-        # Simpler approach: Get database file path and copy it
-        # This is essentially an "external backup" approach
-        # Get namespace configuration to find database directory
-        # Use single-line commands to avoid ObjectScript block syntax issues
-        objectscript_commands = f"""Do ##class(Config.Namespaces).Get("{namespace}",.nsProps)
-Set dbName = $Get(nsProps("Globals"))
-If dbName="" Write "ERROR_NO_NAMESPACE" Halt
-Do ##class(Config.Databases).Get(dbName,.dbProps)
-Write dbProps("Directory")
-Halt"""
-
-        cmd = [
-            "docker",
-            "exec",
-            "-i",
-            container_name,
-            "iris", "session", "IRIS", "-U", "%SYS"
-        ]
-
-        result = subprocess.run(
-            cmd,
-            input=f"{objectscript_commands}\nHalt\n",
-            capture_output=True,
-            text=True,
-            timeout=60
-        )
+        stdout = result.stdout.decode("utf-8", errors="replace")
+        if "SUCCESS" not in stdout:
+            raise FixtureCreateError(f"GOF export failed: {stdout}")
 
-
-
-
-
-
-            f"stderr: {result.stderr}\n"
-        )
-
-        # Extract database directory from output
-        # Find the line that looks like a directory path (starts with /)
-        db_dir = None
-        for line in result.stdout.strip().split('\n'):
-            line = line.strip()
-            if line.startswith('/') and 'mgr' in line:
-                db_dir = line.rstrip('/')
-                break
-
-        if not db_dir:
-            raise FixtureCreateError(
-                f"Could not parse database directory from output:\n"
-                f"stdout: {result.stdout}\n"
-                f"stderr: {result.stderr}\n"
-            )
-
-        # Database file is IRIS.DAT in that directory
-        db_file = f"{db_dir}/IRIS.DAT"
-
-        # Copy database file to /tmp in container
-        cp_internal_cmd = [
+        # Copy both files from container to host
+        # The dat_file_path is expected to be IRIS.DAT, we'll use it as base
+        base_path = Path(dat_file_path).parent
+        subprocess.run(
+            [
                 "docker",
-                "exec",
-                container_name,
                 "cp",
-
-
-        ]
-
-
-            cp_internal_cmd, capture_output=True, text=True, timeout=30
-        )
+                f"{container_name}:{container_gof_path}",
+                str(base_path / "globals.gof"),
+            ],
+            check=True,
+        )
 
-
-
-
-
-            "exec",
-            container_name,
-            "ls",
-            "-la",
-            container_path
-        ]
-
-        check_result = subprocess.run(
-            check_cmd, capture_output=True, text=True, timeout=10
-        )
-
-        if check_result.returncode != 0:
-            raise FixtureCreateError(
-                f"BACKUP reported success but file not in container:\n"
-                f"File path: {container_path}\n"
-                f"BACKUP stdout: {result.stdout}\n"
-                f"BACKUP stderr: {result.stderr}\n"
-                f"ls check: {check_result.stderr}\n"
-                "\n"
-                "What went wrong:\n"
-                "  BackupGeneral() may have failed silently.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Check IRIS logs in container\n"
-                "  2. Verify backup permissions\n"
-                "  3. Check disk space in container\n"
-            )
-
-        # Copy file from container to host
-        cp_cmd = [
+        # Try to copy classes file (may not exist if no user classes)
+        try:
+            subprocess.run(
+                [
                 "docker",
                 "cp",
-            f"{container_name}:{
-            str(
-        ]
-
-        cp_result = subprocess.run(
-            cp_cmd, capture_output=True, text=True, timeout=30
-        )
-
-        if cp_result.returncode != 0:
-            raise FixtureCreateError(
-                f"Failed to copy file from container:\n"
-                f"stdout: {cp_result.stdout}\n"
-                f"stderr: {cp_result.stderr}\n"
-            )
-
-        # Verify file was copied
-        if not Path(dat_file_path).exists():
-            raise FixtureCreateError(
-                f"Docker cp succeeded but file not found: {dat_file_path}\n"
-                "\n"
-                "What went wrong:\n"
-                "  File copy from container to host failed.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Check file permissions\n"
-                "  2. Verify output directory exists\n"
-                "  3. Check disk space\n"
-            )
-
-        return dat_file_path
-        else:
-            raise FixtureCreateError(
-                f"Failed to copy database file for namespace '{namespace}'\n"
-                f"Database file: {db_file}\n"
-                f"Container path: {container_path}\n"
-                f"cp stdout: {cp_internal_result.stdout}\n"
-                f"cp stderr: {cp_internal_result.stderr}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Could not copy IRIS.DAT file from database directory.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Check database directory exists\n"
-                "  2. Verify IRIS.DAT file is present\n"
-                "  3. Check file permissions\n"
-                "  4. Verify disk space: df -h\n"
-            )
-
-        except subprocess.TimeoutExpired:
-            raise FixtureCreateError(
-                f"Timeout during BACKUP of namespace '{namespace}'\n"
-                "\n"
-                "What went wrong:\n"
-                "  BACKUP operation took longer than 60 seconds.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Check namespace size (large namespaces take longer)\n"
-                "  2. Verify IRIS is responsive\n"
-                "  3. Check disk I/O performance\n"
+                    f"{container_name}:{container_cls_path}",
+                    str(base_path / "classes.xml"),
+                ],
+                check=True,
             )
+        except subprocess.CalledProcessError:
+            pass  # No classes to export
 
-
-
-
-                "\n"
-                "What went wrong:\n"
-                "  Cannot execute BACKUP via docker exec.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Verify Docker is installed and in PATH\n"
-                "  2. Check Docker daemon is running\n"
-            )
-
-        except Exception as e:
-            if isinstance(e, FixtureCreateError):
-                raise
-            raise FixtureCreateError(
-                f"Failed to export namespace '{namespace}'\n"
-                f"Error: {e}\n"
-                "\n"
-                "What went wrong:\n"
-                "  An error occurred during namespace backup.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Verify IRIS container is running\n"
-                "  2. Check container logs: docker logs <container>\n"
-                "  3. Try listing namespaces: do $SYSTEM.OBJ.ListNamespaces()\n"
-            )
+        # Create a marker file at the expected dat_file_path location
+        Path(dat_file_path).write_text("GOF_FIXTURE")
+        return dat_file_path
 
     def calculate_checksum(self, dat_file_path: str) -> str:
-        """
-        Calculate SHA256 checksum for .DAT file.
-
-        Args:
-            dat_file_path: Path to .DAT file
-
-        Returns:
-            SHA256 checksum (format: "sha256:abc123...")
-
-        Raises:
-            FileNotFoundError: If .DAT file doesn't exist
-
-        Example:
-            >>> creator = FixtureCreator()
-            >>> checksum = creator.calculate_checksum("./fixtures/test/IRIS.DAT")
-            >>> print(f"Checksum: {checksum}")
-        """
         return self.validator.calculate_sha256(dat_file_path)
 
     def get_namespace_tables(self, connection: Any = None, namespace: str = "") -> List[TableInfo]:
-        """
-        Get list of user tables in namespace with row counts.
-
-        Args:
-            connection: Active DBAPI connection (optional)
-            namespace: Source namespace (e.g., "USER")
-
-        Returns:
-            List of TableInfo objects
-        """
         if connection is None:
             connection = self.get_connection()
-        if not namespace:
-            namespace = "USER"
 
         tables = []
-
-
-
-
-
-
-
-            WHERE TABLE_TYPE = 'BASE TABLE'
-            AND TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA', '%SYS', '%Library')
-            """
-        )
-
-        for row in cursor.fetchall():
-            schema_name = row[0]
-            table_name = row[1]
-            qualified_name = f"{schema_name}.{table_name}"
-
-            # Get row count
-            try:
-                cursor.execute(f"SELECT COUNT(*) FROM {qualified_name}")
-                count_row = cursor.fetchone()
-                row_count = count_row[0] if count_row else 0
-
-                tables.append(TableInfo(name=qualified_name, row_count=row_count))
-            except Exception:
-                # Skip tables we can't count (permissions, corrupted, etc.)
-                continue
-
-        cursor.close()
-        return tables
-
-        except Exception as e:
-            raise FixtureCreateError(
-                f"Failed to query tables in namespace '{namespace}'\n"
-                f"Error: {e}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Could not retrieve table list from namespace.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Verify namespace exists: do $SYSTEM.OBJ.ListNamespaces()\n"
-                "  2. Check user has SELECT permission\n"
-                "  3. Try querying tables manually: SELECT * FROM INFORMATION_SCHEMA.TABLES\n"
-            )
-
-    def refresh_fixture(self, fixture_dir: str, namespace: str) -> FixtureManifest:
+        cursor = connection.cursor()
+        cursor.execute(
+            """
+            SELECT TABLE_SCHEMA, TABLE_NAME
+            FROM INFORMATION_SCHEMA.TABLES
+            WHERE TABLE_TYPE = 'BASE TABLE'
+            AND TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA', '%SYS', '%Library')
             """
-
-
-        This updates the IRIS.DAT file, recalculates checksum, and updates manifest.
-
-        Args:
-            fixture_dir: Path to existing fixture directory
-            namespace: Source namespace to re-export
-
-        Returns:
-            Updated FixtureManifest
-
-        Raises:
-            FileNotFoundError: If fixture directory doesn't exist
-            FixtureCreateError: If refresh fails
-
-        Example:
-            >>> creator = FixtureCreator()
-            >>> manifest = creator.refresh_fixture(
-            ...     "./fixtures/test-100",
-            ...     namespace="USER_TEST_100"
-            ... )
-            >>> print(f"Refreshed, new checksum: {manifest.checksum}")
-        """
-        fixture_path = Path(fixture_dir)
-
-        if not fixture_path.exists():
-            raise FileNotFoundError(
-                f"Fixture directory not found: {fixture_dir}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Cannot refresh non-existent fixture.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Verify fixture path is correct\n"
-                "  2. Use create_fixture() for new fixtures\n"
-            )
-
-        manifest_file = fixture_path / "manifest.json"
-        if not manifest_file.exists():
-            raise FileNotFoundError(
-                f"Manifest not found: {manifest_file}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Fixture directory exists but manifest.json is missing.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Re-create fixture: use create_fixture()\n"
-                "  2. Restore manifest.json from backup\n"
-            )
-
-        # Load existing manifest
-        manifest = FixtureManifest.from_file(str(manifest_file))
-
-        # Create backup of old manifest
-        backup_file = fixture_path / "manifest.json.backup"
-        manifest.to_file(str(backup_file))
-
-        # Re-export namespace to IRIS.DAT
-        dat_file_path = fixture_path / manifest.dat_file
-        try:
-            # Remove old .DAT file
-            if dat_file_path.exists():
-                dat_file_path.unlink()
-
-            # Export new .DAT file
-            self.export_namespace_to_dat(namespace, str(dat_file_path))
-
-            # Get updated table list
-            tables = self.get_namespace_tables(self.get_connection(), namespace)
-
-            # Recalculate checksum
-            checksum = self.calculate_checksum(str(dat_file_path))
-
-            # Update manifest
-            manifest.tables = tables
-            manifest.checksum = checksum
-            manifest.created_at = datetime.datetime.utcnow().isoformat() + "Z"
-            manifest.iris_version = self._get_iris_version()
-
-            # Save updated manifest
-            manifest.to_file(str(manifest_file))
-
-            return manifest
+        )
 
-
-
-
-
-
-
-
-
-
-
-                f"Error: {e}\n"
-                "\n"
-                "What went wrong:\n"
-                "  Could not re-export namespace to update fixture.\n"
-                "\n"
-                "How to fix it:\n"
-                "  1. Verify namespace still exists\n"
-                "  2. Check IRIS connection\n"
-                "  3. Review previous manifest backup: manifest.json.backup\n"
-            )
+        for row in cursor.fetchall():
+            qualified_name = f"{row[0]}.{row[1]}"
+            try:
+                cursor.execute(f"SELECT COUNT(*) FROM {qualified_name}")
+                row_count = cursor.fetchone()[0]
+                tables.append(TableInfo(name=qualified_name, row_count=row_count))
+            except:
+                continue
+        cursor.close()
+        return tables
 
     def get_connection(self) -> Any:
-        """
-        Get or create IRIS connection.
-
-        Returns:
-            IRIS database connection (DBAPI)
-
-        Raises:
-            ConnectionError: If connection fails
-
-        Example:
-            >>> creator = FixtureCreator()
-            >>> conn = creator.get_connection()
-            >>> cursor = conn.cursor()
-        """
         if self._connection is None:
-
+            # Use the modern connection manager which has automatic password reset remediation
+            from iris_devtester.connections.connection import (
+                get_connection as get_modern_connection,
+            )
+
+            self._connection = get_modern_connection(self.connection_config)
         return self._connection
 
     def _get_iris_version(self) -> str:
-        """
-        Get IRIS version from system.
-
-        Returns:
-            IRIS version string (e.g., "2024.1")
-        """
         try:
             conn = self.get_connection()
             cursor = conn.cursor()
             cursor.execute("SELECT $SYSTEM.Version.GetVersion()")
             row = cursor.fetchone()
             cursor.close()
-            if row
-
-            return "unknown"
-        except Exception:
+            return str(row[0]) if row else "unknown"
+        except:
             return "unknown"
```
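Taken together, the rewritten `export_namespace_to_dat()` no longer copies the raw database file. It writes `globals.gof` (a GOF export of the namespace's user globals) and, when present, `classes.xml` (class definitions) next to the output path, and leaves a small text marker named `IRIS.DAT` containing `GOF_FIXTURE`; per the `create_fixture()` hunk above, the manifest checksum is computed over that marker rather than the exported data. A consumer can tell the new layout apart from a legacy raw-.DAT fixture with a check like the one below (the helper is illustrative, not part of the package):

```python
# Illustrative check for the 1.9.x fixture layout described above.
from pathlib import Path

GOF_MARKER = "GOF_FIXTURE"  # literal written by export_namespace_to_dat() in 1.9.1


def is_gof_fixture(fixture_dir: str) -> bool:
    """Return True if the fixture uses the classes.xml + globals.gof layout."""
    root = Path(fixture_dir)
    dat = root / "IRIS.DAT"
    if not dat.exists():
        return False
    # Legacy fixtures keep a real (binary, multi-megabyte) database here; the
    # new layout keeps a tiny text marker, so reading a short prefix is enough.
    try:
        prefix = dat.read_bytes()[:64]
    except OSError:
        return False
    has_marker = prefix.decode("utf-8", errors="ignore").startswith(GOF_MARKER)
    return has_marker and (root / "globals.gof").exists()
```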