pum 1.3.0__tar.gz → 1.3.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {pum-1.3.0 → pum-1.3.2}/PKG-INFO +1 -1
  2. {pum-1.3.0 → pum-1.3.2}/pum/hook.py +13 -21
  3. {pum-1.3.0 → pum-1.3.2}/pum/pum_config.py +16 -4
  4. {pum-1.3.0 → pum-1.3.2}/pum/schema_migrations.py +19 -23
  5. {pum-1.3.0 → pum-1.3.2}/pum.egg-info/PKG-INFO +1 -1
  6. {pum-1.3.0 → pum-1.3.2}/pum.egg-info/SOURCES.txt +0 -1
  7. {pum-1.3.0 → pum-1.3.2}/test/test_hooks.py +76 -26
  8. pum-1.3.0/test/test_transaction_fix.py +0 -100
  9. {pum-1.3.0 → pum-1.3.2}/LICENSE +0 -0
  10. {pum-1.3.0 → pum-1.3.2}/README.md +0 -0
  11. {pum-1.3.0 → pum-1.3.2}/pum/__init__.py +0 -0
  12. {pum-1.3.0 → pum-1.3.2}/pum/changelog.py +0 -0
  13. {pum-1.3.0 → pum-1.3.2}/pum/checker.py +0 -0
  14. {pum-1.3.0 → pum-1.3.2}/pum/cli.py +0 -0
  15. {pum-1.3.0 → pum-1.3.2}/pum/config_model.py +0 -0
  16. {pum-1.3.0 → pum-1.3.2}/pum/connection.py +0 -0
  17. {pum-1.3.0 → pum-1.3.2}/pum/dependency_handler.py +0 -0
  18. {pum-1.3.0 → pum-1.3.2}/pum/dumper.py +0 -0
  19. {pum-1.3.0 → pum-1.3.2}/pum/exceptions.py +0 -0
  20. {pum-1.3.0 → pum-1.3.2}/pum/feedback.py +0 -0
  21. {pum-1.3.0 → pum-1.3.2}/pum/info.py +0 -0
  22. {pum-1.3.0 → pum-1.3.2}/pum/parameter.py +0 -0
  23. {pum-1.3.0 → pum-1.3.2}/pum/report_generator.py +0 -0
  24. {pum-1.3.0 → pum-1.3.2}/pum/role_manager.py +0 -0
  25. {pum-1.3.0 → pum-1.3.2}/pum/sql_content.py +0 -0
  26. {pum-1.3.0 → pum-1.3.2}/pum/upgrader.py +0 -0
  27. {pum-1.3.0 → pum-1.3.2}/pum.egg-info/dependency_links.txt +0 -0
  28. {pum-1.3.0 → pum-1.3.2}/pum.egg-info/entry_points.txt +0 -0
  29. {pum-1.3.0 → pum-1.3.2}/pum.egg-info/requires.txt +0 -0
  30. {pum-1.3.0 → pum-1.3.2}/pum.egg-info/top_level.txt +0 -0
  31. {pum-1.3.0 → pum-1.3.2}/pyproject.toml +0 -0
  32. {pum-1.3.0 → pum-1.3.2}/requirements/base.txt +0 -0
  33. {pum-1.3.0 → pum-1.3.2}/requirements/development.txt +0 -0
  34. {pum-1.3.0 → pum-1.3.2}/requirements/html.txt +0 -0
  35. {pum-1.3.0 → pum-1.3.2}/setup.cfg +0 -0
  36. {pum-1.3.0 → pum-1.3.2}/test/test_changelog.py +0 -0
  37. {pum-1.3.0 → pum-1.3.2}/test/test_checker.py +0 -0
  38. {pum-1.3.0 → pum-1.3.2}/test/test_config.py +0 -0
  39. {pum-1.3.0 → pum-1.3.2}/test/test_dumper.py +0 -0
  40. {pum-1.3.0 → pum-1.3.2}/test/test_feedback.py +0 -0
  41. {pum-1.3.0 → pum-1.3.2}/test/test_roles.py +0 -0
  42. {pum-1.3.0 → pum-1.3.2}/test/test_schema_migrations.py +0 -0
  43. {pum-1.3.0 → pum-1.3.2}/test/test_sql_content.py +0 -0
  44. {pum-1.3.0 → pum-1.3.2}/test/test_upgrader.py +0 -0
{pum-1.3.0 → pum-1.3.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pum
-Version: 1.3.0
+Version: 1.3.2
 Summary: Pum stands for "Postgres Upgrades Manager". It is a Database migration management tool very similar to flyway-db or Liquibase, based on metadata tables.
 Author-email: Denis Rouzaud <denis@opengis.ch>
 License-Expression: GPL-2.0-or-later
{pum-1.3.0 → pum-1.3.2}/pum/hook.py

@@ -93,7 +93,6 @@ class HookHandler:
         self.code = code
         self.hook_instance = None
         self.sys_path_additions = []  # Store paths to add during execution
-        self._imported_modules = []  # Track modules imported by this hook

         if file:
             if isinstance(file, str):
@@ -122,22 +121,25 @@ class HookHandler:
         if base_path_str not in sys.path and base_path_str != parent_dir:
             self.sys_path_additions.append(base_path_str)

-        # Temporarily add paths for module loading
-        for path in self.sys_path_additions:
+        # Temporarily add paths for module loading - insert at position 0 for priority
+        for path in reversed(self.sys_path_additions):
             sys.path.insert(0, path)

-        # Track modules before loading to detect new imports
-        modules_before = set(sys.modules.keys())
-
         try:
-            spec = importlib.util.spec_from_file_location(self.file.stem, self.file)
+            logger.debug(f"Loading hook from: {self.file}")
+            logger.debug(f"sys.path additions: {self.sys_path_additions}")
+            spec = importlib.util.spec_from_file_location(
+                self.file.stem,
+                self.file,
+                submodule_search_locations=[parent_dir],
+            )
             module = importlib.util.module_from_spec(spec)
+            # Set __path__ to enable package-like imports from the hook's directory
+            module.__path__ = [parent_dir]
+            # Add to sys.modules before executing so imports can find it
+            sys.modules[self.file.stem] = module
             spec.loader.exec_module(module)

-            # Track modules that were imported by this hook
-            modules_after = set(sys.modules.keys())
-            self._imported_modules = list(modules_after - modules_before)
-
             # Check that the module contains a class named Hook inheriting from HookBase
             # Do this BEFORE removing paths from sys.path
             hook_class = getattr(module, "Hook", None)
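The loading change above follows the standard importlib recipe for importing a module from an explicit file path, extended so the hook module behaves like a package rooted at its own directory. A minimal standalone sketch of that pattern (the helper name and arguments are illustrative, not part of pum's API):

import importlib.util
import sys
from pathlib import Path


def load_from_path(file: Path, search_dir: Path):
    """Illustrative helper: load a module from a file path, package-style."""
    spec = importlib.util.spec_from_file_location(
        file.stem,
        file,
        submodule_search_locations=[str(search_dir)],
    )
    module = importlib.util.module_from_spec(spec)
    module.__path__ = [str(search_dir)]  # lets "import <stem>.<name>" resolve next to the file
    sys.modules[file.stem] = module      # register before exec so nested imports can find it
    spec.loader.exec_module(module)
    return module

Registering the module in sys.modules before exec_module is what allows code inside the hook (and its submodules) to import it while it is still being executed.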
@@ -182,16 +184,6 @@ class HookHandler:
             if path in sys.path:
                 sys.path.remove(path)

-    def cleanup_imports(self):
-        """Remove imported modules from sys.modules cache.
-        This should be called when switching to a different module version
-        to prevent import conflicts.
-        """
-        for module_name in self._imported_modules:
-            if module_name in sys.modules:
-                del sys.modules[module_name]
-        self._imported_modules.clear()
-
     def __repr__(self) -> str:
         """Return a string representation of the Hook instance."""
         return f"<hook: {self.file}>"
{pum-1.3.0 → pum-1.3.2}/pum/pum_config.py

@@ -189,10 +189,22 @@ class PumConfig:
         This should be called when switching to a different module version to ensure
         that cached imports from the previous version don't cause conflicts.
         """
-        for handler in self._cached_handlers:
-            if hasattr(handler, "cleanup_imports"):
-                handler.cleanup_imports()
-        # Clear the cache after cleanup
+        # Clear all modules that were loaded from this base_path
+        base_path_str = str(self._base_path.resolve())
+        modules_to_remove = []
+
+        for module_name, module in list(sys.modules.items()):
+            if module is None:
+                continue
+            module_file = getattr(module, "__file__", None)
+            if module_file and module_file.startswith(base_path_str):
+                modules_to_remove.append(module_name)
+
+        for module_name in modules_to_remove:
+            if module_name in sys.modules:
+                logger.debug(f"Removing cached module: {module_name}")
+                del sys.modules[module_name]
+
         self._cached_handlers.clear()

     def parameters(self) -> list[ParameterDefinition]:
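Usage-wise, the new path-based cleanup is driven by PumConfig.cleanup_hook_imports(). A short sketch, assuming a config built the same way as in the project's own tests (the test data path and module name are copied from the deleted test file below and may not match a local setup):

from pathlib import Path

from pum.pum_config import PumConfig

# Sketch: when the module's files on disk change (e.g. switching versions),
# clear cached hook imports so the next hook load picks up the new code.
cfg = PumConfig(Path("test") / "data" / "single_changelog", pum={"module": "test_single_changelog"})
cfg.cleanup_hook_imports()  # drops sys.modules entries whose __file__ lives under cfg's base path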
{pum-1.3.0 → pum-1.3.2}/pum/schema_migrations.py

@@ -74,10 +74,9 @@ class SchemaMigrations:
             "schema": psycopg.sql.Literal(self.config.config.pum.migration_table_schema),
         }

-        with connection.transaction():
-            cursor = SqlContent(query).execute(connection, parameters=parameters)
-            result = cursor._pum_results[0] if cursor._pum_results else None
-            return result[0] if result else False
+        cursor = SqlContent(query).execute(connection, parameters=parameters)
+        result = cursor._pum_results[0] if cursor._pum_results else None
+        return result[0] if result else False

     def exists_in_other_schemas(self, connection: psycopg.Connection) -> list[str]:
         """Check if the schema_migrations information table exists in other schemas.
@@ -100,9 +99,8 @@ class SchemaMigrations:
         parameters = {
             "schema": psycopg.sql.Literal(self.config.config.pum.migration_table_schema),
         }
-        with connection.transaction():
-            cursor = SqlContent(query).execute(connection, parameters=parameters)
-            return [row[0] for row in (cursor._pum_results or [])]
+        cursor = SqlContent(query).execute(connection, parameters=parameters)
+        return [row[0] for row in (cursor._pum_results or [])]

     def create(
         self,
@@ -346,14 +344,13 @@ INSERT INTO {table} (
             "table": self.migration_table_identifier,
         }

-        with connection.transaction():
-            cursor = SqlContent(query).execute(connection, parameters=parameters)
-            row = cursor._pum_results[0] if cursor._pum_results else None
-            if row is None:
-                raise PumSchemaMigrationNoBaselineError(
-                    f"Baseline version not found in the {self.migration_table_identifier_str} table."
-                )
-            return packaging.version.parse(row[0])
+        cursor = SqlContent(query).execute(connection, parameters=parameters)
+        row = cursor._pum_results[0] if cursor._pum_results else None
+        if row is None:
+            raise PumSchemaMigrationNoBaselineError(
+                f"Baseline version not found in the {self.migration_table_identifier_str} table."
+            )
+        return packaging.version.parse(row[0])

     def migration_details(self, connection: psycopg.Connection, version: str | None = None) -> dict:
         """Return the migration details from the migration table.
@@ -404,14 +401,13 @@ INSERT INTO {table} (
             "version": psycopg.sql.Literal(version),
         }

-        with connection.transaction():
-            cursor = SqlContent(query).execute(connection, parameters=parameters)
-            row = cursor._pum_results[0] if cursor._pum_results else None
-            if row is None:
-                raise PumSchemaMigrationError(
-                    f"Migration details not found for version {version} in the {self.migration_table_identifier_str} table."
-                )
-            return dict(zip([desc[0] for desc in cursor._pum_description], row, strict=False))
+        cursor = SqlContent(query).execute(connection, parameters=parameters)
+        row = cursor._pum_results[0] if cursor._pum_results else None
+        if row is None:
+            raise PumSchemaMigrationError(
+                f"Migration details not found for version {version} in the {self.migration_table_identifier_str} table."
+            )
+        return dict(zip([desc[0] for desc in cursor._pum_description], row, strict=False))

     def compare(self, connection: psycopg.Connection) -> int:
         """Compare the migrations details in the database to the changelogs in the source.
{pum-1.3.0 → pum-1.3.2}/pum.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pum
-Version: 1.3.0
+Version: 1.3.2
 Summary: Pum stands for "Postgres Upgrades Manager". It is a Database migration management tool very similar to flyway-db or Liquibase, based on metadata tables.
 Author-email: Denis Rouzaud <denis@opengis.ch>
 License-Expression: GPL-2.0-or-later
{pum-1.3.0 → pum-1.3.2}/pum.egg-info/SOURCES.txt

@@ -38,5 +38,4 @@ test/test_hooks.py
 test/test_roles.py
 test/test_schema_migrations.py
 test/test_sql_content.py
-test/test_transaction_fix.py
 test/test_upgrader.py
{pum-1.3.0 → pum-1.3.2}/test/test_hooks.py

@@ -1,5 +1,6 @@
 """Test module for hook functionality."""

+import sys
 import unittest
 from pathlib import Path
 from unittest.mock import Mock
@@ -7,6 +8,24 @@ from unittest.mock import Mock
 from pum.hook import HookHandler


+def cleanup_modules_by_path(base_path: Path) -> None:
+    """Clean up all modules imported from a base path.
+
+    This is a test helper that mimics the cleanup logic from PumConfig.cleanup_hook_imports().
+    """
+    base_path_str = str(base_path.resolve())
+    modules_to_remove = []
+    for module_name, module in list(sys.modules.items()):
+        if module is None:
+            continue
+        module_file = getattr(module, "__file__", None)
+        if module_file and module_file.startswith(base_path_str):
+            modules_to_remove.append(module_name)
+    for module_name in modules_to_remove:
+        if module_name in sys.modules:
+            del sys.modules[module_name]
+
+
 class TestHooks(unittest.TestCase):
     """Test the hook functionality."""

@@ -96,12 +115,10 @@ class TestHooks(unittest.TestCase):
     def test_hook_cleanup_imports(self) -> None:
         """Test that hook imports can be cleaned up to prevent conflicts when switching versions.

-        This test verifies that when hooks are cleaned up, their imported modules
-        are removed from sys.modules cache, allowing fresh imports when switching
-        to a different module version.
+        This test verifies that when hooks are cleaned up via path-based cleanup,
+        their imported modules are removed from sys.modules cache, allowing fresh imports
+        when switching to a different module version.
         """
-        import sys
-
         test_dir = Path("test") / "data" / "hook_sibling_imports"
         hook_file = test_dir / "app" / "create_hook.py"

@@ -113,15 +130,6 @@ class TestHooks(unittest.TestCase):
         # Load the hook - this will import view.helper
         handler = HookHandler(base_path=test_dir, file=str(hook_file.relative_to(test_dir)))

-        # Verify that view.helper was imported and tracked
-        self.assertGreater(
-            len(handler._imported_modules), 0, "Should have tracked imported modules"
-        )
-        self.assertTrue(
-            any("view" in mod for mod in handler._imported_modules),
-            "Should have tracked view module",
-        )
-
         # Verify view.helper is in sys.modules
         view_module_found = any("view" in mod for mod in sys.modules)
         self.assertTrue(
@@ -132,13 +140,8 @@ class TestHooks(unittest.TestCase):
         mock_conn = Mock()
         handler.execute(connection=mock_conn, parameters={})

-        # Clean up imports
-        handler.cleanup_imports()
-
-        # Verify that tracked modules were cleared
-        self.assertEqual(
-            len(handler._imported_modules), 0, "Should have cleared tracked modules list"
-        )
+        # Clean up imports via path-based cleanup
+        cleanup_modules_by_path(test_dir)

         # Verify that view modules were removed from sys.modules
         view_modules_after = [mod for mod in sys.modules if "view.helper" in mod or mod == "view"]
@@ -154,8 +157,6 @@ class TestHooks(unittest.TestCase):
         This test simulates switching between module versions by loading a hook,
         cleaning it up, and loading it again.
         """
-        import sys
-
         test_dir = Path("test") / "data" / "hook_sibling_imports"
         hook_file = test_dir / "app" / "create_hook.py"

@@ -176,8 +177,8 @@ class TestHooks(unittest.TestCase):
                 view_module_id_1 = id(sys.modules[mod_name])
                 break

-        # Clean up
-        handler1.cleanup_imports()
+        # Clean up via path-based cleanup
+        cleanup_modules_by_path(test_dir)

         # Verify cleanup worked
         view_modules = [mod for mod in sys.modules if "view.helper" in mod or mod == "view"]
@@ -199,4 +200,53 @@ class TestHooks(unittest.TestCase):
         self.assertIsNotNone(view_module_id_2, "Second load should have imported view")

         # Clean up after test
-        handler2.cleanup_imports()
+        cleanup_modules_by_path(test_dir)
+
+    def test_hook_submodule_cleanup_on_version_switch(self) -> None:
+        """Test that submodules are properly cleaned up when switching between module versions.
+
+        This test simulates the real-world scenario where a user switches between
+        different versions of a module that imports from nested submodules (e.g., view.submodule.helper).
+        Without proper submodule cleanup, the cached view module from v1 would prevent
+        v2 view.submodule.helper from being imported correctly.
+        """
+        v1_dir = Path("test") / "data" / "hook_submodule_cleanup" / "v1"
+        v2_dir = Path("test") / "data" / "hook_submodule_cleanup" / "v2"
+        hook_file = Path("app") / "create_hook.py"
+
+        # Clear any previously imported view modules
+        modules_to_remove = [key for key in sys.modules if key == "view" or key.startswith("view.")]
+        for module in modules_to_remove:
+            del sys.modules[module]
+
+        # Load v1 hook - imports view.submodule.helper which returns value_from_submodule_v1
+        handler_v1 = HookHandler(base_path=v1_dir, file=str(hook_file))
+        mock_conn = Mock()
+        # Execute v1 hook - the assertion inside run_hook will fail if wrong module is imported
+        handler_v1.execute(connection=mock_conn, parameters={})
+
+        # Verify submodules were imported
+        view_submodules = [mod for mod in sys.modules if mod.startswith("view.submodule")]
+        self.assertGreater(len(view_submodules), 0, "Should have imported view.submodule modules")
+
+        # Clean up v1 imports via path-based cleanup
+        cleanup_modules_by_path(v1_dir)
+
+        # Verify ALL view modules (including submodules) were cleaned up
+        remaining_view_modules = [
+            mod for mod in sys.modules if mod == "view" or mod.startswith("view.")
+        ]
+        self.assertEqual(
+            len(remaining_view_modules),
+            0,
+            f"All view modules should be cleaned up, but found: {remaining_view_modules}",
+        )
+
+        # Load v2 hook - should import fresh view.submodule.helper which returns value_from_submodule_v2
+        # This is the critical part - without submodule cleanup, Python would use the cached
+        # view.submodule.helper from v1 and the assertion inside run_hook would fail
+        handler_v2 = HookHandler(base_path=v2_dir, file=str(hook_file))
+        handler_v2.execute(connection=mock_conn, parameters={})
+
+        # Clean up
+        cleanup_modules_by_path(v2_dir)
pum-1.3.0/test/test_transaction_fix.py (deleted)

@@ -1,100 +0,0 @@
-import unittest
-from pathlib import Path
-
-import psycopg
-
-from pum.pum_config import PumConfig
-from pum.schema_migrations import SchemaMigrations
-
-
-class TestTransactionFix(unittest.TestCase):
-    """Test that verifies the transaction fix prevents 'idle in transaction' state."""
-
-    def setUp(self) -> None:
-        """Set up the test environment."""
-        self.pg_service = "pum_test"
-
-    def test_schema_migrations_exists_transaction_state(self) -> None:
-        """Test that schema_migrations.exists() doesn't leave connection in 'idle in transaction'."""
-        test_dir = Path("test") / "data" / "single_changelog"
-        cfg = PumConfig(test_dir, pum={"module": "test_single_changelog"})
-        sm = SchemaMigrations(cfg)
-
-        with psycopg.connect(f"service={self.pg_service}") as conn:
-            # Clean up any existing table
-            with conn.transaction():
-                conn.execute("DROP TABLE IF EXISTS public.pum_migrations")
-
-            # Initial state should be IDLE
-            self.assertEqual(conn.info.transaction_status.name, "IDLE")
-
-            # Call exists() which executes a query
-            sm.exists(conn)
-
-            # After the call, connection should still be IDLE (not "IDLE_IN_TRANSACTION")
-            # This will FAIL if the transaction block is not used in exists()
-            transaction_status = conn.info.transaction_status.name
-            self.assertEqual(
-                transaction_status,
-                "IDLE",
-                f"Connection in '{transaction_status}' state after exists() - should use transaction block",
-            )
-
-    def test_schema_migrations_baseline_transaction_state(self) -> None:
-        """Test that schema_migrations.baseline() doesn't leave connection in 'idle in transaction'."""
-        test_dir = Path("test") / "data" / "single_changelog"
-        cfg = PumConfig(test_dir, pum={"module": "test_single_changelog"})
-        sm = SchemaMigrations(cfg)
-
-        with psycopg.connect(f"service={self.pg_service}") as conn:
-            # Create the migrations table with some data
-            with conn.transaction():
-                sm.create(conn)
-                sm.set_baseline(conn, "1.0.0", commit=False)
-
-            # Initial state should be IDLE
-            self.assertEqual(conn.info.transaction_status.name, "IDLE")
-
-            # Call baseline() which executes a query
-            sm.baseline(conn)
-
-            # After the call, connection should still be IDLE
-            transaction_status = conn.info.transaction_status.name
-            self.assertEqual(
-                transaction_status,
-                "IDLE",
-                f"Connection in '{transaction_status}' state after baseline() - should use transaction block",
-            )
-
-            # Clean up
-            with conn.transaction():
-                conn.execute("DROP TABLE IF EXISTS public.pum_migrations")
-
-    def test_multiple_queries_without_transaction_causes_idle_in_transaction(self) -> None:
-        """Test that demonstrates the problem: queries without transaction blocks cause 'idle in transaction'."""
-        with psycopg.connect(f"service={self.pg_service}") as conn:
-            # Start with IDLE state
-            self.assertEqual(conn.info.transaction_status.name, "IDLE")
-
-            # Execute a query WITHOUT using a transaction block
-            # This simulates the old buggy behavior
-            cursor = conn.cursor()
-            cursor.execute("SELECT 1")
-            cursor.fetchone()
-            cursor.close()
-
-            # Now the connection is stuck in INTRANS (idle in transaction)
-            transaction_status = conn.info.transaction_status.name
-            self.assertEqual(
-                transaction_status,
-                "INTRANS",
-                "Without transaction blocks, connection gets stuck in INTRANS (idle in transaction)",
-            )
-
-            # Need to explicitly commit to get back to IDLE
-            conn.commit()
-            self.assertEqual(conn.info.transaction_status.name, "IDLE")
-
-
-if __name__ == "__main__":
-    unittest.main()
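For quick reference, the connection-state behaviour this deleted test asserted can be checked directly with psycopg's transaction status attribute; a tiny standalone sketch (the "pum_test" service name comes from the deleted test and is an assumption about the local environment):

import psycopg

with psycopg.connect("service=pum_test") as conn:
    conn.execute("SELECT 1")  # with autocommit off, this implicitly opens a transaction
    print(conn.info.transaction_status.name)  # "INTRANS" (idle in transaction)
    conn.commit()
    print(conn.info.transaction_status.name)  # "IDLE"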