pum 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pum/__init__.py +27 -0
- pum/changelog.py +111 -0
- pum/checker.py +431 -0
- pum/cli.py +402 -0
- pum/conf/pum_config_example.yaml +19 -0
- pum/config_model.py +152 -0
- pum/dumper.py +110 -0
- pum/exceptions.py +47 -0
- pum/hook.py +231 -0
- pum/info.py +30 -0
- pum/parameter.py +72 -0
- pum/pum_config.py +231 -0
- pum/role_manager.py +253 -0
- pum/schema_migrations.py +306 -0
- pum/sql_content.py +265 -0
- pum/upgrader.py +188 -0
- pum-1.0.0.dist-info/METADATA +61 -0
- pum-1.0.0.dist-info/RECORD +22 -0
- pum-1.0.0.dist-info/WHEEL +5 -0
- pum-1.0.0.dist-info/entry_points.txt +2 -0
- pum-1.0.0.dist-info/licenses/LICENSE +339 -0
- pum-1.0.0.dist-info/top_level.txt +1 -0
pum/sql_content.py
ADDED
@@ -0,0 +1,265 @@
import logging
import re
from pathlib import Path

import psycopg

from .exceptions import PumSqlError

logger = logging.getLogger(__name__)


def sql_chunks_from_file(file: str | Path) -> list[psycopg.sql.SQL]:
    """Read SQL from a file, remove comments, and split into chunks.

    Args:
        file (str | Path): Path to the SQL file.

    Returns:
        list: List of SQL statements.

    Raises:
        PumSqlError: If the SQL file contains forbidden transaction statements.

    """
    file = Path(file) if not isinstance(file, Path) else file
    sql_code = []
    with Path.open(file) as file:
        sql_content = file.read()

    # Remove SQL comments
    def remove_sql_comments(sql: str) -> str:
        """Remove SQL comments from the SQL string.

        Args:
            sql (str): The SQL string to process.

        Returns:
            str: The SQL string without comments.

        """
        # Remove multiline comments (/* ... */)
        sql = re.sub(r"/\*.*?\*/", "", sql, flags=re.DOTALL)
        # Remove single-line comments (-- ...)
        sql = re.sub(r"(?m)(^|;)\s*--.*?(\r\n|\r|\n)", r"\1", sql)
        return sql

    sql_content = remove_sql_comments(sql_content)

    # Check for forbidden transaction statements
    forbidden_statements = (
        (
            r"\bBEGIN\b\s*;",
            "BEGIN; COMMIT; is not authorized in executed SQL since connections are handled by PUM.",
        ),
        (
            r"\bCOMMIT\b\s*;",
            "BEGIN; COMMIT; is not authorized in executed SQL since connections are handled by PUM.",
        ),
        (
            r"SELECT +pg_catalog.set_config.*search_path.*;",
            "Setting the search path is not authorized in executed SQL as it breaks PostGIS installation.",
        ),
    )
    for forbidden, message in forbidden_statements:
        if re.search(forbidden, sql_content, re.IGNORECASE):
            raise PumSqlError(f"SQL contains forbidden transaction statement: {message}")

    def split_sql_statements(sql: str) -> list[str]:
        """Split SQL statements on semicolons, ignoring those inside single/double quotes, dollar-quoted blocks, and DO/BODY blocks.

        Do NOT split on semicolons inside string literals (e.g. COMMENT ON ... IS '...;...'),
        and do NOT split on semicolons inside dollar-quoted blocks (e.g. $$...;...$$, $BODY$...;...$BODY$),
        but DO split on semicolons that are not inside a string, block, or quote, even if the statement contains a SQL comment with a semicolon.
        """
        # Step 1: Replace all dollar-quoted blocks with placeholders
        body_blocks = []
        block_pattern = (
            r"(\$\$BODY\$\$.*?\$\$BODY\$\$"
            r"|\$BODY\$.*?\$BODY\$"
            r"|\$\$DO\$\$.*?\$\$DO\$\$"
            r"|\$DO\$.*?\$DO\$"
            r"|\$[A-Za-z0-9_]*\$.*?\$[A-Za-z0-9_]*\$"  # generic $tag$...$tag$
            r"|\$\$.*?\$\$"  # generic $$...$$
            r")"
        )

        def block_replacer(match):
            body_blocks.append(match.group(0))
            return f"__BLOCK_{len(body_blocks) - 1}__"

        sql_wo_blocks = re.sub(
            block_pattern, block_replacer, sql, flags=re.DOTALL | re.IGNORECASE
        )

        # Step 2: Split by semicolon, but only when not inside a string or comment
        statements = []
        current = []
        in_single = False
        in_double = False
        in_line_comment = False
        i = 0
        while i < len(sql_wo_blocks):
            c = sql_wo_blocks[i]
            next2 = sql_wo_blocks[i : i + 2]
            if in_line_comment:
                current.append(c)
                if c == "\n":
                    in_line_comment = False
                i += 1
                continue
            if not in_single and not in_double and next2 == "--":
                in_line_comment = True
                current.append("--")
                i += 2
                continue
            if not in_double and c == "'":
                in_single = not in_single
                current.append(c)
                i += 1
                continue
            if not in_single and c == '"':
                in_double = not in_double
                current.append(c)
                i += 1
                continue
            if not in_single and not in_double and not in_line_comment and c == ";":
                statements.append("".join(current).strip())
                current = []
                i += 1
                continue
            current.append(c)
            i += 1
        if current:
            statements.append("".join(current).strip())

        # Step 3: Restore dollar-quoted blocks
        def restore_blocks(stmt):
            for idx, block in enumerate(body_blocks):
                stmt = stmt.replace(f"__BLOCK_{idx}__", block)
            return stmt

        return [restore_blocks(stmt) for stmt in statements if stmt]

    sql_code = split_sql_statements(sql_content)

    # if we want to remove new lines from the SQL code, we need to handle comments starting with --
    # and remove them before removing new lines
    # sql_code = [re.sub(r"[\r\n]+", " ", stmt) for stmt in sql_code]
    sql_code = [psycopg.sql.SQL(stmt) for stmt in sql_code]

    return sql_code


class SqlContent:
    """Class to handle SQL content preparation and execution."""

    def __init__(self, sql: str | psycopg.sql.SQL | Path) -> None:
        """Initialize the SqlContent class.

        Args:
            sql: The SQL statement to execute or a path to a SQL file.

        """
        self.sql = sql

    def validate(self, parameters: dict | None) -> bool:
        """Validate the SQL content.
        This is done by checking that the SQL content is not empty and can be prepared.

        Args:
            parameters: The parameters to pass to the SQL files.

        Returns:
            bool: True if the content is valid; a PumSqlError is raised otherwise.

        """
        if not self.sql:
            raise PumSqlError("SQL content is empty.")
        self._prepare_sql(parameters)
        return True

    def execute(
        self,
        connection: psycopg.Connection,
        *,
        parameters: dict | None = None,
        commit: bool = False,
    ) -> psycopg.Cursor:
        """Execute a SQL statement with optional parameters.

        Args:
            connection: The database connection to execute the SQL statement.
            parameters: Parameters to bind to the SQL statement. Defaults to None.
            commit: Whether to commit the transaction. Defaults to False.

        """
        cursor = connection.cursor()

        for sql_code in self._prepare_sql(parameters):
            try:
                statement = sql_code.as_string(connection)
            except (psycopg.errors.SyntaxError, psycopg.errors.ProgrammingError) as e:
                raise PumSqlError(
                    f"SQL preparation failed for the following code: {sql_code} {e}"
                ) from e
            try:
                logger.debug(f"Executing SQL statement: {statement}")
                cursor.execute(statement)
            except (psycopg.errors.SyntaxError, psycopg.errors.ProgrammingError) as e:
                raise PumSqlError(
                    f"SQL execution failed for the following code: {statement} {e}"
                ) from e
        if commit:
            connection.commit()

        return cursor

    def _prepare_sql(self, parameters: dict | None) -> list[psycopg.sql.SQL]:
        """Prepare SQL for execution.

        Args:
            parameters: Parameters to bind to the SQL statement. Defaults to None.

        Returns:
            list: A list of prepared SQL statements.

        Raises:
            PumSqlError: If SQL preparation fails.

        """
        if isinstance(self.sql, Path):
            logger.info(
                f"Checking SQL from file: {self.sql} with parameters: {parameters}",
            )
            sql_code = sql_chunks_from_file(self.sql)
        elif isinstance(self.sql, str):
            sql_code = [psycopg.sql.SQL(self.sql)]
        else:
            sql_code = [self.sql]

        def format_sql(
            statement: psycopg.sql.SQL, parameters: dict | None = None
        ) -> psycopg.sql.SQL:
            for key, value in (parameters or {}).items():
                if (
                    not isinstance(value, psycopg.sql.Literal)
                    and not isinstance(value, psycopg.sql.Identifier)
                    and not isinstance(value, psycopg.sql.Composed)
                ):
                    raise PumSqlError(
                        f"Invalid parameter type for key '{key}': {type(value)}. "
                        "Parameters must be psycopg.sql.Literal, psycopg.sql.Identifier or psycopg.sql.Composed."
                    )
            try:
                return statement.format(**parameters)
            except TypeError:
                # if parameters is None, we can ignore this error
                return statement
            except KeyError as e:
                raise PumSqlError(
                    f"SQL preparation failed for the following code: missing parameter: {statement} {e}"
                ) from e

        return [format_sql(statement, parameters) for statement in sql_code]
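Illustrative usage of SqlContent, sketched from the code above and not part of the released package: it assumes a reachable PostgreSQL database (the connection string is a placeholder), and the SQL file path and the "owner" parameter name are hypothetical.

# Sketch only: exercising SqlContent from pum/sql_content.py.
# The connection string, file path and "owner" parameter are placeholders.
from pathlib import Path

import psycopg

from pum.sql_content import SqlContent

with psycopg.connect("dbname=demo") as conn:
    # A plain SQL string is wrapped in psycopg.sql.SQL and executed as a single chunk.
    SqlContent("CREATE SCHEMA IF NOT EXISTS app;").execute(conn, commit=True)

    # A file is read, stripped of comments, checked for forbidden BEGIN/COMMIT
    # and search_path statements, and split into statements; {owner} placeholders
    # are formatted with the given psycopg.sql values.
    SqlContent(Path("delta/0.0.1_init.sql")).execute(
        conn,
        parameters={"owner": psycopg.sql.Identifier("app_owner")},
        commit=True,
    )

Note that passing a raw Python value (e.g. a plain string) as a parameter raises PumSqlError, since _prepare_sql only accepts psycopg.sql.Literal, Identifier or Composed values.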
pum/upgrader.py
ADDED
@@ -0,0 +1,188 @@
#!/usr/bin/env python
import copy
import logging

import packaging
import packaging.version
import psycopg

from .pum_config import PumConfig
from .exceptions import PumException
from .schema_migrations import SchemaMigrations
from .sql_content import SqlContent


logger = logging.getLogger(__name__)


class Upgrader:
    """Class to handle the upgrade of a module.
    This class is used to install a new instance or to upgrade an existing instance of a module.
    It stores the info about the upgrade in a table on the database.
    """

    def __init__(
        self,
        config: PumConfig,
        max_version: packaging.version.Version | str | None = None,
    ) -> None:
        """Initialize the Upgrader class.
        This class is used to install a new instance or to upgrade an existing instance of a module.
        Stores the info about the upgrade in a table on the database.
        The table is created in the schema defined in the config file if it does not exist.

        Args:
            config:
                The configuration object.
            max_version:
                Maximum (inclusive) version to run the deltas up to.

        """
        self.config = config
        self.max_version = packaging.version.parse(max_version) if max_version else None
        self.schema_migrations = SchemaMigrations(self.config)

    def install(
        self,
        connection: psycopg.Connection | None = None,
        *,
        parameters: dict | None = None,
        max_version: str | packaging.version.Version | None = None,
        roles: bool = False,
        grant: bool = False,
        demo_data: str | None = None,
        commit: bool = False,
    ) -> None:
        """Install the given module.
        This will create the schema_migrations table if it does not exist.
        The changelogs are applied in the order they are found in the directory.
        It will also set the baseline version to the current version of the module.

        Args:
            connection:
                The database connection to use for the upgrade.
            parameters:
                The parameters to pass for the migration.
            max_version:
                The maximum version to apply. If None, all versions are applied.
            roles:
                If True, roles will be created.
            grant:
                If True, permissions will be granted to the roles.
            demo_data:
                The name of the demo data to load. If None, no demo data is loaded.
            commit:
                If True, the changes will be committed to the database.
        """
        parameters_literals = self._prepare_parameters(parameters)

        if demo_data and demo_data not in self.config.demo_data():
            raise PumException(
                f"Demo data '{demo_data}' not found in the configuration. Available demo data: {self.config.demo_data()}"
            )

        if self.schema_migrations.exists(connection):
            msg = (
                f"Schema migrations table {self.config.config.pum.migration_table_schema}.pum_migrations already exists. "
                "This means that the module is already installed or the database is not empty. "
                "Use upgrade() to upgrade the db or start with a clean db."
            )
            raise PumException(msg)
        self.schema_migrations.create(connection, commit=False)

        if roles or grant:
            self.config.role_manager().create_roles(
                connection=connection, grant=False, commit=False
            )

        for pre_hook in self.config.pre_hook_handlers():
            pre_hook.execute(connection=connection, commit=False, parameters=parameters_literals)

        last_changelog = None
        for changelog in self.config.changelogs(max_version=max_version):
            last_changelog = changelog
            changelog_files = changelog.apply(
                connection, commit=False, parameters=parameters_literals
            )
            changelog_files = [str(f) for f in changelog_files]
            self.schema_migrations.set_baseline(
                connection=connection,
                version=changelog.version,
                beta_testing=False,
                commit=False,
                changelog_files=changelog_files,
                parameters=parameters,
            )

        for post_hook in self.config.post_hook_handlers():
            post_hook.execute(connection=connection, commit=False, parameters=parameters_literals)

        logger.info(
            "Installed %s.pum_migrations table and applied changelogs up to version %s",
            self.config.config.pum.migration_table_schema,
            last_changelog.version,
        )

        if grant:
            self.config.role_manager().grant_permissions(connection=connection, commit=False)

        if commit:
            connection.commit()
            logger.info("Changes committed to the database.")

    def install_demo_data(
        self,
        connection: psycopg.Connection,
        name: str,
        *,
        parameters: dict | None = None,
    ) -> None:
        """Install demo data for the module.

        Args:
            connection: The database connection to use.
            name: The name of the demo data to install.
            parameters: The parameters to pass to the demo data SQL.
        """
        if name not in self.config.demo_data():
            raise PumException(f"Demo data '{name}' not found in the configuration.")

        parameters_literals = self._prepare_parameters(parameters)

        demo_data_file = self.config.base_path / self.config.demo_data()[name]
        logger.info("Installing demo data from %s", demo_data_file)

        for pre_hook in self.config.pre_hook_handlers():
            pre_hook.execute(connection=connection, commit=False, parameters=parameters_literals)

        connection.commit()

        SqlContent(sql=demo_data_file).execute(
            connection=connection,
            commit=False,
            parameters=parameters_literals,
        )

        connection.commit()

        for post_hook in self.config.post_hook_handlers():
            post_hook.execute(connection=connection, commit=False, parameters=parameters_literals)

        logger.info("Demo data '%s' installed successfully.", name)

    @staticmethod
    def _prepare_parameters(parameters: dict | None) -> dict:
        """Prepare a dictionary of parameters for use in SQL queries by converting each value to a psycopg.sql.Literal.

        Args:
            parameters: A dictionary of parameters to be converted, or None.

        Returns:
            dict: A new dictionary with the same keys as `parameters`, where each value is wrapped in psycopg.sql.Literal.
        """
        parameters_literals = copy.deepcopy(parameters) if parameters else {}
        for key, value in parameters_literals.items():
            parameters_literals[key] = psycopg.sql.Literal(value)
        return parameters_literals
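Illustrative use of the Upgrader class, sketched from the code above and not part of the released package: the PumConfig construction is defined in pum/pum_config.py (not included in this excerpt) and is left as a placeholder, the connection string is illustrative, and the "srid" parameter name is hypothetical.

# Sketch only: installing a module with Upgrader from pum/upgrader.py.
# The PumConfig value, connection string and "srid" parameter are placeholders.
import psycopg

from pum.upgrader import Upgrader

config = ...  # a PumConfig instance, built as defined in pum/pum_config.py

with psycopg.connect("dbname=demo") as conn:
    upgrader = Upgrader(config)
    # Plain Python values are accepted here: install() wraps them in
    # psycopg.sql.Literal via _prepare_parameters() before running the
    # pre-hooks, changelogs and post-hooks, then commits at the end.
    upgrader.install(
        conn,
        parameters={"srid": 2056},
        roles=True,
        grant=True,
        commit=True,
    )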
pum-1.0.0.dist-info/METADATA
ADDED
@@ -0,0 +1,61 @@
Metadata-Version: 2.4
Name: pum
Version: 1.0.0
Summary: Pum stands for "Postgres Upgrades Manager". It is a Database migration management tool very similar to flyway-db or Liquibase, based on metadata tables.
Author-email: Denis Rouzaud <denis@opengis.ch>
License-Expression: GPL-2.0-or-later
Project-URL: homepage, https://opengisch.github.io/pum/
Project-URL: documentation, https://opengisch.github.io/pum/
Project-URL: repository, https://github.com/opengisch/pum/
Project-URL: tracker, https://github.com/opengisch/pum/issues
Keywords: postgres,database,versioning
Classifier: Topic :: Database
Classifier: Intended Audience :: System Administrators
Classifier: Intended Audience :: Information Technology
Classifier: Topic :: Software Development :: Version Control
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: packaging
Requires-Dist: pydantic
Requires-Dist: PyYAML
Requires-Dist: psycopg[binary]
Provides-Extra: dev
Requires-Dist: black; extra == "dev"
Requires-Dist: flake8-builtins; extra == "dev"
Requires-Dist: flake8-isort; extra == "dev"
Requires-Dist: flake8-print; extra == "dev"
Requires-Dist: pre-commit; extra == "dev"
Requires-Dist: nose2; extra == "dev"
Dynamic: license-file

# PostgreSQL Upgrades Manager (PUM)

## New version

This is the code of pum version 1.
You can find version 0.x documentation at https://github.com/opengisch/pum/tree/old-v1

## About

PUM (PostgreSQL Upgrades Manager) is a robust database migration management tool designed to streamline the process of managing PostgreSQL database upgrades. Inspired by tools like FlywayDB and Liquibase, PUM leverages metadata tables to ensure seamless database versioning and migration.

## Key Features

- **Command-line and Python Integration**: Use PUM as a standalone CLI tool or integrate it into your Python project.
- **Database Versioning**: Automatically manage database versioning with a metadata table.
- **Changelog Management**: Apply and track SQL delta files for database upgrades.
- **Migration Hooks**: Define custom hooks to execute additional SQL or Python code before or after migrations. This feature allows you to isolate data (table) code from application code (such as views and triggers), ensuring a clear separation of concerns and more maintainable database structures.

## Why PUM?

Managing database migrations in a Version Control System (VCS) can be challenging, especially for production databases. PUM simplifies this process by embedding version metadata directly into the database, enabling efficient tracking and application of migrations.

PUM was developed to address challenges in the [TEKSI](https://github.com/TEKSI) project, an open-source GIS for network management based on [QGIS](http://qgis.org/fr/site/).
pum-1.0.0.dist-info/RECORD
ADDED
@@ -0,0 +1,22 @@
pum/__init__.py,sha256=IG1g3LMuqSxlW8GC1-H5XdnPX3KF-Irq7MuMEbCYCUM,684
pum/changelog.py,sha256=yDc5swmMd5gb2vCEAlenoq5gs-ZEGc4uXicBtiGxkOk,3692
pum/checker.py,sha256=GT2v7793HP1g94dv0mL6CHtQfblQwAyeFHEWCy44lkc,14379
pum/cli.py,sha256=GcxBiQM_S4CDG1v1Gy9xj0noaf-GdeJFi8jc2yfVjLU,13956
pum/config_model.py,sha256=G8FsVGsYzMEQKrVIc3SltycbJvwmgfhu6uNduAL-D8E,5107
pum/dumper.py,sha256=EJZ8T44JM0GKgdqw1ENOfhZ-RI89OQ4DNdoTZKtLdEw,3404
pum/exceptions.py,sha256=HYgC0kLk6Gel7RtT9AjxHdtkmZ4BtjKdB5BHYL67LVs,1042
pum/hook.py,sha256=L4Cnr34zrgPzxso9CdsUYWmtuOXRmFccQZ9Lp4IYCBM,9326
pum/info.py,sha256=VSCUZJJ_ae-khKaudwbgqszZXBMKB_yskuQo5Mc1PgY,1024
pum/parameter.py,sha256=e9f80kMZpART9laeImW_YECeTvwDyDSmZlTeJGvpS_8,2449
pum/pum_config.py,sha256=WopogLoPEJkvuKEdpWq56YZaZc3mgK-pnII0TbYtIlQ,8454
pum/role_manager.py,sha256=yr-fmytflGqANY3IZIpgJBoMOK98ynTWfemIBhAy79A,10131
pum/schema_migrations.py,sha256=FiaqAbhFX7vd3Rk_R43kd7-QWfil-Q5587EU8xSLBkA,10504
pum/sql_content.py,sha256=gwgvcdXOXxNz3RvLtL8Bqr5WO3KKq3sluhbj4OAEnQs,9756
pum/upgrader.py,sha256=jvl6vmpgxGyYiw8rrWC_bDC7Zd4wHJqGLXCK8EMt9wY,7109
pum/conf/pum_config_example.yaml,sha256=_nwV_7z6S_Se-mejh_My0JFLY-A0Q4nigeLGPZAfcqg,424
pum-1.0.0.dist-info/licenses/LICENSE,sha256=2ylvL381vKOhdO-w6zkrOxe9lLNBhRQpo9_0EbHC_HM,18046
pum-1.0.0.dist-info/METADATA,sha256=HVBChQBJ0xnjyYYxnYyU7EAhU4CLs0V8SIJhM09j9iE,3146
pum-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
pum-1.0.0.dist-info/entry_points.txt,sha256=U6dmxSpKs1Pe9vWiR29VPhJMDjrmZeJCSxvfLGR8BD4,36
pum-1.0.0.dist-info/top_level.txt,sha256=ddiI4HLBhY6ql-NNm0Ez0JhoOHdWDIzrHeCdHmmagcc,4
pum-1.0.0.dist-info/RECORD,,